Dienstag_2
This commit is contained in:
parent adc5542da0
commit d705fd863e
11  .idea/Web- und Skriptsprachen (Python-Blockkurs).iml  (generated, Normal file)
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="TestRunnerService">
    <option name="PROJECT_TEST_RUNNER" value="Unittests" />
  </component>
</module>
10  .idea/misc.xml  (generated, Normal file)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="JavaScriptSettings">
    <option name="languageLevel" value="ES6" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
  <component name="PythonCompatibilityInspectionAdvertiser">
    <option name="version" value="3" />
  </component>
</project>
8  .idea/modules.xml  (generated, Normal file)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/Web- und Skriptsprachen (Python-Blockkurs).iml" filepath="$PROJECT_DIR$/.idea/Web- und Skriptsprachen (Python-Blockkurs).iml" />
    </modules>
  </component>
</project>
6  .idea/vcs.xml  (generated, Normal file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
623  .idea/workspace.xml  (generated, Normal file)
@@ -0,0 +1,623 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ChangeListManager">
    <list default="true" id="39276d0a-00e3-48d0-9c0e-a0d08278b21f" name="Default Changelist" comment="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)">
      <change afterPath="$PROJECT_DIR$/.idea/inspectionProfiles/Project_Default.xml" afterDir="false" />
    </list>
    <option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="CoverageDataManager">
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$AB1_Aufg3.coverage" NAME="AB1_Aufg3 Coverage Results" MODIFIED="1538810960299" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$Pong.coverage" NAME="Pong Coverage Results" MODIFIED="1539414307376" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$Tag2_MittelwertAusVariablerAnzahlParameter.coverage" NAME="Tag2_MittelwertAusVariablerAnzahlParameter Coverage Results" MODIFIED="1538815118408" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$Tag2_EigenesSpiel.coverage" NAME="Tag2_EigenesSpiel Coverage Results" MODIFIED="1538827299769" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$AB1_Aufg5.coverage" NAME="AB1_Aufg5 Coverage Results" MODIFIED="1538811804618" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
    <SUITE FILE_PATH="coverage/Web__und_Skriptsprachen__Python_Blockkurs_$FirstProgram.coverage" NAME="FirstProgram Coverage Results" MODIFIED="1538766705533" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
  </component>
  <component name="DatabaseView">
    <option name="SHOW_INTERMEDIATE" value="true" />
    <option name="GROUP_DATA_SOURCES" value="true" />
    <option name="GROUP_SCHEMA" value="true" />
    <option name="GROUP_CONTENTS" value="false" />
    <option name="SORT_POSITIONED" value="false" />
    <option name="SHOW_EMPTY_GROUPS" value="false" />
    <option name="AUTO_SCROLL_FROM_SOURCE" value="false" />
    <option name="HIDDEN_KINDS">
      <set />
    </option>
    <expand />
    <select />
  </component>
  <component name="FUSProjectUsageTrigger">
    <session id="2005159020">
      <usages-collector id="statistics.lifecycle.project">
        <counts>
          <entry key="project.closed" value="17" />
          <entry key="project.open.time.1" value="1" />
          <entry key="project.open.time.10" value="3" />
          <entry key="project.open.time.11" value="3" />
          <entry key="project.open.time.12" value="2" />
          <entry key="project.open.time.13" value="1" />
          <entry key="project.open.time.14" value="3" />
          <entry key="project.open.time.15" value="1" />
          <entry key="project.open.time.59" value="1" />
          <entry key="project.open.time.8" value="2" />
          <entry key="project.open.time.9" value="1" />
          <entry key="project.opened" value="18" />
        </counts>
      </usages-collector>
      <usages-collector id="statistics.file.extensions.open">
        <counts>
          <entry key="css" value="2" />
          <entry key="html" value="4" />
          <entry key="js" value="1" />
          <entry key="py" value="8" />
          <entry key="txt" value="1" />
        </counts>
      </usages-collector>
      <usages-collector id="statistics.file.types.open">
        <counts>
          <entry key="CSS" value="2" />
          <entry key="HTML" value="4" />
          <entry key="JavaScript" value="1" />
          <entry key="PLAIN_TEXT" value="1" />
          <entry key="Python" value="8" />
        </counts>
      </usages-collector>
      <usages-collector id="statistics.file.extensions.edit">
        <counts>
          <entry key="css" value="197" />
          <entry key="html" value="789" />
          <entry key="js" value="1018" />
          <entry key="py" value="7455" />
          <entry key="txt" value="126" />
        </counts>
      </usages-collector>
      <usages-collector id="statistics.file.types.edit">
        <counts>
          <entry key="CSS" value="197" />
          <entry key="HTML" value="789" />
          <entry key="JavaScript" value="1018" />
          <entry key="PLAIN_TEXT" value="126" />
          <entry key="Python" value="7455" />
        </counts>
      </usages-collector>
    </session>
  </component>
  <component name="FileEditorManager">
    <leaf SIDE_TABS_SIZE_LIMIT_KEY="300">
      <file pinned="false" current-in-tab="false">
        <entry file="file://$PROJECT_DIR$/Tag2_EigenesSpiel.py">
          <provider selected="true" editor-type-id="text-editor">
            <state relative-caret-position="1037">
              <caret line="61" column="4" selection-start-line="61" selection-start-column="4" selection-end-line="63" selection-end-column="42" />
              <folding>
                <marker date="1538827277725" expanded="true" signature="32:37" ph="..." />
                <marker date="1538827277725" expanded="true" signature="213:307" ph="..." />
                <marker date="1538827277725" expanded="true" signature="322:327" ph="..." />
                <marker date="1538827277725" expanded="true" signature="462:654" ph="..." />
                <marker date="1538827277725" expanded="true" signature="812:814" ph="..." />
              </folding>
            </state>
          </provider>
        </entry>
      </file>
      <file pinned="false" current-in-tab="false">
        <entry file="file://$PROJECT_DIR$/Pong.py">
          <provider selected="true" editor-type-id="text-editor">
            <state relative-caret-position="1564">
              <caret line="94" column="8" selection-start-line="94" selection-start-column="8" selection-end-line="94" selection-end-column="8" />
              <folding>
                <element signature="e#0#13#0" expanded="true" />
                <marker date="1539359571752" expanded="true" signature="872:1094" ph="..." />
                <marker date="1539359571752" expanded="true" signature="1117:1174" ph="..." />
                <marker date="1539359571752" expanded="true" signature="2523:2842" ph="..." />
              </folding>
            </state>
          </provider>
        </entry>
      </file>
      <file pinned="false" current-in-tab="true">
        <entry file="file://$PROJECT_DIR$/AB4_Aufg1.html">
          <provider selected="true" editor-type-id="text-editor">
            <state relative-caret-position="561">
              <caret line="33" column="7" lean-forward="true" selection-start-line="33" selection-start-column="7" selection-end-line="33" selection-end-column="7" />
            </state>
          </provider>
        </entry>
      </file>
    </leaf>
  </component>
  <component name="FileTemplateManagerImpl">
    <option name="RECENT_TEMPLATES">
      <list>
        <option value="Python Script" />
        <option value="HTML File" />
        <option value="CSS File" />
        <option value="JavaScript File" />
      </list>
    </option>
  </component>
  <component name="Git.Settings">
    <option name="UPDATE_TYPE" value="REBASE" />
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
  </component>
  <component name="IdeDocumentHistory">
    <option name="CHANGED_PATHS">
      <list>
        <option value="$PROJECT_DIR$/AB1_Aufg2.py" />
        <option value="$PROJECT_DIR$/AB1_Aufg3.py" />
        <option value="$PROJECT_DIR$/AB1_Aufg5.py" />
        <option value="$PROJECT_DIR$/Tag2_MittelwertAusVariablerAnzahlParameter.py" />
        <option value="$PROJECT_DIR$/Tag2_EigenesSpiel.py" />
        <option value="$PROJECT_DIR$/Pong.py" />
        <option value="$PROJECT_DIR$/AB4_Aufg2.html" />
        <option value="$PROJECT_DIR$/formatiert.css" />
        <option value="$PROJECT_DIR$/HTML_Test_Doc.html" />
        <option value="$PROJECT_DIR$/Format_zu_AB5.css" />
        <option value="$PROJECT_DIR$/AB4_Aufg1.html" />
        <option value="$PROJECT_DIR$/Popups_zu_AB6.js" />
      </list>
    </option>
  </component>
  <component name="JsBuildToolGruntFileManager" detection-done="true" sorting="DEFINITION_ORDER" />
  <component name="JsBuildToolPackageJson" detection-done="true" sorting="DEFINITION_ORDER" />
  <component name="JsGulpfileManager">
    <detection-done>true</detection-done>
    <sorting>DEFINITION_ORDER</sorting>
  </component>
  <component name="ProjectFrameBounds" extendedState="6">
    <option name="x" value="56" />
    <option name="y" value="54" />
    <option name="width" value="1400" />
    <option name="height" value="820" />
  </component>
  <component name="ProjectLevelVcsManager" settingsEditedManually="true">
    <ConfirmationsSetting value="2" id="Add" />
  </component>
  <component name="ProjectView">
    <navigator proportions="" version="1">
      <foldersAlwaysOnTop value="true" />
    </navigator>
    <panes>
      <pane id="ProjectPane">
        <subPane>
          <expand>
            <path>
              <item name="Web- und Skriptsprachen (Python-Blockkurs)" type="b2602c69:ProjectViewProjectNode" />
              <item name="Web- und Skriptsprachen (Python-Blockkurs)" type="462c0819:PsiDirectoryNode" />
            </path>
          </expand>
          <select />
        </subPane>
      </pane>
      <pane id="Scope" />
    </panes>
  </component>
  <component name="PropertiesComponent">
    <property name="DefaultHtmlFileTemplate" value="HTML File" />
    <property name="WebServerToolWindowFactoryState" value="false" />
    <property name="last_opened_file_path" value="$PROJECT_DIR$" />
    <property name="list.type.of.created.stylesheet" value="CSS" />
    <property name="settings.editor.selected.configurable" value="com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable" />
  </component>
  <component name="RunDashboard">
    <option name="ruleStates">
      <list>
        <RuleState>
          <option name="name" value="ConfigurationTypeDashboardGroupingRule" />
        </RuleState>
        <RuleState>
          <option name="name" value="StatusDashboardGroupingRule" />
        </RuleState>
      </list>
    </option>
  </component>
  <component name="RunManager" selected="Python.Pong">
    <configuration name="AB1_Aufg3" type="PythonConfigurationType" factoryName="Python" temporary="true">
      <module name="Web- und Skriptsprachen (Python-Blockkurs)" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/AB1_Aufg3.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="AB1_Aufg5" type="PythonConfigurationType" factoryName="Python" temporary="true">
      <module name="Web- und Skriptsprachen (Python-Blockkurs)" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/AB1_Aufg5.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="Pong" type="PythonConfigurationType" factoryName="Python" temporary="true">
      <module name="Web- und Skriptsprachen (Python-Blockkurs)" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/Pong.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="Tag2_EigenesSpiel" type="PythonConfigurationType" factoryName="Python" temporary="true">
      <module name="Web- und Skriptsprachen (Python-Blockkurs)" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/Tag2_EigenesSpiel.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="Tag2_MittelwertAusVariablerAnzahlParameter" type="PythonConfigurationType" factoryName="Python" temporary="true">
      <module name="Web- und Skriptsprachen (Python-Blockkurs)" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/Tag2_MittelwertAusVariablerAnzahlParameter.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <list>
      <item itemvalue="Python.AB1_Aufg3" />
      <item itemvalue="Python.AB1_Aufg5" />
      <item itemvalue="Python.Tag2_MittelwertAusVariablerAnzahlParameter" />
      <item itemvalue="Python.Tag2_EigenesSpiel" />
      <item itemvalue="Python.Pong" />
    </list>
    <recent_temporary>
      <list>
        <item itemvalue="Python.Pong" />
        <item itemvalue="Python.Tag2_EigenesSpiel" />
        <item itemvalue="Python.Tag2_MittelwertAusVariablerAnzahlParameter" />
        <item itemvalue="Python.AB1_Aufg5" />
        <item itemvalue="Python.AB1_Aufg3" />
      </list>
    </recent_temporary>
  </component>
  <component name="SvnConfiguration">
    <configuration />
  </component>
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="39276d0a-00e3-48d0-9c0e-a0d08278b21f" name="Default Changelist" comment="" />
      <created>1538765946920</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1538765946920</updated>
    </task>
    <task id="LOCAL-00001" summary="Initialization">
      <created>1538813521977</created>
      <option name="number" value="00001" />
      <option name="presentableId" value="LOCAL-00001" />
      <option name="project" value="LOCAL" />
      <updated>1538813521977</updated>
    </task>
    <task id="LOCAL-00002" summary="Das ist ein Commit-Test für Janko">
      <created>1538815642691</created>
      <option name="number" value="00002" />
      <option name="presentableId" value="LOCAL-00002" />
      <option name="project" value="LOCAL" />
      <updated>1538815642692</updated>
    </task>
    <task id="LOCAL-00003" summary="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)">
      <created>1539416867064</created>
      <option name="number" value="00003" />
      <option name="presentableId" value="LOCAL-00003" />
      <option name="project" value="LOCAL" />
      <updated>1539416867064</updated>
    </task>
    <task id="LOCAL-00004" summary="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)">
      <created>1539677464477</created>
      <option name="number" value="00004" />
      <option name="presentableId" value="LOCAL-00004" />
      <option name="project" value="LOCAL" />
      <updated>1539677464477</updated>
    </task>
    <task id="LOCAL-00005" summary="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)">
      <created>1539677495365</created>
      <option name="number" value="00005" />
      <option name="presentableId" value="LOCAL-00005" />
      <option name="project" value="LOCAL" />
      <updated>1539677495365</updated>
    </task>
    <option name="localTasksCounter" value="6" />
    <servers />
  </component>
  <component name="ToolWindowManager">
    <frame x="-8" y="-8" width="1616" height="876" extended-state="6" />
    <editor active="true" />
    <layout>
      <window_info content_ui="combo" id="Project" order="0" visible="true" weight="0.22750643" />
      <window_info id="Structure" order="1" side_tool="true" weight="0.25" />
      <window_info id="Favorites" order="2" side_tool="true" />
      <window_info anchor="bottom" id="Message" order="0" />
      <window_info anchor="bottom" id="Find" order="1" />
      <window_info anchor="bottom" id="Run" order="2" weight="0.32839838" />
      <window_info anchor="bottom" id="Debug" order="3" weight="0.39973083" />
      <window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
      <window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
      <window_info anchor="bottom" id="TODO" order="6" />
      <window_info anchor="bottom" id="Docker" order="7" show_stripe_button="false" />
      <window_info anchor="bottom" id="Version Control" order="8" show_stripe_button="false" />
      <window_info anchor="bottom" id="Database Changes" order="9" show_stripe_button="false" />
      <window_info anchor="bottom" id="Event Log" order="10" side_tool="true" weight="0.32974428" />
      <window_info anchor="bottom" id="Terminal" order="11" weight="0.32896653" />
      <window_info anchor="bottom" id="Python Console" order="12" weight="0.32896653" />
      <window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
      <window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
      <window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
      <window_info anchor="right" id="SciView" order="3" weight="0.32969153" />
      <window_info anchor="right" x="0" y="0" width="300" height="620" id="Documentation" order="4" side_tool="true" weight="0.32985073" />
      <window_info anchor="right" id="Database" order="5" weight="0.32969153" />
    </layout>
    <layout-to-restore>
      <window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.22750643" />
      <window_info id="Structure" order="1" side_tool="true" weight="0.25" />
      <window_info id="Favorites" order="2" side_tool="true" />
      <window_info anchor="bottom" id="Message" order="0" />
      <window_info anchor="bottom" id="Find" order="1" />
      <window_info anchor="bottom" id="Run" order="2" weight="0.32839838" />
      <window_info anchor="bottom" id="Debug" order="3" weight="0.39973083" />
      <window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
      <window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
      <window_info anchor="bottom" id="TODO" order="6" />
      <window_info anchor="bottom" id="Docker" order="7" show_stripe_button="false" />
      <window_info anchor="bottom" id="Version Control" order="8" show_stripe_button="false" />
      <window_info anchor="bottom" id="Database Changes" order="9" show_stripe_button="false" />
      <window_info anchor="bottom" id="Event Log" order="10" side_tool="true" weight="0.32974428" />
      <window_info anchor="bottom" id="Terminal" order="11" weight="0.32896653" />
      <window_info anchor="bottom" id="Python Console" order="12" weight="0.32896653" />
      <window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
      <window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
      <window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
      <window_info anchor="right" id="SciView" order="3" weight="0.32969153" />
      <window_info anchor="right" x="0" y="0" width="300" height="620" id="Documentation" order="4" side_tool="true" weight="0.32985073" />
      <window_info anchor="right" id="Database" order="5" weight="0.32969153" />
    </layout-to-restore>
  </component>
  <component name="TypeScriptGeneratedFilesManager">
    <option name="version" value="1" />
  </component>
  <component name="VcsContentAnnotationSettings">
    <option name="myLimit" value="2678400000" />
  </component>
  <component name="VcsManagerConfiguration">
    <MESSAGE value="Initialization" />
    <MESSAGE value="Das ist ein Commit-Test für Janko" />
    <MESSAGE value="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)" />
    <option name="LAST_COMMIT_MESSAGE" value="Fertiges PONG-Spiel (Beta-Version zu finden unter &quot;Tag2_EigenesSpiel.py&quot;)" />
  </component>
  <component name="XDebuggerManager">
    <watches-manager>
      <configuration name="PythonConfigurationType">
        <watch expression="size_image[0]/2 &lt;= image_rect.centerx &lt;= width-(size_image[0]/2) and size_image[1]/2 &lt;= image_rect.centery &lt;= height-(size_image[1]/2)" language="Python" />
        <watch expression="image_rect.centerx" language="Python" />
        <watch expression="image_rect.centery" language="Python" />
        <watch expression="move_vector" language="Python" />
        <watch expression="image.get_rect().size" language="Python" />
      </configuration>
    </watches-manager>
  </component>
  <component name="debuggerHistoryManager">
    <expressions id="watch">
      <expression>
        <expression-string>width-(size_image[0]/2)</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
      <expression>
        <expression-string>image.get_rect().size</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
      <expression>
        <expression-string>move_vector</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
      <expression>
        <expression-string>image_rect.centery</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
      <expression>
        <expression-string>image_rect.centerx</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
      <expression>
        <expression-string>size_image[0]/2 &lt;= image_rect.centerx &lt;= width-(size_image[0]/2) and size_image[1]/2 &lt;= image_rect.centery &lt;= height-(size_image[1]/2)</expression-string>
        <language-id>Python</language-id>
        <evaluation-mode>EXPRESSION</evaluation-mode>
      </expression>
    </expressions>
  </component>
  <component name="editorHistoryManager">
    <entry file="file://$PROJECT_DIR$/FirstProgramm.py.txt" />
    <entry file="file://$PROJECT_DIR$/AB1_Aufg2.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="34">
          <caret line="2" column="32" selection-start-line="2" selection-start-column="32" selection-end-line="2" selection-end-column="32" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/AB1_Aufg3.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="187">
          <caret line="11" selection-start-line="11" selection-end-line="11" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/AB1_Aufg4.py">
      <provider selected="true" editor-type-id="text-editor" />
    </entry>
    <entry file="file://$PROJECT_DIR$/AB1_Aufg5.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="153">
          <caret line="9" column="17" selection-start-line="9" selection-start-column="17" selection-end-line="9" selection-end-column="17" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/FirstProgram.py">
      <provider selected="true" editor-type-id="text-editor">
        <state>
          <caret column="25" selection-start-column="25" selection-end-column="25" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/Tag2_MittelwertAusVariablerAnzahlParameter.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="119">
          <caret line="7" column="5" selection-start-line="7" selection-start-column="5" selection-end-line="7" selection-end-column="5" />
        </state>
      </provider>
    </entry>
    <entry file="file://$APPLICATION_HOME_DIR$/helpers/pydev/_pydev_imps/_pydev_execfile.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="289">
          <caret line="17" selection-start-line="17" selection-end-line="17" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/formatiert.css">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="17">
          <caret line="1" column="15" selection-start-line="1" selection-start-column="15" selection-end-line="1" selection-end-column="15" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/HTML_Test_Doc.html">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="85">
          <caret line="5" column="49" selection-start-line="5" selection-start-column="49" selection-end-line="5" selection-end-column="49" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/AB4_Aufg2.html">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="255">
          <caret line="15" column="8" selection-start-line="15" selection-start-column="8" selection-end-line="15" selection-end-column="8" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/Format_zu_AB5.css">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="561">
          <caret line="33" column="22" selection-start-line="33" selection-start-column="22" selection-end-line="33" selection-end-column="22" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/Popups_zu_AB6.js">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="136">
          <caret line="8" column="33" selection-start-line="8" selection-start-column="33" selection-end-line="8" selection-end-column="33" />
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/Tag2_EigenesSpiel.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="1037">
          <caret line="61" column="4" selection-start-line="61" selection-start-column="4" selection-end-line="63" selection-end-column="42" />
          <folding>
            <marker date="1538827277725" expanded="true" signature="32:37" ph="..." />
            <marker date="1538827277725" expanded="true" signature="213:307" ph="..." />
            <marker date="1538827277725" expanded="true" signature="322:327" ph="..." />
            <marker date="1538827277725" expanded="true" signature="462:654" ph="..." />
            <marker date="1538827277725" expanded="true" signature="812:814" ph="..." />
          </folding>
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/Pong.py">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="1564">
          <caret line="94" column="8" selection-start-line="94" selection-start-column="8" selection-end-line="94" selection-end-column="8" />
          <folding>
            <element signature="e#0#13#0" expanded="true" />
            <marker date="1539359571752" expanded="true" signature="872:1094" ph="..." />
            <marker date="1539359571752" expanded="true" signature="1117:1174" ph="..." />
            <marker date="1539359571752" expanded="true" signature="2523:2842" ph="..." />
          </folding>
        </state>
      </provider>
    </entry>
    <entry file="file://$PROJECT_DIR$/AB4_Aufg1.html">
      <provider selected="true" editor-type-id="text-editor">
        <state relative-caret-position="561">
          <caret line="33" column="7" lean-forward="true" selection-start-line="33" selection-start-column="7" selection-end-line="33" selection-end-column="7" />
        </state>
      </provider>
    </entry>
  </component>
</project>
1  venv/Lib/site-packages/easy-install.pth  (Normal file)
@@ -0,0 +1 @@
./setuptools-39.1.0-py3.7.egg
@@ -0,0 +1,69 @@
Metadata-Version: 2.1
Name: pip
Version: 10.0.1
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: python-virtualenv@groups.google.com
License: MIT
Description: pip
        ===

        The `PyPA recommended`_ tool for installing Python packages.

        .. image:: https://img.shields.io/pypi/v/pip.svg
           :target: https://pypi.org/project/pip/

        .. image:: https://img.shields.io/travis/pypa/pip/master.svg
           :target: http://travis-ci.org/pypa/pip

        .. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg
           :target: https://ci.appveyor.com/project/pypa/pip/history

        .. image:: https://readthedocs.org/projects/pip/badge/?version=latest
           :target: https://pip.pypa.io/en/latest

        * `Installation`_
        * `Documentation`_
        * `Changelog`_
        * `GitHub Page`_
        * `Issue Tracking`_
        * `User mailing list`_
        * `Dev mailing list`_
        * User IRC: #pypa on Freenode.
        * Dev IRC: #pypa-dev on Freenode.

        Code of Conduct
        ---------------

        Everyone interacting in the pip project's codebases, issue trackers, chat
        rooms and mailing lists is expected to follow the `PyPA Code of Conduct`_.

        .. _PyPA recommended: https://packaging.python.org/en/latest/current/
        .. _Installation: https://pip.pypa.io/en/stable/installing.html
        .. _Documentation: https://pip.pypa.io/en/stable/
        .. _Changelog: https://pip.pypa.io/en/stable/news.html
        .. _GitHub Page: https://github.com/pypa/pip
        .. _Issue Tracking: https://github.com/pypa/pip/issues
        .. _User mailing list: http://groups.google.com/group/python-virtualenv
        .. _Dev mailing list: http://groups.google.com/group/pypa-dev
        .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/

Keywords: easy_install distutils setuptools egg virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*
Provides-Extra: testing
347  venv/Lib/site-packages/pip-10.0.1-py3.7.egg/EGG-INFO/SOURCES.txt  (Normal file)
@@ -0,0 +1,347 @@
AUTHORS.txt
LICENSE.txt
MANIFEST.in
NEWS.rst
README.rst
pyproject.toml
setup.cfg
setup.py
docs/Makefile
docs/__init__.py
docs/conf.py
docs/configuration.rst
docs/cookbook.rst
docs/development.rst
docs/docutils.conf
docs/index.rst
docs/installing.rst
docs/logic.rst
docs/make.bat
docs/news.rst
docs/pipext.py
docs/quickstart.rst
docs/usage.rst
docs/user_guide.rst
docs/man/pip.rst
docs/man/commands/check.rst
docs/man/commands/config.rst
docs/man/commands/download.rst
docs/man/commands/freeze.rst
docs/man/commands/hash.rst
docs/man/commands/help.rst
docs/man/commands/install.rst
docs/man/commands/list.rst
docs/man/commands/search.rst
docs/man/commands/show.rst
docs/man/commands/uninstall.rst
docs/man/commands/wheel.rst
docs/reference/index.rst
docs/reference/pip.rst
docs/reference/pip_check.rst
docs/reference/pip_config.rst
docs/reference/pip_download.rst
docs/reference/pip_freeze.rst
docs/reference/pip_hash.rst
docs/reference/pip_install.rst
docs/reference/pip_list.rst
docs/reference/pip_search.rst
docs/reference/pip_show.rst
docs/reference/pip_uninstall.rst
docs/reference/pip_wheel.rst
src/pip/__init__.py
src/pip/__main__.py
src/pip.egg-info/PKG-INFO
src/pip.egg-info/SOURCES.txt
src/pip.egg-info/dependency_links.txt
src/pip.egg-info/entry_points.txt
src/pip.egg-info/not-zip-safe
src/pip.egg-info/requires.txt
src/pip.egg-info/top_level.txt
src/pip/_internal/__init__.py
src/pip/_internal/basecommand.py
src/pip/_internal/baseparser.py
src/pip/_internal/build_env.py
src/pip/_internal/cache.py
src/pip/_internal/cmdoptions.py
src/pip/_internal/compat.py
src/pip/_internal/configuration.py
src/pip/_internal/download.py
src/pip/_internal/exceptions.py
src/pip/_internal/index.py
src/pip/_internal/locations.py
src/pip/_internal/pep425tags.py
src/pip/_internal/resolve.py
src/pip/_internal/status_codes.py
src/pip/_internal/wheel.py
src/pip/_internal/commands/__init__.py
src/pip/_internal/commands/check.py
src/pip/_internal/commands/completion.py
src/pip/_internal/commands/configuration.py
src/pip/_internal/commands/download.py
src/pip/_internal/commands/freeze.py
src/pip/_internal/commands/hash.py
src/pip/_internal/commands/help.py
src/pip/_internal/commands/install.py
src/pip/_internal/commands/list.py
src/pip/_internal/commands/search.py
src/pip/_internal/commands/show.py
src/pip/_internal/commands/uninstall.py
src/pip/_internal/commands/wheel.py
src/pip/_internal/models/__init__.py
src/pip/_internal/models/index.py
src/pip/_internal/operations/__init__.py
src/pip/_internal/operations/check.py
src/pip/_internal/operations/freeze.py
src/pip/_internal/operations/prepare.py
src/pip/_internal/req/__init__.py
src/pip/_internal/req/req_file.py
src/pip/_internal/req/req_install.py
src/pip/_internal/req/req_set.py
src/pip/_internal/req/req_uninstall.py
src/pip/_internal/utils/__init__.py
src/pip/_internal/utils/appdirs.py
src/pip/_internal/utils/deprecation.py
src/pip/_internal/utils/encoding.py
src/pip/_internal/utils/filesystem.py
src/pip/_internal/utils/glibc.py
src/pip/_internal/utils/hashes.py
src/pip/_internal/utils/logging.py
src/pip/_internal/utils/misc.py
src/pip/_internal/utils/outdated.py
src/pip/_internal/utils/packaging.py
src/pip/_internal/utils/setuptools_build.py
src/pip/_internal/utils/temp_dir.py
src/pip/_internal/utils/typing.py
src/pip/_internal/utils/ui.py
src/pip/_internal/vcs/__init__.py
src/pip/_internal/vcs/bazaar.py
src/pip/_internal/vcs/git.py
src/pip/_internal/vcs/mercurial.py
src/pip/_internal/vcs/subversion.py
src/pip/_vendor/README.rst
src/pip/_vendor/__init__.py
src/pip/_vendor/appdirs.py
src/pip/_vendor/distro.py
src/pip/_vendor/ipaddress.py
src/pip/_vendor/pyparsing.py
src/pip/_vendor/retrying.py
src/pip/_vendor/six.py
src/pip/_vendor/vendor.txt
src/pip/_vendor/cachecontrol/__init__.py
src/pip/_vendor/cachecontrol/_cmd.py
src/pip/_vendor/cachecontrol/adapter.py
src/pip/_vendor/cachecontrol/cache.py
src/pip/_vendor/cachecontrol/compat.py
src/pip/_vendor/cachecontrol/controller.py
src/pip/_vendor/cachecontrol/filewrapper.py
src/pip/_vendor/cachecontrol/heuristics.py
src/pip/_vendor/cachecontrol/serialize.py
src/pip/_vendor/cachecontrol/wrapper.py
src/pip/_vendor/cachecontrol/caches/__init__.py
src/pip/_vendor/cachecontrol/caches/file_cache.py
src/pip/_vendor/cachecontrol/caches/redis_cache.py
src/pip/_vendor/certifi/__init__.py
src/pip/_vendor/certifi/__main__.py
src/pip/_vendor/certifi/cacert.pem
src/pip/_vendor/certifi/core.py
src/pip/_vendor/chardet/__init__.py
src/pip/_vendor/chardet/big5freq.py
src/pip/_vendor/chardet/big5prober.py
src/pip/_vendor/chardet/chardistribution.py
src/pip/_vendor/chardet/charsetgroupprober.py
src/pip/_vendor/chardet/charsetprober.py
src/pip/_vendor/chardet/codingstatemachine.py
src/pip/_vendor/chardet/compat.py
src/pip/_vendor/chardet/cp949prober.py
src/pip/_vendor/chardet/enums.py
src/pip/_vendor/chardet/escprober.py
src/pip/_vendor/chardet/escsm.py
src/pip/_vendor/chardet/eucjpprober.py
src/pip/_vendor/chardet/euckrfreq.py
src/pip/_vendor/chardet/euckrprober.py
src/pip/_vendor/chardet/euctwfreq.py
src/pip/_vendor/chardet/euctwprober.py
src/pip/_vendor/chardet/gb2312freq.py
src/pip/_vendor/chardet/gb2312prober.py
src/pip/_vendor/chardet/hebrewprober.py
src/pip/_vendor/chardet/jisfreq.py
src/pip/_vendor/chardet/jpcntx.py
src/pip/_vendor/chardet/langbulgarianmodel.py
src/pip/_vendor/chardet/langcyrillicmodel.py
src/pip/_vendor/chardet/langgreekmodel.py
src/pip/_vendor/chardet/langhebrewmodel.py
src/pip/_vendor/chardet/langhungarianmodel.py
src/pip/_vendor/chardet/langthaimodel.py
src/pip/_vendor/chardet/langturkishmodel.py
src/pip/_vendor/chardet/latin1prober.py
src/pip/_vendor/chardet/mbcharsetprober.py
src/pip/_vendor/chardet/mbcsgroupprober.py
src/pip/_vendor/chardet/mbcssm.py
src/pip/_vendor/chardet/sbcharsetprober.py
src/pip/_vendor/chardet/sbcsgroupprober.py
src/pip/_vendor/chardet/sjisprober.py
src/pip/_vendor/chardet/universaldetector.py
src/pip/_vendor/chardet/utf8prober.py
src/pip/_vendor/chardet/version.py
src/pip/_vendor/chardet/cli/__init__.py
src/pip/_vendor/chardet/cli/chardetect.py
src/pip/_vendor/colorama/__init__.py
src/pip/_vendor/colorama/ansi.py
src/pip/_vendor/colorama/ansitowin32.py
src/pip/_vendor/colorama/initialise.py
src/pip/_vendor/colorama/win32.py
src/pip/_vendor/colorama/winterm.py
src/pip/_vendor/distlib/__init__.py
src/pip/_vendor/distlib/compat.py
src/pip/_vendor/distlib/database.py
src/pip/_vendor/distlib/index.py
src/pip/_vendor/distlib/locators.py
src/pip/_vendor/distlib/manifest.py
src/pip/_vendor/distlib/markers.py
src/pip/_vendor/distlib/metadata.py
src/pip/_vendor/distlib/resources.py
src/pip/_vendor/distlib/scripts.py
src/pip/_vendor/distlib/t32.exe
src/pip/_vendor/distlib/t64.exe
src/pip/_vendor/distlib/util.py
src/pip/_vendor/distlib/version.py
src/pip/_vendor/distlib/w32.exe
src/pip/_vendor/distlib/w64.exe
src/pip/_vendor/distlib/wheel.py
src/pip/_vendor/distlib/_backport/__init__.py
src/pip/_vendor/distlib/_backport/misc.py
src/pip/_vendor/distlib/_backport/shutil.py
src/pip/_vendor/distlib/_backport/sysconfig.cfg
src/pip/_vendor/distlib/_backport/sysconfig.py
src/pip/_vendor/distlib/_backport/tarfile.py
src/pip/_vendor/html5lib/__init__.py
src/pip/_vendor/html5lib/_ihatexml.py
src/pip/_vendor/html5lib/_inputstream.py
src/pip/_vendor/html5lib/_tokenizer.py
src/pip/_vendor/html5lib/_utils.py
src/pip/_vendor/html5lib/constants.py
src/pip/_vendor/html5lib/html5parser.py
src/pip/_vendor/html5lib/serializer.py
src/pip/_vendor/html5lib/_trie/__init__.py
src/pip/_vendor/html5lib/_trie/_base.py
src/pip/_vendor/html5lib/_trie/datrie.py
src/pip/_vendor/html5lib/_trie/py.py
src/pip/_vendor/html5lib/filters/__init__.py
src/pip/_vendor/html5lib/filters/alphabeticalattributes.py
src/pip/_vendor/html5lib/filters/base.py
src/pip/_vendor/html5lib/filters/inject_meta_charset.py
src/pip/_vendor/html5lib/filters/lint.py
src/pip/_vendor/html5lib/filters/optionaltags.py
src/pip/_vendor/html5lib/filters/sanitizer.py
src/pip/_vendor/html5lib/filters/whitespace.py
src/pip/_vendor/html5lib/treeadapters/__init__.py
src/pip/_vendor/html5lib/treeadapters/genshi.py
src/pip/_vendor/html5lib/treeadapters/sax.py
src/pip/_vendor/html5lib/treebuilders/__init__.py
src/pip/_vendor/html5lib/treebuilders/base.py
src/pip/_vendor/html5lib/treebuilders/dom.py
src/pip/_vendor/html5lib/treebuilders/etree.py
src/pip/_vendor/html5lib/treebuilders/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/__init__.py
src/pip/_vendor/html5lib/treewalkers/base.py
src/pip/_vendor/html5lib/treewalkers/dom.py
src/pip/_vendor/html5lib/treewalkers/etree.py
src/pip/_vendor/html5lib/treewalkers/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/genshi.py
src/pip/_vendor/idna/__init__.py
src/pip/_vendor/idna/codec.py
src/pip/_vendor/idna/compat.py
src/pip/_vendor/idna/core.py
src/pip/_vendor/idna/idnadata.py
src/pip/_vendor/idna/intranges.py
src/pip/_vendor/idna/package_data.py
src/pip/_vendor/idna/uts46data.py
src/pip/_vendor/lockfile/__init__.py
src/pip/_vendor/lockfile/linklockfile.py
src/pip/_vendor/lockfile/mkdirlockfile.py
src/pip/_vendor/lockfile/pidlockfile.py
src/pip/_vendor/lockfile/sqlitelockfile.py
src/pip/_vendor/lockfile/symlinklockfile.py
src/pip/_vendor/msgpack/__init__.py
src/pip/_vendor/msgpack/_version.py
src/pip/_vendor/msgpack/exceptions.py
src/pip/_vendor/msgpack/fallback.py
src/pip/_vendor/packaging/__about__.py
src/pip/_vendor/packaging/__init__.py
src/pip/_vendor/packaging/_compat.py
src/pip/_vendor/packaging/_structures.py
src/pip/_vendor/packaging/markers.py
src/pip/_vendor/packaging/requirements.py
src/pip/_vendor/packaging/specifiers.py
src/pip/_vendor/packaging/utils.py
src/pip/_vendor/packaging/version.py
src/pip/_vendor/pkg_resources/__init__.py
src/pip/_vendor/pkg_resources/py31compat.py
src/pip/_vendor/progress/__init__.py
src/pip/_vendor/progress/bar.py
src/pip/_vendor/progress/counter.py
src/pip/_vendor/progress/helpers.py
src/pip/_vendor/progress/spinner.py
src/pip/_vendor/pytoml/__init__.py
src/pip/_vendor/pytoml/core.py
src/pip/_vendor/pytoml/parser.py
src/pip/_vendor/pytoml/writer.py
src/pip/_vendor/requests/__init__.py
src/pip/_vendor/requests/__version__.py
src/pip/_vendor/requests/_internal_utils.py
src/pip/_vendor/requests/adapters.py
src/pip/_vendor/requests/api.py
src/pip/_vendor/requests/auth.py
src/pip/_vendor/requests/certs.py
src/pip/_vendor/requests/compat.py
src/pip/_vendor/requests/cookies.py
src/pip/_vendor/requests/exceptions.py
src/pip/_vendor/requests/help.py
src/pip/_vendor/requests/hooks.py
src/pip/_vendor/requests/models.py
src/pip/_vendor/requests/packages.py
src/pip/_vendor/requests/sessions.py
src/pip/_vendor/requests/status_codes.py
src/pip/_vendor/requests/structures.py
src/pip/_vendor/requests/utils.py
src/pip/_vendor/urllib3/__init__.py
src/pip/_vendor/urllib3/_collections.py
src/pip/_vendor/urllib3/connection.py
src/pip/_vendor/urllib3/connectionpool.py
src/pip/_vendor/urllib3/exceptions.py
src/pip/_vendor/urllib3/fields.py
src/pip/_vendor/urllib3/filepost.py
src/pip/_vendor/urllib3/poolmanager.py
src/pip/_vendor/urllib3/request.py
src/pip/_vendor/urllib3/response.py
src/pip/_vendor/urllib3/contrib/__init__.py
src/pip/_vendor/urllib3/contrib/appengine.py
src/pip/_vendor/urllib3/contrib/ntlmpool.py
src/pip/_vendor/urllib3/contrib/pyopenssl.py
src/pip/_vendor/urllib3/contrib/securetransport.py
src/pip/_vendor/urllib3/contrib/socks.py
src/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
src/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
src/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
src/pip/_vendor/urllib3/packages/__init__.py
src/pip/_vendor/urllib3/packages/ordered_dict.py
src/pip/_vendor/urllib3/packages/six.py
src/pip/_vendor/urllib3/packages/backports/__init__.py
src/pip/_vendor/urllib3/packages/backports/makefile.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
src/pip/_vendor/urllib3/util/__init__.py
src/pip/_vendor/urllib3/util/connection.py
src/pip/_vendor/urllib3/util/request.py
src/pip/_vendor/urllib3/util/response.py
src/pip/_vendor/urllib3/util/retry.py
src/pip/_vendor/urllib3/util/selectors.py
src/pip/_vendor/urllib3/util/ssl_.py
src/pip/_vendor/urllib3/util/timeout.py
src/pip/_vendor/urllib3/util/url.py
src/pip/_vendor/urllib3/util/wait.py
src/pip/_vendor/webencodings/__init__.py
src/pip/_vendor/webencodings/labels.py
src/pip/_vendor/webencodings/mklabels.py
src/pip/_vendor/webencodings/tests.py
src/pip/_vendor/webencodings/x_user_defined.py
@@ -0,0 +1 @@
@@ -0,0 +1,5 @@
[console_scripts]
pip = pip._internal:main
pip3 = pip._internal:main
pip3.7 = pip._internal:main
@@ -0,0 +1 @@
@@ -0,0 +1,8 @@

[testing]
pytest
mock
pretend
scripttest>=1.3
virtualenv>=1.10
freezegun
@@ -0,0 +1 @@
pip
@@ -0,0 +1 @@
__version__ = "10.0.1"
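The [console_scripts] block above is standard setuptools entry-point metadata: each line maps a console executable name to a module:function target, which is why running pip, pip3 or pip3.7 all land in pip._internal:main. As a minimal sketch (not part of this commit), the same mapping would be declared in a setup.py like this; setup() and entry_points are real setuptools API, and the values simply mirror the generated metadata shown above:

from setuptools import setup

# Sketch only: declares the same three console scripts as the
# entry-points metadata above.
setup(
    name="pip",
    entry_points={
        "console_scripts": [
            "pip = pip._internal:main",
            "pip3 = pip._internal:main",
            "pip3.7 = pip._internal:main",
        ],
    },
)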
19
venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/__main__.py
Normal file
@@ -0,0 +1,19 @@
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # noqa

if __name__ == '__main__':
    sys.exit(_main())
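Shipping a __main__.py like the one above is what makes `python -m pip` work: the interpreter executes the file as the package's entry point, which imports pip._internal.main and exits with its return code. A minimal sketch of the same pattern for a hypothetical package of your own (mypkg and mypkg.cli.main are assumed names, not from this commit):

# mypkg/__main__.py -- running "python -m mypkg" executes this file.
import sys

from mypkg.cli import main  # assumed entry function

if __name__ == '__main__':
    sys.exit(main())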
@@ -0,0 +1,246 @@
#!/usr/bin/env python
from __future__ import absolute_import

import locale
import logging
import os
import optparse
import warnings

import sys

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            pass
        else:
            securetransport.inject_into_urllib3()

from pip import __version__
from pip._internal import cmdoptions
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.misc import get_installed_distributions, get_prog
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
from pip._internal.baseparser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import get_summaries, get_similar_commands
from pip._internal.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3],
    )

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args


def check_isolated(args):
    isolated = False

    if "--isolated" in args:
        isolated = True

    return isolated


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)
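The autocomplete() function above only activates when the shell completion script exports PIP_AUTO_COMPLETE and passes the current command line through COMP_WORDS/COMP_CWORD. A hedged sketch of driving it by hand, based solely on the environment-variable protocol visible in the code (the exact match list depends on the installed pip):

import os
import subprocess

# Simulate what a generated completion script does: with these variables
# set, autocomplete() prints subcommands starting with "ins" (e.g.
# "install") and exits before normal argument parsing.
env = dict(
    os.environ,
    PIP_AUTO_COMPLETE="1",
    COMP_WORDS="pip ins",  # words on the command line so far
    COMP_CWORD="1",        # index of the word being completed
)
result = subprocess.run(
    ["pip"], env=env, stdout=subprocess.PIPE, universal_newlines=True
)
print(result.stdout)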
@@ -0,0 +1,373 @@
"""Base Command class, and related routines"""
from __future__ import absolute_import

import logging
import logging.config
import optparse
import os
import sys
import warnings

from pip._internal import cmdoptions
from pip._internal.baseparser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.compat import WINDOWS
from pip._internal.download import PipSession
from pip._internal.exceptions import (
    BadCommand, CommandError, InstallationError, PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.index import PackageFinder
from pip._internal.locations import running_under_virtualenv
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.status_codes import (
    ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
    VIRTUALENV_NOT_FOUND,
)
from pip._internal.utils import deprecation
from pip._internal.utils.logging import IndentingFormatter
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional

__all__ = ['Command']

logger = logging.getLogger(__name__)


class Command(object):
    name = None  # type: Optional[str]
    usage = None  # type: Optional[str]
    hidden = False  # type: bool
    ignore_require_venv = False  # type: bool
    log_streams = ("ext://sys.stdout", "ext://sys.stderr")

    def __init__(self, isolated=False):
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def _build_session(self, options, retries=None, timeout=None):
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        if self.verbosity >= 1:
            level = "DEBUG"
        elif self.verbosity == -1:
            level = "WARNING"
        elif self.verbosity == -2:
            level = "ERROR"
        elif self.verbosity <= -3:
            level = "CRITICAL"
        else:
            level = "INFO"

        # The root logger should match the "console" level *unless* we
        # specified "--log" to send debug logs to a file.
        root_level = level
        if options.log:
            root_level = "DEBUG"

        logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
        handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"

        logging.config.dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": logger_class,
                    "no_color": options.no_color,
                    "stream": self.log_streams[0],
                    "filters": ["exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": logger_class,
                    "no_color": options.no_color,
                    "stream": self.log_streams[1],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_class,
                    "filename": options.log or "/dev/null",
                    "delay": True,
                    "formatter": "indent",
                },
            },
            "root": {
                "level": root_level,
                "handlers": list(filter(None, [
                    "console",
                    "console_errors",
                    "user_log" if options.log else None,
                ])),
            },
            # Disable any logging besides WARNING unless we have DEBUG level
            # logging enabled. These use both pip._vendor and the bare names
            # for the case where someone unbundles our libraries.
            "loggers": {
                name: {
                    "level": (
                        "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
                    )
                } for name in [
                    "pip._vendor", "distlib", "requests", "urllib3"
                ]
            },
        })

        if sys.version_info[:2] == (3, 3):
            warnings.warn(
                "Python 3.3 support has been deprecated and support for it "
                "will be dropped in the future. Please upgrade your Python.",
                deprecation.RemovedInPip11Warning,
            )

        # TODO: try to get these passing down from the command?
        #       without resorting to os.environ to hold these.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        original_root_handlers = set(logging.root.handlers)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Check if we're using the latest version of pip available
            if (not options.disable_pip_version_check and not
                    getattr(options, "no_index", False)):
                with self._build_session(
                        options,
                        retries=0,
                        timeout=min(5, options.timeout)) as session:
                    pip_version_check(session, options)
            # Avoid leaking loggers
            for handler in set(logging.root.handlers) - original_root_handlers:
                # this method benefits from the Logger class internal lock
                logging.root.removeHandler(handler)

        return SUCCESS


class RequirementCommand(Command):

    @staticmethod
    def populate_requirement_set(requirement_set, args, options, finder,
                                 session, name, wheel_cache):
        """
        Marshal cmd line args into a requirement set.
        """
        # NOTE: As a side-effect, options.require_hashes and
        #       requirement_set.require_hashes may be updated

        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = InstallRequirement.from_line(
                req, None, isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = InstallRequirement.from_editable(
                req,
                isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)

        # On Windows, any operation modifying pip should be run as:
        #     python -m pip ...
        # See https://github.com/pypa/pip/issues/1299 for more discussion
        should_show_use_python_msg = (
            WINDOWS and
            requirement_set.has_requirement("pip") and
            os.path.basename(sys.argv[0]).startswith("pip")
        )
        if should_show_use_python_msg:
            new_command = [
                sys.executable, "-m", "pip"
            ] + sys.argv[1:]
            raise CommandError(
                'To modify pip, please run the following command:\n{}'
                .format(" ".join(new_command))
            )

    def _build_package_finder(self, options, session,
                              platform=None, python_versions=None,
                              abi=None, implementation=None):
        """
        Create a package finder appropriate to this requirement command.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
        )
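Command.main() above derives a single verbosity number from the additive -v/-q counters and maps it onto a logging level. A small self-contained sketch of exactly that mapping, mirroring the branches in the method:

# Sketch of the verbosity-to-level mapping used in Command.main above.
def level_for(verbose, quiet):
    verbosity = verbose - quiet
    if verbosity >= 1:
        return "DEBUG"
    elif verbosity == -1:
        return "WARNING"
    elif verbosity == -2:
        return "ERROR"
    elif verbosity <= -3:
        return "CRITICAL"
    return "INFO"

assert level_for(0, 0) == "INFO"      # plain "pip install ..."
assert level_for(2, 0) == "DEBUG"     # "pip -vv ..."
assert level_for(0, 3) == "CRITICAL"  # "pip -qqq ..."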
@@ -0,0 +1,240 @@
"""Base option parser setup"""
from __future__ import absolute_import

import logging
import optparse
import sys
import textwrap
from distutils.util import strtobool

from pip._vendor.six import string_types

from pip._internal.compat import get_terminal_size
from pip._internal.configuration import Configuration, ConfigurationError

logger = logging.getLogger(__name__)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """

    def expand_default(self, option):
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.name = kwargs.pop('name')

        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as its value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                val = strtobool(val)
            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(2, err.args[0])

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
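ConfigOptionParser merges configuration with a fixed precedence: [global] section values are yielded first, then the per-command section, then :env: entries, so later sources overwrite earlier ones when the caller writes them into defaults. An isolated sketch of that ordering (ordered_items is a hypothetical helper mirroring _get_ordered_configuration_items above):

def ordered_items(config_items, command_name):
    # Later sections win when keys collide, because callers apply the
    # yielded pairs in order.
    override_order = ["global", command_name, ":env:"]
    section_items = {name: [] for name in override_order}
    for section_key, val in config_items:
        section, key = section_key.split(".", 1)
        if section in override_order:
            section_items[section].append((key, val))
    for section in override_order:
        for key, val in section_items[section]:
            yield key, val

items = [("install.timeout", "10"), ("global.timeout", "60")]
print(list(ordered_items(items, "install")))
# [('timeout', '60'), ('timeout', '10')] -- the install value applies last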
@@ -0,0 +1,92 @@
"""Build Environment used for isolation during sdist building
"""

import os
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths

from pip._internal.utils.temp_dir import TempDirectory


class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self, no_clean):
        self._temp_dir = TempDirectory(kind="build-env")
        self._no_clean = no_clean

    @property
    def path(self):
        return self._temp_dir.path

    def __enter__(self):
        self._temp_dir.create()

        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
        self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=0, prefix=self.path)
        platlib = get_python_lib(plat_specific=1, prefix=self.path)
        if purelib == platlib:
            lib_dirs = purelib
        else:
            lib_dirs = purelib + os.pathsep + platlib
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        os.environ['PYTHONNOUSERSITE'] = '1'

        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self._no_clean:
            self._temp_dir.cleanup()

        def restore_var(varname, old_value):
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

        restore_var('PATH', self.save_path)
        restore_var('PYTHONPATH', self.save_pythonpath)
        restore_var('PYTHONNOUSERSITE', self.save_nousersite)

    def cleanup(self):
        self._temp_dir.cleanup()


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self, no_clean):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass
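BuildEnvironment.__enter__ above isolates a build by resolving the install scheme for a throwaway prefix and prepending its scripts and library directories to PATH and PYTHONPATH (plus setting PYTHONNOUSERSITE), restoring everything in __exit__. A minimal usage sketch, assuming the module import path matches the file as committed here:

import os

from pip._internal.build_env import BuildEnvironment  # assumed import path

with BuildEnvironment(no_clean=False) as prefix:
    # Inside the block, PATH/PYTHONPATH point into the temporary prefix,
    # so build dependencies installed there become importable.
    print(prefix)
    print(os.environ['PYTHONNOUSERSITE'])  # '1'
# On exit the saved environment is restored and the temp dir cleaned up.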
@@ -0,0 +1,202 @@
"""Cache Management
"""

import errno
import hashlib
import logging
import os

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal import index
from pip._internal.compat import expanduser
from pip._internal.download import path_to_url
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import InvalidWheelFilename, Wheel

logger = logging.getLogger(__name__)


class Cache(object):
    """An abstract class - provides cache directories for data from links


    :param cache_dir: The root of the cache.
    :param format_control: A pip.index.FormatControl object to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        """Get parts of path that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, package_name):
        can_not_cache = (
            not self.cache_dir or
            not package_name or
            not link
        )
        if can_not_cache:
            return []

        canonical_name = canonicalize_name(package_name)
        formats = index.fmt_ctl_formats(
            self.format_control, canonical_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
                return []
            raise

    def get_path_for_link(self, link):
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        root = self.get_path_for_link(link)
        path = os.path.join(root, candidate)

        return index.Link(path_to_url(path))

    def cleanup(self):
        pass


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(self, link, package_name):
        candidates = []

        for wheel_name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            candidates.append((wheel.support_index_min(), wheel_name))

        if not candidates:
            return link

        return self._link_for_candidate(link, min(candidates)[1])


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """

    def __init__(self, format_control):
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        self._temp_dir.cleanup()


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        retval = self._wheel_cache.get(link, package_name)
        if retval is link:
            retval = self._ephem_cache.get(link, package_name)
        return retval

    def cleanup(self):
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
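_get_cache_path_parts above derives the on-disk wheel-cache layout from a sha224 digest of the cleaned link URL, split into 2/2/2/remainder path segments so no single directory accumulates every entry. A self-contained sketch of that computation (the URL is purely illustrative):

import hashlib
import os

key_url = "https://files.pythonhosted.org/packages/example-1.0.tar.gz"
hashed = hashlib.sha224(key_url.encode()).hexdigest()
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Mirrors SimpleWheelCache.get_path_for_link's join under cache_dir:
print(os.path.join("wheels", *parts))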
@ -0,0 +1,609 @@
|
|||||||
|
"""
|
||||||
|
shared options and groups
|
||||||
|
|
||||||
|
The principle here is to define options once, but *not* instantiate them
|
||||||
|
globally. One reason being that options with action='append' can carry state
|
||||||
|
between parses. pip parses general options twice internally, and shouldn't
|
||||||
|
pass on state. To be consistent, all options will follow this design.
|
||||||
|
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
from functools import partial
|
||||||
|
from optparse import SUPPRESS_HELP, Option, OptionGroup
|
||||||
|
|
||||||
|
from pip._internal.index import (
|
||||||
|
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
|
||||||
|
)
|
||||||
|
from pip._internal.locations import USER_CACHE_DIR, src_prefix
|
||||||
|
from pip._internal.models import PyPI
|
||||||
|
from pip._internal.utils.hashes import STRONG_HASHES
|
||||||
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||||
|
from pip._internal.utils.ui import BAR_TYPES
|
||||||
|
|
||||||
|
if MYPY_CHECK_RUNNING:
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
def make_option_group(group, parser):
|
||||||
|
"""
|
||||||
|
Return an OptionGroup object
|
||||||
|
group -- assumed to be dict with 'name' and 'options' keys
|
||||||
|
parser -- an optparse Parser
|
||||||
|
"""
|
||||||
|
option_group = OptionGroup(parser, group['name'])
|
||||||
|
for option in group['options']:
|
||||||
|
option_group.add_option(option())
|
||||||
|
return option_group
|
||||||
|
|
||||||
|
|
||||||
|
def check_install_build_global(options, check_options=None):
|
||||||
|
"""Disable wheels if per-setup.py call options are set.
|
||||||
|
|
||||||
|
:param options: The OptionParser options to update.
|
||||||
|
:param check_options: The options to check, if not supplied defaults to
|
||||||
|
options.
|
||||||
|
"""
|
||||||
|
if check_options is None:
|
||||||
|
check_options = options
|
||||||
|
|
||||||
|
def getname(n):
|
||||||
|
return getattr(check_options, n, None)
|
||||||
|
names = ["build_options", "global_options", "install_options"]
|
||||||
|
if any(map(getname, names)):
|
||||||
|
control = options.format_control
|
||||||
|
fmt_ctl_no_binary(control)
|
||||||
|
warnings.warn(
|
||||||
|
'Disabling all use of wheels due to the use of --build-options '
|
||||||
|
'/ --global-options / --install-options.', stacklevel=2,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
###########
|
||||||
|
# options #
|
||||||
|
###########
|
||||||
|
|
||||||
|
help_ = partial(
|
||||||
|
Option,
|
||||||
|
'-h', '--help',
|
||||||
|
dest='help',
|
||||||
|
action='help',
|
||||||
|
help='Show help.',
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
isolated_mode = partial(
|
||||||
|
Option,
|
||||||
|
"--isolated",
|
||||||
|
dest="isolated_mode",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help=(
|
||||||
|
"Run pip in an isolated mode, ignoring environment variables and user "
|
||||||
|
"configuration."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
require_virtualenv = partial(
|
||||||
|
Option,
|
||||||
|
# Run only if inside a virtualenv, bail if not.
|
||||||
|
'--require-virtualenv', '--require-venv',
|
||||||
|
dest='require_venv',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help=SUPPRESS_HELP
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
verbose = partial(
|
||||||
|
Option,
|
||||||
|
'-v', '--verbose',
|
||||||
|
dest='verbose',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help='Give more output. Option is additive, and can be used up to 3 times.'
|
||||||
|
)
|
||||||
|
|
||||||
|
no_color = partial(
|
||||||
|
Option,
|
||||||
|
'--no-color',
|
||||||
|
dest='no_color',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help="Suppress colored output",
|
||||||
|
)
|
||||||
|
|
||||||
|
version = partial(
|
||||||
|
Option,
|
||||||
|
'-V', '--version',
|
||||||
|
dest='version',
|
||||||
|
action='store_true',
|
||||||
|
help='Show version and exit.',
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
quiet = partial(
|
||||||
|
Option,
|
||||||
|
'-q', '--quiet',
|
||||||
|
dest='quiet',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help=(
|
||||||
|
'Give less output. Option is additive, and can be used up to 3'
|
||||||
|
' times (corresponding to WARNING, ERROR, and CRITICAL logging'
|
||||||
|
' levels).'
|
||||||
|
),
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
progress_bar = partial(
|
||||||
|
Option,
|
||||||
|
'--progress-bar',
|
||||||
|
dest='progress_bar',
|
||||||
|
type='choice',
|
||||||
|
choices=list(BAR_TYPES.keys()),
|
||||||
|
default='on',
|
||||||
|
help=(
|
||||||
|
'Specify type of progress to be displayed [' +
|
||||||
|
'|'.join(BAR_TYPES.keys()) + '] (default: %default)'
|
||||||
|
),
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
log = partial(
|
||||||
|
Option,
|
||||||
|
"--log", "--log-file", "--local-log",
|
||||||
|
dest="log",
|
||||||
|
metavar="path",
|
||||||
|
help="Path to a verbose appending log."
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
no_input = partial(
|
||||||
|
Option,
|
||||||
|
# Don't ask for input
|
||||||
|
'--no-input',
|
||||||
|
dest='no_input',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help=SUPPRESS_HELP
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
proxy = partial(
|
||||||
|
Option,
|
||||||
|
'--proxy',
|
||||||
|
dest='proxy',
|
||||||
|
type='str',
|
||||||
|
default='',
|
||||||
|
help="Specify a proxy in the form [user:passwd@]proxy.server:port."
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
retries = partial(
|
||||||
|
Option,
|
||||||
|
'--retries',
|
||||||
|
dest='retries',
|
||||||
|
type='int',
|
||||||
|
default=5,
|
||||||
|
help="Maximum number of retries each connection should attempt "
|
||||||
|
"(default %default times).",
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
timeout = partial(
|
||||||
|
Option,
|
||||||
|
'--timeout', '--default-timeout',
|
||||||
|
metavar='sec',
|
||||||
|
dest='timeout',
|
||||||
|
type='float',
|
||||||
|
default=15,
|
||||||
|
help='Set the socket timeout (default %default seconds).',
|
||||||
|
) # type: Any
|
||||||
|
|
||||||
|
skip_requirements_regex = partial(
|
||||||
|
Option,
|
||||||
|
# A regex to be used to skip requirements
|
||||||
|
'--skip-requirements-regex',
|
||||||
|
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,
)  # type: Any


def exists_action():
    return Option(
        # Option when path already exists
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )


cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Any

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Any

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Any


def extra_index_url():
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )


no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Any


def find_links():
    return Option(
        '-f', '--find-links',
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )


def trusted_host():
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


# Remove after 1.5
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)  # type: Any


def constraints():
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )


def requirements():
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )


def editable():
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )


src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Any


def _get_format_control(values, option):
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.only_binary, existing.no_binary,
    )
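
# A rough sketch of the mutual exclusion handled above (the heavy lifting
# lives in fmt_ctl_handle_mutual_exclude, defined elsewhere in
# pip._internal.index): parsing
#   pip install --no-binary :all: --only-binary lxml
# ends with format_control.no_binary containing ':all:' and
# format_control.only_binary containing 'lxml'; each callback strips its
# values from the opposite set first, so a name is never in both sets.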


def no_binary():
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )


def only_binary():
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)

no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Any

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Any

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Any

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Any

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Any

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Any

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Any

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Any


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Any


def _merge_hash(option, opt_str, value, parser):
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)
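
# Illustrative result of the merge above: after optparse has processed
#   --hash=sha256:aaaa --hash=sha256:bbbb --hash=sha384:cccc
# parser.values.hashes would hold
#   {'sha256': ['aaaa', 'bbbb'], 'sha384': ['cccc']}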


hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Any


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Any


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}
@@ -0,0 +1,79 @@
"""
Package containing all pip commands
"""
from __future__ import absolute_import

from pip._internal.commands.completion import CompletionCommand
from pip._internal.commands.configuration import ConfigurationCommand
from pip._internal.commands.download import DownloadCommand
from pip._internal.commands.freeze import FreezeCommand
from pip._internal.commands.hash import HashCommand
from pip._internal.commands.help import HelpCommand
from pip._internal.commands.list import ListCommand
from pip._internal.commands.check import CheckCommand
from pip._internal.commands.search import SearchCommand
from pip._internal.commands.show import ShowCommand
from pip._internal.commands.install import InstallCommand
from pip._internal.commands.uninstall import UninstallCommand
from pip._internal.commands.wheel import WheelCommand

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Type
    from pip._internal.basecommand import Command

commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    ConfigurationCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]  # type: List[Type[Command]]

commands_dict = {c.name: c for c in commands_order}
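
# For example, commands_dict['install'] is InstallCommand and
# commands_dict['help'] is HelpCommand, since each Command subclass
# contributes its `name` attribute as the key.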


def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""

    if ordered:
        cmditems = _sort_commands(commands_dict, commands_order)
    else:
        cmditems = commands_dict.items()

    for name, command_class in cmditems:
        yield (name, command_class.summary)


def get_similar_commands(name):
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return False
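
# Example of the auto-correct above: get_similar_commands('instal') hands
# the typo to difflib.get_close_matches, which would suggest 'install';
# a name with no close match falls through and returns False.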


def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except ValueError:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
@@ -0,0 +1,42 @@
import logging

from pip._internal.basecommand import Command
from pip._internal.operations.check import (
    check_package_set, create_package_set_from_installed,
)
from pip._internal.utils.misc import get_installed_distributions

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        package_set = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting:
            return 1
        else:
            logger.info("No broken requirements found.")
@@ -0,0 +1,94 @@
from __future__ import absolute_import

import sys
import textwrap

from pip._internal.basecommand import Command
from pip._internal.utils.misc import get_prog

BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {
            COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 ) )
        }
        complete -o default -F _pip_completion %(prog)s
    """,
    'zsh': """
        function _pip_completion {
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] ) )
        }
        compctl -K _pip_completion %(prog)s
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\ -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c %(prog)s
    """,
}
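
# Plausible use of these templates (the target file depends on the shell
# setup):
#   pip completion --bash >> ~/.bashrc
#   pip completion --zsh  >> ~/.zshrc
# At runtime the %(prog)s placeholder is substituted with get_prog(), and
# the chosen script is wrapped in the BASE_COMPLETION start/end markers.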


class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '') % {
                    'prog': get_prog(),
                }
            )
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
@@ -0,0 +1,227 @@
import logging
import os
import subprocess

from pip._internal.basecommand import Command
from pip._internal.configuration import Configuration, kinds
from pip._internal.exceptions import PipError
from pip._internal.locations import venv_config_file
from pip._internal.status_codes import ERROR, SUCCESS
from pip._internal.utils.misc import get_prog

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --venv are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    name = 'config'
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """

    summary = "Manage local and global configuration."

    def __init__(self, *args, **kwargs):
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--venv',
            dest='venv_file',
            action='store_true',
            default=False,
            help='Use the virtualenv configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        file_options = {
            kinds.USER: options.user_file,
            kinds.GLOBAL: options.global_file,
            kinds.VENV: options.venv_file
        }

        if sum(file_options.values()) == 0:
            if not need_value:
                return None
            # Default to user, unless there's a virtualenv file.
            elif os.path.exists(venv_config_file):
                return kinds.VENV
            else:
                return kinds.USER
        elif sum(file_options.values()) == 1:
            # There's probably a better expression for this.
            return [key for key in file_options if file_options[key]][0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --venv, --global)."
        )
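
        # Sketch of the selection above: with no file flag passed, a
        # modifying action returns kinds.VENV when venv_config_file exists
        # and kinds.USER otherwise; exactly one flag (e.g. --global) picks
        # the matching kind; two or more flags fall through to the PipError.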

    def list_values(self, options, args):
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            logger.info("%s=%r", key, value)

    def get_name(self, options, args):
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        logger.info("%s", value)

    def set_name_value(self, options, args):
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def open_in_editor(self, options, args):
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.error(
                "Unable to save configuration. Please report this as a bug.",
                exc_info=1
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
@@ -0,0 +1,233 @@
from __future__ import absolute_import

import logging
import os

from pip._internal import cmdoptions
from pip._internal.basecommand import RequirementCommand
from pip._internal.exceptions import CommandError
from pip._internal.index import FormatControl
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version. A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  "or 'ip'. If not specified, then the current "
                  "interpreter implementation is used. Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
                  "current interpreter abi tag is used. Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        binary_only = FormatControl(set(), {':all:'})
        no_sdist_dependencies = (
            options.format_control != binary_only and
            not options.ignore_dependencies
        )
        if dist_restriction_set and no_sdist_dependencies:
            raise CommandError(
                "When restricting platform and interpreter constraints using "
                "--python-version, --platform, --abi, or --implementation, "
                "either --no-deps must be set, or --only-binary=:all: must be "
                "set and --no-binary must not be set (or must be set to "
                ":none:)."
            )
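
        # One invocation that satisfies the check above (tags are
        # illustrative): restrict everything and allow only wheels, e.g.
        #   pip download SomePackage --platform manylinux1_x86_64 \
        #       --python-version 27 --implementation cp --only-binary=:all: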

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. Check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
@@ -0,0 +1,96 @@
from __future__ import absolute_import

import sys

from pip._internal import index
from pip._internal.basecommand import Command
from pip._internal.cache import WheelCache
from pip._internal.compat import stdlib_pkgs
from pip._internal.operations.freeze import freeze

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable packages from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        format_control = index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        try:
            for line in freeze(**freeze_kwargs):
                sys.stdout.write(line + '\n')
        finally:
            wheel_cache.cleanup()
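
# The freeze() generator yields one requirement per line, so the output
# written above looks roughly like (versions illustrative):
#   certifi==2018.4.16
#   chardet==3.0.4
# which is exactly the format `pip install -r` accepts back.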
@@ -0,0 +1,57 @@
from __future__ import absolute_import

import hashlib
import logging
import sys

from pip._internal.basecommand import Command
from pip._internal.status_codes import ERROR
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            logger.info('%s:\n--hash=%s:%s',
                        path, algorithm, _hash_of_file(path, algorithm))


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
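
# Example run (digest shortened and purely illustrative):
#   $ pip hash ./downloads/SomePackage-1.0.tar.gz
#   ./downloads/SomePackage-1.0.tar.gz:
#   --hash=sha256:0a1b2c...
# FAVORITE_HASH is the default algorithm; -a/--algorithm selects another
# member of STRONG_HASHES.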
@@ -0,0 +1,36 @@
from __future__ import absolute_import

from pip._internal.basecommand import SUCCESS, Command
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        from pip._internal.commands import commands_dict, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
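
# Illustrative trip through the branch above:
#   $ pip help instal
#   ERROR: unknown command "instal" - maybe you meant "install"
# where the suggestion comes from get_similar_commands('instal').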
@@ -0,0 +1,502 @@
from __future__ import absolute_import

import errno
import logging
import operator
import os
import shutil
from optparse import SUPPRESS_HELP

from pip._internal import cmdoptions
from pip._internal.basecommand import RequirementCommand
from pip._internal.cache import WheelCache
from pip._internal.exceptions import (
    CommandError, InstallationError, PreviousBuildDirError,
)
from pip._internal.locations import distutils_scheme, virtualenv_no_global
from pip._internal.operations.check import check_install_conflicts
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.resolve import Resolver
from pip._internal.status_codes import ERROR
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, get_installed_version
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

try:
    import wheel
except ImportError:
    wheel = None


logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - dependencies are upgraded only when '
                 'they do not satisfy the requirements of the upgraded '
                 'package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. Check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # If caching is disabled or wheel is not installed don't
                    # try to build wheels.
                    if wheel and options.cache_dir:
                        # build wheels before install.
                        wb = WheelBuilder(
                            finder, preparer, wheel_cache,
                            build_options=[], global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(
                            requirement_set.requirements.values(),
                            session=session, autobuilding=True
                        )

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    possible_lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, possible_lib_locations
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        ddir = os.path.join(data_dir, item)
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )

    def _warn_about_conflicts(self, to_install):
        package_set, _dep_info = check_install_conflicts(to_install)
        missing, conflicting = _dep_info

        # NOTE: There is some duplication here from pip check
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                logger.critical(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[1],
                )

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.critical(
                    "%s %s has requirement %s, but you'll have %s %s which is "
                    "incompatible.",
                    project_name, version, req, dep_name, dep_version,
                )


def get_lib_location_guesses(*args, **kwargs):
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme['purelib'], scheme['platlib']]
|
||||||
|
|
||||||
|
|
||||||
|
def create_env_error_message(error, show_traceback, using_user_site):
|
||||||
|
"""Format an error message for an EnvironmentError
|
||||||
|
|
||||||
|
It may occur anytime during the execution of the install command.
|
||||||
|
"""
|
||||||
|
parts = []
|
||||||
|
|
||||||
|
# Mention the error if we are not going to show a traceback
|
||||||
|
parts.append("Could not install packages due to an EnvironmentError")
|
||||||
|
if not show_traceback:
|
||||||
|
parts.append(": ")
|
||||||
|
parts.append(str(error))
|
||||||
|
else:
|
||||||
|
parts.append(".")
|
||||||
|
|
||||||
|
# Spilt the error indication from a helper message (if any)
|
||||||
|
parts[-1] += "\n"
|
||||||
|
|
||||||
|
# Suggest useful actions to the user:
|
||||||
|
# (1) using user site-packages or (2) verifying the permissions
|
||||||
|
if error.errno == errno.EACCES:
|
||||||
|
user_option_part = "Consider using the `--user` option"
|
||||||
|
permissions_part = "Check the permissions"
|
||||||
|
|
||||||
|
if not using_user_site:
|
||||||
|
parts.extend([
|
||||||
|
user_option_part, " or ",
|
||||||
|
permissions_part.lower(),
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
parts.append(permissions_part)
|
||||||
|
parts.append(".\n")
|
||||||
|
|
||||||
|
return "".join(parts).strip() + "\n"
|
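
# Illustrative sketch (editor's addition, not part of pip): how the helper
# above composes its message for a permissions failure. The error and the
# path shown are hypothetical.
#
#     import errno
#     err = EnvironmentError(errno.EACCES, "Permission denied",
#                            "/usr/lib/python3.7/site-packages")
#     print(create_env_error_message(err, show_traceback=False,
#                                    using_user_site=False))
#     # Could not install packages due to an EnvironmentError: [Errno 13]
#     # Permission denied: '/usr/lib/python3.7/site-packages'
#     # Consider using the `--user` option or check the permissions.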
@ -0,0 +1,343 @@
from __future__ import absolute_import

import json
import logging
import warnings

from pip._vendor import six
from pip._vendor.six.moves import zip_longest

from pip._internal.basecommand import Command
from pip._internal.cmdoptions import index_group, make_option_group
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)
from pip._internal.utils.packaging import get_installer

logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "json, or legacy.",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        if options.list_format == "legacy":
            warnings.warn(
                "The legacy format has been deprecated and will be removed "
                "in the future.",
                RemovedInPip11Warning,
            )

        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
        )

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist, options):
        if options.verbose >= 1:
            return '%s (%s, %s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
                get_installer(dist),
            )
        elif dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist, options):
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist, options),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        elif options.list_format == "legacy":
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist, options))
                else:
                    logger.info(self.output_legacy(dist, options))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)


def tabulate(vals):
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes
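
# Illustrative sketch (editor's addition, not part of pip): what tabulate()
# returns for a small input, data hypothetical.
#
#     rows, widths = tabulate([["Package", "Version"], ["pip", "10.0.1"]])
#     # widths == [7, 7]
#     # rows   == ['Package Version', 'pip     10.0.1 ']  (note ljust padding)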


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if options.verbose >= 1:
            row.append(get_installer(proj))

        data.append(row)

    return data, header


def format_for_json(packages, options):
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
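
# Illustrative sketch (editor's addition, not part of pip): sample
# `pip list --format=json` output produced by format_for_json for one
# distribution at default verbosity (values hypothetical).
#
#     [{"name": "requests", "version": "2.18.4"}]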
@ -0,0 +1,135 @@
from __future__ import absolute_import

import logging
import sys
import textwrap
from collections import OrderedDict

from pip._vendor import pkg_resources
from pip._vendor.packaging.version import parse as parse_version
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore

from pip._internal.basecommand import SUCCESS, Command
from pip._internal.compat import get_terminal_size
from pip._internal.download import PipXmlrpcTransport
from pip._internal.exceptions import CommandError
from pip._internal.models import PyPI
from pip._internal.status_codes import NO_MATCHES_FOUND
from pip._internal.utils.logging import indent_log

logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
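
# Illustrative sketch (editor's addition, not part of pip): two version hits
# for one project collapse into a single entry, and the summary follows the
# highest version (data hypothetical).
#
#     transform_hits([
#         {'name': 'pkg', 'summary': 'old', 'version': '1.0'},
#         {'name': 'pkg', 'summary': 'new', 'version': '2.0'},
#     ])
#     # -> [{'name': 'pkg', 'summary': 'new', 'versions': ['1.0', '2.0']}]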


def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    return max(versions, key=parse_version)
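
# Illustrative sketch (editor's addition, not part of pip): parse_version
# gives real version ordering, so '0.10' beats '0.9', which a plain string
# max() would get wrong.
#
#     highest_version(['0.9', '0.10', '0.2'])   # -> '0.10'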
@ -0,0 +1,164 @@
from __future__ import absolute_import

import logging
import os
from email.parser import FeedParser  # type: ignore

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.basecommand import Command
from pip._internal.status_codes import ERROR, SUCCESS

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
                results, list_files=options.files, verbose=options.verbose):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Listing installed files requires
    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package
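
# Illustrative sketch (editor's addition, not part of pip): the function is
# a generator, so nothing is looked up until iteration (package name
# hypothetical).
#
#     for pkg in search_packages_info(['requests']):
#         print(pkg['name'], pkg['version'])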


def print_results(distributions, list_files=False, verbose=False):
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            logger.info("---")

        name = dist.get('name', '')
        required_by = [
            pkg.project_name for pkg in pkg_resources.working_set
            if name in [required.name for required in pkg.requires()]
        ]

        logger.info("Name: %s", name)
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
@ -0,0 +1,71 @@
from __future__ import absolute_import

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.basecommand import Command
from pip._internal.exceptions import InstallationError
from pip._internal.req import InstallRequirement, parse_requirements


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            reqs_to_uninstall = {}
            for name in args:
                req = InstallRequirement.from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session):
                    if req.name:
                        reqs_to_uninstall[canonicalize_name(req.name)] = req
            if not reqs_to_uninstall:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )
            for req in reqs_to_uninstall.values():
                uninstall_pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                if uninstall_pathset:
                    uninstall_pathset.commit()
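
# Illustrative sketch (editor's addition, not part of pip): canonicalize_name
# is what makes `pip uninstall Django django` collapse to a single entry in
# reqs_to_uninstall above.
#
#     from pip._vendor.packaging.utils import canonicalize_name
#     canonicalize_name('Django') == canonicalize_name('django')   # True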
@ -0,0 +1,179 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os

from pip._internal import cmdoptions
from pip._internal.basecommand import RequirementCommand
from pip._internal.cache import WheelCache
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.resolve import Resolver
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )

                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    wheels_built_successfully = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if not wheels_built_successfully:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()
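
# Illustrative sketch (editor's addition, not part of pip): the build_delete
# flag above means the temporary build directory is auto-deleted only when
# the user neither passed --no-clean nor pinned an explicit --build dir.
#
#     assert (not (False or None)) is True    # defaults: delete temp dir
#     assert (not (True or None)) is False    # --no-clean given: keep it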
@ -0,0 +1,235 @@
"""Stuff that differs in different Python versions and platform
distributions."""
from __future__ import absolute_import, division

import codecs
import locale
import logging
import os
import shutil
import sys

from pip._vendor.six import text_type

try:
    import ipaddress
except ImportError:
    try:
        from pip._vendor import ipaddress  # type: ignore
    except ImportError:
        import ipaddr as ipaddress  # type: ignore
        ipaddress.ip_address = ipaddress.IPAddress
        ipaddress.ip_network = ipaddress.IPNetwork


__all__ = [
    "ipaddress", "uses_pycache", "console_to_str", "native_str",
    "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
]


logger = logging.getLogger(__name__)

if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp

    try:
        cache_from_source = imp.cache_from_source  # type: ignore
    except AttributeError:
        # does not use __pycache__
        cache_from_source = None

    uses_pycache = cache_from_source is not None


if sys.version_info >= (3, 5):
    backslashreplace_decode = "backslashreplace"
else:
    # In version 3.4 and older, backslashreplace exists
    # but does not support use for decoding.
    # We implement our own replace handler for this
    # situation, so that we can consistently use
    # backslash replacement for all versions.
    def backslashreplace_decode_fn(err):
        raw_bytes = (err.object[i] for i in range(err.start, err.end))
        if sys.version_info[0] == 2:
            # Python 2 gave us characters - convert to numeric bytes
            raw_bytes = (ord(b) for b in raw_bytes)
        return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
    codecs.register_error(
        "backslashreplace_decode",
        backslashreplace_decode_fn,
    )
    backslashreplace_decode = "backslashreplace_decode"


def console_to_str(data):
    """Return a string, safe for output, of subprocess output.

    We assume the data is in the locale preferred encoding.
    If it won't decode properly, we warn the user but decode as
    best we can.

    We also ensure that the output can be safely written to
    standard output without encoding errors.
    """

    # First, get the encoding we assume. This is the preferred
    # encoding for the locale, unless that is not found, or
    # it is ASCII, in which case assume UTF-8
    encoding = locale.getpreferredencoding()
    if (not encoding) or codecs.lookup(encoding).name == "ascii":
        encoding = "utf-8"

    # Now try to decode the data - if we fail, warn the user and
    # decode with replacement.
    try:
        s = data.decode(encoding)
    except UnicodeDecodeError:
        logger.warning(
            "Subprocess output does not appear to be encoded as %s",
            encoding,
        )
        s = data.decode(encoding, errors=backslashreplace_decode)

    # Make sure we can print the output, by encoding it to the output
    # encoding with replacement of unencodable characters, and then
    # decoding again.
    # We use stderr's encoding because it's less likely to be
    # redirected and if we don't find an encoding we skip this
    # step (on the assumption that output is wrapped by something
    # that won't fail).
    # The double getattr is to deal with the possibility that we're
    # being called in a situation where sys.__stderr__ doesn't exist,
    # or doesn't have an encoding attribute. Neither of these cases
    # should occur in normal pip use, but there's no harm in checking
    # in case people use pip in (unsupported) unusual situations.
    output_encoding = getattr(getattr(sys, "__stderr__", None),
                              "encoding", None)

    if output_encoding:
        s = s.encode(output_encoding, errors="backslashreplace")
        s = s.decode(output_encoding)

    return s
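
# Illustrative sketch (editor's addition, not part of pip): with a UTF-8
# locale,
#
#     console_to_str(b'caf\xc3\xa9')   # -> 'café'
#
# and undecodable bytes are kept as backslash escapes instead of raising.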


if sys.version_info >= (3,):
    def native_str(s, replace=False):
        if isinstance(s, bytes):
            return s.decode('utf-8', 'replace' if replace else 'strict')
        return s

else:
    def native_str(s, replace=False):
        # Replace is ignored -- unicode to UTF-8 can't fail
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s


def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        file_uid = os.fstat(fd).st_uid
        os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
    return file_uid


def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    expanded = os.path.expanduser(path)
    if path.startswith('~/') and expanded.startswith('//'):
        expanded = expanded[1:]
    return expanded
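
# Illustrative sketch (editor's addition, not part of pip): with HOME=/
# (the case from issue 14768 above), os.path.expanduser('~/x') yields '//x';
# the wrapper strips the doubled slash:
#
#     expanduser('~/x')   # -> '/x' rather than '//x'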


# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))


def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)
    else:
        path1 = os.path.normcase(os.path.abspath(file1))
        path2 = os.path.normcase(os.path.abspath(file2))
        return path1 == path2


if hasattr(shutil, 'get_terminal_size'):
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        return tuple(shutil.get_terminal_size())
else:
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        def ioctl_GWINSZ(fd):
            try:
                import fcntl
                import termios
                import struct
                cr = struct.unpack_from(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
                )
            except Exception:
                return None
            if cr == (0, 0):
                return None
            return cr
        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
        if not cr:
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                cr = ioctl_GWINSZ(fd)
                os.close(fd)
            except Exception:
                pass
        if not cr:
            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(cr[1]), int(cr[0])
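
# Illustrative sketch (editor's addition, not part of pip): both branches
# return (columns, rows) as plain ints; note the fallback reads LINES and
# COLUMNS from the environment and swaps them into (columns, rows) order.
#
#     width, height = get_terminal_size()   # e.g. (80, 25) with no tty info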
@ -0,0 +1,378 @@
"""Configuration management setup

Some terminology:
- name
  As written in config files.
- value
  Value associated with a name
- key
  Name combined with its section (section.name)
- variant
  A single word describing where the configuration key-value pair came from
"""

import locale
import logging
import os

from pip._vendor import six
from pip._vendor.six.moves import configparser

from pip._internal.exceptions import ConfigurationError
from pip._internal.locations import (
    legacy_config_file, new_config_file, running_under_virtualenv,
    site_config_files, venv_config_file,
)
from pip._internal.utils.misc import ensure_dir, enum
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple

    RawConfigParser = configparser.RawConfigParser  # Shorthand
    Kind = NewType("Kind", str)

logger = logging.getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
    # type: (str) -> str
    """Make a name consistent regardless of source (environment or file)
    """
    name = name.lower().replace('_', '-')
    if name.startswith('--'):
        name = name[2:]  # only prefer long opts
    return name
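
# Illustrative sketch (editor's addition, not part of pip):
#
#     _normalize_name('--Global_Timeout')   # -> 'global-timeout'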
|
||||||
|
|
||||||
|
|
||||||
|
def _disassemble_key(name):
|
||||||
|
# type: (str) -> List[str]
|
||||||
|
return name.split(".", 1)
|
||||||
|
|
||||||
|
|
||||||
|
# The kinds of configurations there are.
|
||||||
|
kinds = enum(
|
||||||
|
USER="user", # User Specific
|
||||||
|
GLOBAL="global", # System Wide
|
||||||
|
VENV="venv", # Virtual Environment Specific
|
||||||
|
ENV="env", # from PIP_CONFIG_FILE
|
||||||
|
ENV_VAR="env-var", # from Environment Variables
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Configuration(object):
|
||||||
|
"""Handles management of configuration.
|
||||||
|
|
||||||
|
Provides an interface to accessing and managing configuration files.
|
||||||
|
|
||||||
|
This class converts provides an API that takes "section.key-name" style
|
||||||
|
keys and stores the value associated with it as "key-name" under the
|
||||||
|
section "section".
|
||||||
|
|
||||||
|
This allows for a clean interface wherein the both the section and the
|
||||||
|
key-name are preserved in an easy to manage form in the configuration files
|
||||||
|
and the data stored is also nice.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, isolated, load_only=None):
|
||||||
|
# type: (bool, Kind) -> None
|
||||||
|
super(Configuration, self).__init__()
|
||||||
|
|
||||||
|
_valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
|
||||||
|
if load_only not in _valid_load_only:
|
||||||
|
raise ConfigurationError(
|
||||||
|
"Got invalid value for load_only - should be one of {}".format(
|
||||||
|
", ".join(map(repr, _valid_load_only[:-1]))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.isolated = isolated # type: bool
|
||||||
|
self.load_only = load_only # type: Optional[Kind]
|
||||||
|
|
||||||
|
# The order here determines the override order.
|
||||||
|
self._override_order = [
|
||||||
|
kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
|
||||||
|
]
|
||||||
|
|
||||||
|
self._ignore_env_names = ["version", "help"]
|
||||||
|
|
||||||
|
# Because we keep track of where we got the data from
|
||||||
|
self._parsers = {
|
||||||
|
variant: [] for variant in self._override_order
|
||||||
|
} # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
|
||||||
|
self._config = {
|
||||||
|
variant: {} for variant in self._override_order
|
||||||
|
} # type: Dict[Kind, Dict[str, Any]]
|
||||||
|
self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
|
||||||
|
|
||||||
|
def load(self):
|
||||||
|
# type: () -> None
|
||||||
|
"""Loads configuration from configuration files and environment
|
||||||
|
"""
|
||||||
|
self._load_config_files()
|
||||||
|
if not self.isolated:
|
||||||
|
self._load_environment_vars()
|
||||||
|
|
||||||
|
def get_file_to_edit(self):
|
||||||
|
# type: () -> Optional[str]
|
||||||
|
"""Returns the file with highest priority in configuration
|
||||||
|
"""
|
||||||
|
assert self.load_only is not None, \
|
||||||
|
"Need to be specified a file to be editing"
|
||||||
|
|
||||||
|
try:
|
||||||
|
return self._get_parser_to_modify()[0]
|
||||||
|
except IndexError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
# type: () -> Iterable[Tuple[str, Any]]
|
||||||
|
"""Returns key-value pairs like dict.items() representing the loaded
|
||||||
|
configuration
|
||||||
|
"""
|
||||||
|
return self._dictionary.items()
|
||||||
|
|
||||||
|
def get_value(self, key):
|
||||||
|
# type: (str) -> Any
|
||||||
|
"""Get a value from the configuration.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self._dictionary[key]
|
||||||
|
except KeyError:
|
||||||
|
raise ConfigurationError("No such key - {}".format(key))
|
||||||
|
|
||||||
|
def set_value(self, key, value):
|
||||||
|
# type: (str, Any) -> None
|
||||||
|
"""Modify a value in the configuration.
|
||||||
|
"""
|
||||||
|
self._ensure_have_load_only()
|
||||||
|
|
||||||
|
fname, parser = self._get_parser_to_modify()
|
||||||
|
|
||||||
|
if parser is not None:
|
||||||
|
section, name = _disassemble_key(key)
|
||||||
|
|
||||||
|
# Modify the parser and the configuration
|
||||||
|
if not parser.has_section(section):
|
||||||
|
parser.add_section(section)
|
||||||
|
parser.set(section, name, value)
|
||||||
|
|
||||||
|
self._config[self.load_only][key] = value
|
||||||
|
self._mark_as_modified(fname, parser)
|
||||||
|
|
||||||
|
def unset_value(self, key):
|
||||||
|
# type: (str) -> None
|
||||||
|
"""Unset a value in the configuration.
|
||||||
|
"""
|
||||||
|
self._ensure_have_load_only()
|
||||||
|
|
||||||
|
if key not in self._config[self.load_only]:
|
||||||
|
raise ConfigurationError("No such key - {}".format(key))
|
||||||
|
|
||||||
|
fname, parser = self._get_parser_to_modify()
|
||||||
|
|
||||||
|
if parser is not None:
|
||||||
|
section, name = _disassemble_key(key)
|
||||||
|
|
||||||
|
# Remove the key in the parser
|
||||||
|
modified_something = False
|
||||||
|
if parser.has_section(section):
|
||||||
|
# Returns whether the option was removed or not
|
||||||
|
modified_something = parser.remove_option(section, name)
|
||||||
|
|
||||||
|
if modified_something:
|
||||||
|
# name removed from parser, section may now be empty
|
||||||
|
section_iter = iter(parser.items(section))
|
||||||
|
try:
|
||||||
|
val = six.next(section_iter)
|
||||||
|
except StopIteration:
|
||||||
|
val = None
|
||||||
|
|
||||||
|
if val is None:
|
||||||
|
parser.remove_section(section)
|
||||||
|
|
||||||
|
self._mark_as_modified(fname, parser)
|
||||||
|
else:
|
||||||
|
raise ConfigurationError(
|
||||||
|
"Fatal Internal error [id=1]. Please report as a bug."
|
||||||
|
)
|
||||||
|
|
||||||
|
del self._config[self.load_only][key]
|
||||||
|
|
||||||
|
def save(self):
|
||||||
|
# type: () -> None
|
||||||
|
"""Save the currentin-memory state.
|
||||||
|
"""
|
||||||
|
self._ensure_have_load_only()
|
||||||
|
|
||||||
|
for fname, parser in self._modified_parsers:
|
||||||
|
logger.info("Writing to %s", fname)
|
||||||
|
|
||||||
|
# Ensure directory exists.
|
||||||
|
ensure_dir(os.path.dirname(fname))
|
||||||
|
|
||||||
|
with open(fname, "w") as f:
|
||||||
|
parser.write(f) # type: ignore
|
||||||
|
|
||||||
|
#
|
||||||
|
# Private routines
|
||||||
|
#
|
||||||
|
|
||||||
|
def _ensure_have_load_only(self):
|
||||||
|
# type: () -> None
|
||||||
|
if self.load_only is None:
|
||||||
|
raise ConfigurationError("Needed a specific file to be modifying.")
|
||||||
|
logger.debug("Will be working with %s variant only", self.load_only)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _dictionary(self):
|
||||||
|
# type: () -> Dict[str, Any]
|
||||||
|
"""A dictionary representing the loaded configuration.
|
||||||
|
"""
|
||||||
|
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
|
||||||
|
# are not needed here.
|
||||||
|
retval = {}
|
||||||
|
|
||||||
|
for variant in self._override_order:
|
||||||
|
retval.update(self._config[variant])
|
||||||
|
|
||||||
|
return retval
|
||||||
|
|
||||||
|
def _load_config_files(self):
|
||||||
|
# type: () -> None
|
||||||
|
"""Loads configuration from configuration files
|
||||||
|
"""
|
||||||
|
config_files = dict(self._iter_config_files())
|
||||||
|
if config_files[kinds.ENV][0:1] == [os.devnull]:
|
||||||
|
logger.debug(
|
||||||
|
"Skipping loading configuration files due to "
|
||||||
|
"environment's PIP_CONFIG_FILE being os.devnull"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
for variant, files in config_files.items():
|
||||||
|
for fname in files:
|
||||||
|
# If there's specific variant set in `load_only`, load only
|
||||||
|
# that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                raise ConfigurationError((
                    "ERROR: "
                    "Configuration file contains invalid %s characters.\n"
                    "Please fix your configuration, located at %s\n"
                ) % (locale.getpreferredencoding(False), fname))
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized
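
    # A hedged usage sketch (not part of the diff): the section name and the
    # normalized option name are joined into a single dotted key. The option
    # name and value below are hypothetical.
    #
    #   self._normalized_keys("global", [("index-url", "https://example.org")])
    #   # -> {"global.index-url": "https://example.org"}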

    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                yield key[4:].lower(), val
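
    # For instance (hypothetical environment): with PIP_TIMEOUT=60 set, this
    # yields ("timeout", "60"), which _load_environment_vars() then stores
    # under the key ":env:.timeout".
    #
    #   os.environ["PIP_TIMEOUT"] = "60"
    #   list(self._get_environ_vars())  # -> [("timeout", "60")]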

    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        # at the base we have any global configuration
        yield kinds.GLOBAL, list(site_config_files)

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, [legacy_config_file, new_config_file]

        # finally, virtualenv configuration, which trumps the others
        if running_under_virtualenv():
            yield kinds.VENV, [venv_config_file]
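
        # Reading the yields above together with _dictionary, later variants
        # override earlier ones. A hedged sketch of the effective precedence,
        # assuming _override_order follows the order yielded here:
        #
        #   kinds.ENV < kinds.GLOBAL < kinds.USER < kinds.VENV
        #   (lowest priority)                      (highest priority)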

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
@@ -0,0 +1,922 @@
from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys

from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
from pip._vendor.urllib3.util import IS_PYOPENSSL

import pip
from pip._internal.compat import WINDOWS
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
    display_path, format_size, get_installed_version, rmtree, splitext,
    unpack_file,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs

try:
    import ssl  # noqa
except ImportError:
    ssl = None

HAS_TLS = (ssl is not None) or IS_PYOPENSSL

__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
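
# A hedged example of the header shape the format string above produces
# (values are hypothetical, not captured output):
#
#   pip/10.0.1 {"cpu":"x86_64","implementation":{"name":"CPython",
#   "version":"3.7.0"},"installer":{"name":"pip","version":"10.0.1"},...}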


class MultiDomainBasicAuth(AuthBase):

    def __init__(self, prompting=True):
        self.prompting = prompting
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                user, pwd = userinfo.split(":", 1)
                return (urllib_unquote(user), urllib_unquote(pwd))
            return urllib_unquote(userinfo), None
        return None, None
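
    # A small usage sketch (hypothetical netloc with a percent-encoded
    # password):
    #
    #   auth = MultiDomainBasicAuth()
    #   auth.parse_credentials("user:p%40ss@example.com")  # ("user", "p@ss")
    #   auth.parse_credentials("example.com")              # (None, None)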


class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None


class PipSession(requests.Session):

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)


def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url
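
# A round-trip sketch of the two helpers above (hypothetical POSIX path):
#
#   path_to_url("/tmp/pkg.tar.gz")           # 'file:///tmp/pkg.tar.gz'
#   url_to_path("file:///tmp/pkg.tar.gz")    # '/tmp/pkg.tar.gz'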


def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False


def unpack_vcs_link(link, location):
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(resp, link, content_file, hashes, progress_bar):
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None, progress_bar="on"):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)


def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when the user requests to install a local directory, e.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
"""Provide a `xmlrpclib.Transport` implementation via a `PipSession`
|
||||||
|
object.
|
||||||
|
"""

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None,
               progress_bar="on"):
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)


def _download_http_url(link, session, temp_dir, hashes, progress_bar):
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type


def _check_download_dir(link, download_dir, hashes):
""" Check download_dir for previously downloaded file with correct hash
|
||||||
|
If a correct file is found return its path else None
|
||||||
|
"""
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if hashes:
            try:
                hashes.check_against_path(download_path)
            except HashMismatch:
                logger.warning(
                    'Previously-downloaded file %s has bad hash. '
                    'Re-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None
@@ -0,0 +1,249 @@
"""Exceptions used throughout package"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from itertools import chain, groupby, repeat
|
||||||
|
|
||||||
|
from pip._vendor.six import iteritems
|
||||||
|
|
||||||
|
|
||||||
|
class PipError(Exception):
|
||||||
|
"""Base pip exception"""
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationError(PipError):
|
||||||
|
"""General exception in configuration"""
|
||||||
|
|
||||||
|
|
||||||
|
class InstallationError(PipError):
|
||||||
|
"""General exception during installation"""
|
||||||
|
|
||||||
|
|
||||||
|
class UninstallationError(PipError):
|
||||||
|
"""General exception during uninstallation"""
|
||||||
|
|
||||||
|
|
||||||
|
class DistributionNotFound(InstallationError):
|
||||||
|
"""Raised when a distribution cannot be found to satisfy a requirement"""
|
||||||
|
|
||||||
|
|
||||||
|
class RequirementsFileParseError(InstallationError):
|
||||||
|
"""Raised when a general error occurs parsing a requirements file line."""
|
||||||
|
|
||||||
|
|
||||||
|
class BestVersionAlreadyInstalled(PipError):
|
||||||
|
"""Raised when the most up-to-date version of a package is already
|
||||||
|
installed."""
|
||||||
|
|
||||||
|
|
||||||
|
class BadCommand(PipError):
|
||||||
|
"""Raised when virtualenv or a command is not found"""
|
||||||
|
|
||||||
|
|
||||||
|
class CommandError(PipError):
|
||||||
|
"""Raised when there is an error in command-line arguments"""
|
||||||
|
|
||||||
|
|
||||||
|
class PreviousBuildDirError(PipError):
|
||||||
|
"""Raised when there's a previous conflicting build directory"""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidWheelFilename(InstallationError):
|
||||||
|
"""Invalid wheel filename."""
|
||||||
|
|
||||||
|
|
||||||
|
class UnsupportedWheel(InstallationError):
|
||||||
|
"""Unsupported wheel."""
|
||||||
|
|
||||||
|
|
||||||
|
class HashErrors(InstallationError):
|
||||||
|
"""Multiple HashError instances rolled into one for reporting"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.errors = []
|
||||||
|
|
||||||
|
def append(self, error):
|
||||||
|
self.errors.append(error)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
lines = []
|
||||||
|
self.errors.sort(key=lambda e: e.order)
|
||||||
|
for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
|
||||||
|
lines.append(cls.head)
|
||||||
|
lines.extend(e.body() for e in errors_of_cls)
|
||||||
|
if lines:
|
||||||
|
return '\n'.join(lines)
|
||||||
|
|
||||||
|
def __nonzero__(self):
|
||||||
|
return bool(self.errors)
|
||||||
|
|
||||||
|
def __bool__(self):
|
||||||
|
return self.__nonzero__()
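
    # A short aggregation sketch (hypothetical errors; both classes are
    # defined further below in this file):
    #
    #   errs = HashErrors()
    #   errs.append(VcsHashUnsupported())
    #   errs.append(HashUnpinned())
    #   if errs:         # __bool__ is True once anything was appended
    #       print(errs)  # bodies grouped under each class's `head` line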


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when there are deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns the long description of the req,
        with line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory; we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                        or 123451234512345123451234512345123451234512345
            Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            prefix = '    or'
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""
1117 venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py Normal file
File diff suppressed because it is too large
@@ -0,0 +1,194 @@
"""Locations where we look for configs, install stuff, etc"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import platform
|
||||||
|
import site
|
||||||
|
import sys
|
||||||
|
import sysconfig
|
||||||
|
from distutils import sysconfig as distutils_sysconfig
|
||||||
|
from distutils.command.install import SCHEME_KEYS, install # type: ignore
|
||||||
|
|
||||||
|
from pip._internal.compat import WINDOWS, expanduser
|
||||||
|
from pip._internal.utils import appdirs
|
||||||
|
|
||||||
|
# Application Directories
|
||||||
|
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
|
||||||
|
|
||||||
|
|
||||||
|
DELETE_MARKER_MESSAGE = '''\
|
||||||
|
This file is placed here by pip to indicate the source was put
|
||||||
|
here by pip.
|
||||||
|
|
||||||
|
Once this package is successfully installed this source code will be
|
||||||
|
deleted (unless you remove this file).
|
||||||
|
'''
|
||||||
|
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|
||||||
|
|
||||||
|
|
||||||
|
def write_delete_marker_file(directory):
|
||||||
|
"""
|
||||||
|
Write the pip delete marker file into this directory.
|
||||||
|
"""
|
||||||
|
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
|
||||||
|
with open(filepath, 'w') as marker_fp:
|
||||||
|
marker_fp.write(DELETE_MARKER_MESSAGE)
|
||||||
|
|
||||||
|
|
||||||
|
def running_under_virtualenv():
|
||||||
|
"""
|
||||||
|
Return True if we're running inside a virtualenv, False otherwise.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if hasattr(sys, 'real_prefix'):
|
||||||
|
return True
|
||||||
|
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def virtualenv_no_global():
|
||||||
|
"""
|
||||||
|
Return True if in a venv and no system site packages.
|
||||||
|
"""
|
||||||
|
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    if running_under_virtualenv() and os.path.isfile(no_global_file):
        return True


if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_path("purelib")
# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE
user_dir = expanduser('~')
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.ini'

    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.conf'

    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'

site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]

venv_config_file = os.path.join(sys.prefix, config_basename)
new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)


def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
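
# A minimal usage sketch (an assumption based on the pip 10-era internals
# above; 'example-pkg' is a hypothetical project name). distutils_scheme()
# returns a dict keyed by SCHEME_KEYS, e.g. 'purelib', 'scripts', 'headers'.
def _demo_scheme():
    demo = distutils_scheme('example-pkg')
    # On POSIX virtualenvs 'scripts' typically points at <sys.prefix>/bin.
    print(demo['scripts'])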
@@ -0,0 +1,4 @@
from pip._internal.models.index import Index, PyPI


__all__ = ["Index", "PyPI"]
@@ -0,0 +1,15 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class Index(object):
    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')

    def url_to_path(self, path):
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.org/')
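
# A minimal sketch of the Index model above; the mirror URL is a
# hypothetical placeholder.
_mirror = Index('https://mirror.example.org/')
assert _mirror.simple_url == 'https://mirror.example.org/simple'
assert PyPI.netloc == 'pypi.org'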
@@ -0,0 +1,106 @@
"""Validation of dependencies of packages
"""

from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.operations.prepare import make_abstract_dist

from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement
    from typing import Any, Dict, Iterator, Set, Tuple, List

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> PackageSet
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}
    retval = {}
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        retval[name] = PackageDetails(dist.version, dist.requires())
    return retval


def check_package_set(package_set):
    # type: (PackageSet) -> CheckResult
    """Check if a package set is consistent
    """
    missing = dict()
    conflicting = dict()

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        def str_key(x):
            return str(x)

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str_key)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str_key)

    return missing, conflicting
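
# A minimal sketch of check_package_set() on a hand-built package set; the
# project names are hypothetical, and pkg_resources.Requirement.parse() is
# used only to supply objects with .project_name/.specifier/.marker.
from pip._vendor import pkg_resources as _pkg_resources

_demo = {
    'alpha': PackageDetails(
        '1.0', [_pkg_resources.Requirement.parse('beta>=2.0')]),
    'beta': PackageDetails('1.5', []),
}
_missing, _conflicting = check_package_set(_demo)
# beta is installed but at 1.5 (< 2.0), so alpha ends up in _conflicting.
assert 'alpha' in _conflicting and not _missing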


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    state = create_package_set_from_installed()
    _simulate_installation_of(to_install, state)
    return state, check_package_set(state)


# NOTE from @pradyunsg
# This required a minor update in dependency link handling logic over at
# operations.prepare.IsSDist.dist() to get it working
def _simulate_installation_of(to_install, state):
    # type: (List[InstallRequirement], PackageSet) -> None
    """Computes the version of packages after installing to_install.
    """

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        dist = make_abstract_dist(inst_req).dist(finder=None)
        name = canonicalize_name(dist.key)
        state[name] = PackageDetails(dist.version, dist.requires())
@@ -0,0 +1,252 @@
from __future__ import absolute_import

import collections
import logging
import os
import re
import warnings

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import InstallationError
from pip._internal.req import InstallRequirement
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)

logger = logging.getLogger(__name__)


def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        isolated=False,
        wheel_cache=None,
        exclude_editable=False,
        skip=()):
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but that "
                                "package is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield (
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
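
# A minimal usage sketch: freeze() is a generator of requirement lines, and
# the `pip freeze` command essentially just iterates and prints. The keyword
# values here are illustrative assumptions.
def _demo_freeze():
    for line in freeze(local_only=True, skip=('pip', 'setuptools')):
        print(line)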


class FrozenRequirement(object):
    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip._internal.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req,
                    )
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    warnings.warn(
                        "SVN editable detection based on dependency links "
                        "will be dropped in the future.",
                        RemovedInPip11Warning,
                    )
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
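
# A minimal sketch of the serialization contract above: comments come first,
# the requirement last, and editables get a '-e ' prefix. Names are made up.
_fr = FrozenRequirement('example-pkg', 'example-pkg==1.0', editable=False,
                        comments=['## pinned by hand'])
assert str(_fr) == '## pinned by hand\nexample-pkg==1.0\n'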
@@ -0,0 +1,380 @@
"""Prepares a distribution for installation
"""

import itertools
import logging
import os
import sys
from copy import copy

from pip._vendor import pkg_resources, requests

from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.compat import expanduser
from pip._internal.download import (
    is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
)
from pip._internal.exceptions import (
    DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
    PreviousBuildDirError, VcsHashUnsupported,
)
from pip._internal.index import FormatControl
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    call_subprocess, display_path, normalize_path,
)
from pip._internal.utils.ui import open_spinner
from pip._internal.vcs import vcs

logger = logging.getLogger(__name__)


def make_abstract_dist(req):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req.editable:
        return IsSDist(req)
    elif req.link and req.link.is_wheel:
        return IsWheel(req)
    else:
        return IsSDist(req)


def _install_build_reqs(finder, prefix, build_requirements):
    # NOTE: What follows is not a very good thing.
    #       Eventually, this should move into the BuildEnvironment class and
    #       that should handle all the isolation and sub-process invocation.
    finder = copy(finder)
    finder.format_control = FormatControl(set(), set([":all:"]))
    urls = [
        finder.find_requirement(
            InstallRequirement.from_line(r), upgrade=False).url
        for r in build_requirements
    ]
    args = [
        sys.executable, '-m', 'pip', 'install', '--ignore-installed',
        '--no-user', '--prefix', prefix,
    ] + list(urls)

    with open_spinner("Installing build dependencies") as spinner:
        call_subprocess(args, show_stdout=False, spinner=spinner)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        self.req = req

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self, finder):
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError(self.prep_for_dist)


class IsWheel(DistAbstraction):

    def dist(self, finder):
        return list(pkg_resources.find_distributions(
            self.req.source_dir))[0]

    def prep_for_dist(self, finder, build_isolation):
        # FIXME: https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self, finder):
        dist = self.req.get_dist()
        # FIXME: shouldn't be globally added.
        if finder and dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self, finder, build_isolation):
        # Before calling "setup.py egg_info", we need to set-up the build
        # environment.
        build_requirements, isolate = self.req.get_pep_518_info()
        should_isolate = build_isolation and isolate

        minimum_requirements = ('setuptools', 'wheel')
        missing_requirements = set(minimum_requirements) - set(
            pkg_resources.Requirement(r).key
            for r in build_requirements
        )
        if missing_requirements:
            def format_reqs(rs):
                return ' and '.join(map(repr, sorted(rs)))
            logger.warning(
                "Missing build time requirements in pyproject.toml for %s: "
                "%s.", self.req, format_reqs(missing_requirements)
            )
            logger.warning(
                "This version of pip does not implement PEP 517 so it cannot "
                "build a wheel without %s.", format_reqs(minimum_requirements)
            )

        if should_isolate:
            with self.req.build_env:
                pass
            _install_build_reqs(finder, self.req.build_env.path,
                                build_requirements)
        else:
            self.req.build_env = NoOpBuildEnvironment(no_clean=False)

        self.req.run_egg_info()
        self.req.assert_source_matches_version()
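
# A minimal sketch of the missing-requirements arithmetic inside
# prep_for_dist() above; the reqs value stands in for a hypothetical
# pyproject.toml that only declares setuptools.
def _demo_missing_reqs():
    reqs = ['setuptools>=30']
    missing = set(('setuptools', 'wheel')) - set(
        pkg_resources.Requirement.parse(r).key for r in reqs
    )
    assert missing == {'wheel'}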


class Installed(DistAbstraction):

    def dist(self, finder):
        return self.req.satisfied_by

    def prep_for_dist(self, finder):
        pass
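
# A dispatch sketch for make_abstract_dist() with a stub requirement;
# _StubReq is a hypothetical stand-in for InstallRequirement, just rich
# enough to exercise the factory's branching (no link, not editable).
class _StubReq(object):
    editable = False
    link = None

assert isinstance(make_abstract_dist(_StubReq()), IsSDist)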


class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
                 progress_bar, build_isolation):
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir

        # Where still packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation

    @property
    def _download_should_save(self):
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(self, req, session, finder,
                                   upgrade_allowed, require_hashes):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(self, req, require_hashes, use_user_site,
                                     finder):
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(self, req, require_hashes, skip_reason):
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to %r" % (req.satisfied_by,)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = Installed(req)

        return abstract_dist
@@ -0,0 +1,317 @@
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import distutils.util
import logging
import platform
import re
import sys
import sysconfig
import warnings
from collections import OrderedDict

import pip._internal.utils.glibc

logger = logging.getLogger(__name__)

_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')


def get_config_var(var):
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    else:
        return sys.version_info[0], sys.version_info[1]


def get_impl_tag():
    """
    Returns the Tag for this specific implementation.
    """
    return "{}{}".format(get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            logger.debug("Config variable '%s' is unset, Python ABI tag may "
                         "be incorrect", var)
        return fallback()
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi


def _is_running_32bit():
    return sys.maxsize == 2147483647


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        if machine == "x86_64" and _is_running_32bit():
            machine = "i386"
        elif machine == "ppc64" and _is_running_32bit():
            machine = "ppc"

        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result
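
# A quick sanity sketch: on a typical 64-bit CPython 3.7 Linux build these
# print 'cp37' and 'linux_x86_64'; actual values vary by interpreter and OS.
def _demo_tags():
    print(get_impl_tag(), get_platform())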


def is_manylinux1_compatible():
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return pip._internal.utils.glibc.have_compatible_glibc(2, 5)


def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart not the "Processor support" since I believe
        #       that we care about what instruction sets an application can use
        #       not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches


def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported


implementation_tag = get_impl_tag()
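
# A minimal sketch of get_supported(): the first tag is the most specific
# one for the running interpreter (for example ('cp37', 'cp37m',
# 'manylinux1_x86_64') on a manylinux1-compatible 64-bit CPython 3.7), and a
# generic ('py3', 'none', 'any')-style tag is always present.
def _demo_supported():
    tags = get_supported()
    print(tags[0])
    assert ('py%s' % sys.version_info[0], 'none', 'any') in tags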
@@ -0,0 +1,69 @@
from __future__ import absolute_import

import logging

from .req_install import InstallRequirement
from .req_set import RequirementSet
from .req_file import parse_requirements
from pip._internal.utils.logging import indent_log


__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


def install_given_reqs(to_install, install_options, global_options=(),
                       *args, **kwargs):
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
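
# The try/except/else above is a two-phase swap. The same pattern in
# isolation, with hypothetical stand-ins for the uninstall/install steps:
def _swap_install(uninstall, install):
    snapshot = uninstall()      # stash the currently installed version
    try:
        install()
    except Exception:
        snapshot.rollback()     # failure: restore the stashed version
        raise
    else:
        snapshot.commit()       # success: discard the stash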
@@ -0,0 +1,338 @@
"""
Requirements file parsing
"""

from __future__ import absolute_import

import optparse
import os
import re
import shlex
import sys

from pip._vendor.six.moves import filterfalse
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal import cmdoptions
from pip._internal.download import get_file_content
from pip._internal.exceptions import RequirementsFileParseError
from pip._internal.req.req_install import InstallRequirement

__all__ = ['parse_requirements']

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]


def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines_enum = preprocess(content, options)

    for line_number, line in lines_enum:
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint)
        for req in req_iter:
            yield req
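
# A minimal usage sketch; in this pip version PipSession lives in
# pip._internal.download, and 'requirements.txt' is a hypothetical file.
def _demo_parse():
    from pip._internal.download import PipSession
    for req in parse_requirements('requirements.txt', session=PipSession()):
        print(req.name)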


def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines_enum = enumerate(content.splitlines(), start=1)
    lines_enum = join_lines(lines_enum)
    lines_enum = ignore_comments(lines_enum)
    lines_enum = skip_regex(lines_enum, options)
    lines_enum = expand_env_variables(lines_enum)
    return lines_enum


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
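
# break_args_options() in action: everything before the first option-like
# token is the requirement, the rest is later handed to shlex/optparse.
assert break_args_options('SomeProject==1.0 --hash=sha256:abcd') == \
    ('SomeProject==1.0', '--hash=sha256:abcd')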
|
||||||
|
|
||||||
|
|
||||||
|
def build_parser(line):
|
||||||
|
"""
|
||||||
|
Return a parser for parsing requirement lines
|
||||||
|
"""
|
||||||
|
parser = optparse.OptionParser(add_help_option=False)
|
||||||
|
|
||||||
|
option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
|
||||||
|
for option_factory in option_factories:
|
||||||
|
option = option_factory()
|
||||||
|
parser.add_option(option)
|
||||||
|
|
||||||
|
# By default optparse sys.exits on parsing errors. We want to wrap
|
||||||
|
# that in our own exception.
|
||||||
|
def parser_exit(self, msg):
|
||||||
|
# add offending line
|
||||||
|
msg = 'Invalid requirement: %s\n%s' % (line, msg)
|
||||||
|
raise RequirementsFileParseError(msg)
|
||||||
|
parser.exit = parser_exit
|
||||||
|
|
||||||
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
def join_lines(lines_enum):
|
||||||
|
"""Joins a line ending in '\' with the previous line (except when following
|
||||||
|
comments). The joined line takes on the index of the first line.
|
||||||
|
"""
|
||||||
|
primary_line_number = None
|
||||||
|
new_line = []
|
||||||
|
for line_number, line in lines_enum:
|
||||||
|
if not line.endswith('\\') or COMMENT_RE.match(line):
|
||||||
|
if COMMENT_RE.match(line):
|
||||||
|
# this ensures comments are always matched later
|
||||||
|
line = ' ' + line
|
||||||
|
if new_line:
|
||||||
|
new_line.append(line)
|
||||||
|
yield primary_line_number, ''.join(new_line)
|
||||||
|
new_line = []
|
||||||
|
else:
|
||||||
|
yield line_number, line
|
||||||
|
else:
|
||||||
|
if not new_line:
|
||||||
|
primary_line_number = line_number
|
||||||
|
new_line.append(line.strip('\\'))
|
||||||
|
|
||||||
|
# last line contains \
|
||||||
|
if new_line:
|
||||||
|
yield primary_line_number, ''.join(new_line)
|
||||||
|
|
||||||
|
# TODO: handle space after '\'.
|
||||||
|
|
||||||
|
|
||||||
|
def ignore_comments(lines_enum):
|
||||||
|
"""
|
||||||
|
Strips comments and filter empty lines.
|
||||||
|
"""
|
||||||
|
for line_number, line in lines_enum:
|
||||||
|
line = COMMENT_RE.sub('', line)
|
||||||
|
line = line.strip()
|
||||||
|
if line:
|
||||||
|
yield line_number, line
|
||||||
|
|
||||||
|
|
||||||
|
def skip_regex(lines_enum, options):
|
||||||
|
"""
|
||||||
|
Skip lines that match '--skip-requirements-regex' pattern
|
||||||
|
|
||||||
|
Note: the regex pattern is only built once
|
||||||
|
"""
|
||||||
|
skip_regex = options.skip_requirements_regex if options else None
|
||||||
|
if skip_regex:
|
||||||
|
pattern = re.compile(skip_regex)
|
||||||
|
lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
|
||||||
|
return lines_enum
|
||||||
|
|
||||||
|
|
||||||
|
def expand_env_variables(lines_enum):
|
||||||
|
"""Replace all environment variables that can be retrieved via `os.getenv`.
|
||||||
|
|
||||||
|
The only allowed format for environment variables defined in the
|
||||||
|
requirement file is `${MY_VARIABLE_1}` to ensure two things:
|
||||||
|
|
||||||
|
1. Strings that contain a `$` aren't accidentally (partially) expanded.
|
||||||
|
2. Ensure consistency across platforms for requirement files.
|
||||||
|
|
||||||
|
These points are the result of a discusssion on the `github pull
|
||||||
|
request #3514 <https://github.com/pypa/pip/pull/3514>`_.
|
||||||
|
|
||||||
|
Valid characters in variable names follow the `POSIX standard
|
||||||
|
<http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
|
||||||
|
to uppercase letter, digits and the `_` (underscore).
|
||||||
|
"""
|
||||||
|
for line_number, line in lines_enum:
|
||||||
|
for env_var, var_name in ENV_VAR_RE.findall(line):
|
||||||
|
value = os.getenv(var_name)
|
||||||
|
if not value:
|
||||||
|
continue
|
||||||
|
|
||||||
|
line = line.replace(env_var, value)
|
||||||
|
|
||||||
|
yield line_number, line
|
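The helpers above (join_lines, ignore_comments, expand_env_variables) compose into a small preprocessing pipeline over (line_number, line) pairs before any requirement is parsed. A minimal standalone sketch of that pipeline follows; the COMMENT_RE and ENV_VAR_RE patterns, the sample lines, and the PIP_INDEX_URL variable name are re-declared here as illustrative assumptions, not the module's exact definitions:

import os
import re

# Assumed stand-ins for the module's COMMENT_RE / ENV_VAR_RE patterns.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

raw = [
    "requests==2.18.4  # pinned",
    "--index-url \\",
    "${PIP_INDEX_URL}",
    "",
]

def join_lines(lines_enum):
    # Same joining rule as above: a trailing backslash continues the line.
    primary, buf = None, []
    for n, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if buf:
                buf.append(line)
                yield primary, ''.join(buf)
                buf = []
            else:
                yield n, line
        else:
            if not buf:
                primary = n
            buf.append(line.strip('\\'))

os.environ.setdefault('PIP_INDEX_URL', 'https://example.org/simple')
for n, line in join_lines(enumerate(raw, start=1)):
    line = COMMENT_RE.sub('', line).strip()          # ignore_comments
    for var, name in ENV_VAR_RE.findall(line):       # expand_env_variables
        value = os.getenv(name)
        if value:
            line = line.replace(var, value)
    if line:
        print(n, line)
# -> 1 requests==2.18.4
# -> 2 --index-url https://example.org/simple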
File diff suppressed because it is too large
@@ -0,0 +1,164 @@
from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._internal.exceptions import InstallationError
from pip._internal.utils.logging import indent_log
from pip._internal.wheel import Wheel

logger = logging.getLogger(__name__)


class RequirementSet(object):

    def __init__(self, require_hashes=False):
        """Create a RequirementSet.

        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """

        self.requirements = OrderedDict()
        self.require_hashes = require_hashes

        # Mapping of alias: real_name
        self.requirement_aliases = {}
        self.unnamed_requirements = []
        self.successfully_downloaded = []
        self.reqs_to_cleanup = []

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.info("Ignoring %s: markers '%s' don't match your "
                        "environment", install_req.name,
                        install_req.markers)
            return [], None

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                            install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name,
                        )
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req

            # We return install_req here to allow for the caller to add it to
            # the dependency information for the parent package.
            return result, install_req

    def has_requirement(self, project_name):
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
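get_requirement above resolves names case-insensitively through the requirement_aliases map that add_requirement populates. A toy sketch of just that lookup scheme, standalone and not using pip's classes:

# Minimal sketch of the alias lookup used by RequirementSet above.
requirements = {}
requirement_aliases = {}

def add(name, req):
    requirements[name] = req
    if name.lower() != name:            # remember the canonical spelling
        requirement_aliases[name.lower()] = name

def get(project_name):
    for name in (project_name, project_name.lower()):
        if name in requirements:
            return requirements[name]
        if name in requirement_aliases:
            return requirements[requirement_aliases[name]]
    raise KeyError("No project with the name %r" % project_name)

add("Django", "Django==2.0")
print(get("django"))   # -> Django==2.0, found via the alias map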
@@ -0,0 +1,455 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
    normalize_path, renames,
)
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path


def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        should_add = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_add:
            short_paths.add(path)
    return short_paths


def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets deciding which paths to display to the user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = list(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.normcase(os.path.join(dirpath, fname))
                if os.path.isfile(file_) and file_ not in files:
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = TempDirectory(kind="uninstall")
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def _stash(self, path):
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []

    @classmethod
    def from_dist(cls, dist):
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of these uninstall cases matters: with 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
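The compact helper above keeps only the shortest covering paths from a set. A quick standalone check of that behavior, with the same algorithm reproduced inline so nothing from pip's internals needs to be importable:

import os

def compact(paths):
    # Same algorithm as above: keep a path only if no shorter kept path
    # already covers it as a parent directory.
    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        covered = any(
            path.startswith(short.rstrip("*")) and
            path[len(short.rstrip("*").rstrip(sep))] == sep
            for short in short_paths
        )
        if not covered:
            short_paths.add(path)
    return short_paths

print(sorted(compact({
    "/a/path",
    "/a/path/to/a/file.txt",
    "/another/file.txt",
})))
# -> ['/a/path', '/another/file.txt']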
@@ -0,0 +1,354 @@
"""Dependency Resolution

The dependency resolution in pip is performed as follows:

for top-level requirements:
    a. only one spec allowed per project, regardless of conflicts or not.
       otherwise a "double requirement" exception is raised
    b. they override sub-dependency requirements.
for sub-dependencies
    a. "first found, wins" (where the order is breadth first)
"""

import logging
from collections import defaultdict
from itertools import chain

from pip._internal.exceptions import (
    BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
    UnsupportedPythonVersion,
)

from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import dist_in_usersite, ensure_dir
from pip._internal.utils.packaging import check_dist_requires_python

logger = logging.getLogger(__name__)


class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
                 ignore_dependencies, ignore_installed, ignore_requires_python,
                 force_reinstall, isolated, upgrade_strategy):
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        #       information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        self.require_hashes = None  # This is set in resolve

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site

        self._discovered_dependencies = defaultdict(list)

    def resolve(self, requirement_set):
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        locations = self.finder.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'not upgraded as not directly required'
            return 'already satisfied'

        # Check for the possibility of an upgrade. For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error. It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        """Takes an InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(self, requirement_set, req_to_install):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.dist(self.finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as err:
            if self.ignore_requires_python:
                logger.warning(err.args[0])
            else:
                raise

        more_reqs = []

        def add_req(subreq, extras_requested):
            sub_install_req = InstallRequirement.from_req(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
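get_installation_order above is a depth-first post-order walk over _discovered_dependencies: each requirement is appended only after everything it depends on. A standalone sketch of the same walk on a toy graph; the package names and graph below are invented for illustration:

# Toy version of the scheduling walk in get_installation_order above.
discovered_dependencies = {
    "app": ["lib-a", "lib-b"],
    "lib-a": ["lib-b"],
    "lib-b": [],
}

order = []
ordered = set()

def schedule(name):
    if name in ordered:
        return
    ordered.add(name)
    for dep in discovered_dependencies.get(name, []):
        schedule(dep)          # dependencies are appended first ...
    order.append(name)         # ... so each package follows its deps

schedule("app")
print(order)   # -> ['lib-b', 'lib-a', 'app']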
@@ -0,0 +1,8 @@
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
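These constants are process exit codes. A minimal hedged sketch of the usual pattern, with main() as a stand-in command rather than any real pip entry point:

import sys

SUCCESS, ERROR = 0, 1   # mirrors the constants above

def main():
    try:
        pass             # ... do the actual work here
    except Exception:
        return ERROR
    return SUCCESS

if __name__ == "__main__":
    sys.exit(main())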
@@ -0,0 +1,258 @@
"""
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
"""
from __future__ import absolute_import

import os
import sys

from pip._vendor.six import PY2, text_type

from pip._internal.compat import WINDOWS, expanduser


def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

    "appname" is the name of the application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go
    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`). Apps
    typically put cache data somewhere *under* the given dir here. Some
    examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    """
    if WINDOWS:
        # Get the base path
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))

        # When using Python 2, return paths as bytes on Windows like we do on
        # other operating systems. See helper function docs for more details.
        if PY2 and isinstance(path, text_type):
            path = _win_path_to_bytes(path)

        # Add our app name and Cache directory to it
        path = os.path.join(path, appname, "Cache")
    elif sys.platform == "darwin":
        # Get the base path
        path = expanduser("~/Library/Caches")

        # Add our app name to it
        path = os.path.join(path, appname)
    else:
        # Get the base path
        path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))

        # Add our app name to it
        path = os.path.join(path, appname)

    return path


def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of the application.
        If None, just the system directory is returned.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
    elif sys.platform == "darwin":
        path = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        ) if os.path.isdir(os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
        ) else os.path.join(
            expanduser('~/.config/'),
            appname,
        )
    else:
        path = os.path.join(
            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
            appname,
        )

    return path


def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of the application.
        If None, just the system directory is returned.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory. That means that for users on a
        Windows network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories are:
        macOS:      same as user_data_dir
        Unix:       ~/.config/<AppName>
        Win *:      same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        path = user_data_dir(appname, roaming=roaming)
    elif sys.platform == "darwin":
        path = user_data_dir(appname)
    else:
        path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
        path = os.path.join(path, appname)

    return path


# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of the application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        pathlist = [os.path.join(path, appname)]
    elif sys.platform == 'darwin':
        pathlist = [os.path.join('/Library/Application Support', appname)]
    else:
        # try looking in $XDG_CONFIG_DIRS
        xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        if xdg_config_dirs:
            pathlist = [
                os.path.join(expanduser(x), appname)
                for x in xdg_config_dirs.split(os.pathsep)
            ]
        else:
            pathlist = []

        # always look in /etc directly as well
        pathlist.append('/etc')

    return pathlist


# -- Windows support functions --

def _get_win_folder_from_registry(csidl_name):
    """
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory


def _get_win_folder_with_ctypes(csidl_name):
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry


def _win_path_to_bytes(path):
    """Encode Windows paths to bytes. Only used on Python 2.

    Motivation is to be consistent with other operating systems where paths
    are also returned as bytes. This avoids problems mixing bytes and Unicode
    elsewhere in the codebase. For more details and discussion see
    <https://github.com/pypa/pip/issues/3463>.

    If encoding using ASCII and MBCS fails, return the original Unicode path.
    """
    for encoding in ('ASCII', 'MBCS'):
        try:
            return path.encode(encoding)
        except (UnicodeEncodeError, LookupError):
            pass
    return path
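The per-platform branches above all reduce to "base directory from the environment, then append the app name". A minimal sketch of just the Unix branch of user_cache_dir, reproduced inline so it runs without pip's vendored module:

import os

def user_cache_dir(appname):
    # Linux/Unix branch of the function above: $XDG_CACHE_HOME or ~/.cache.
    path = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
    return os.path.join(path, appname)

print(user_cache_dir("pip"))   # e.g. /home/user/.cache/pip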
@@ -0,0 +1,77 @@
"""
A module that implements tooling to enable easy warnings about deprecations.
"""
from __future__ import absolute_import

import logging
import warnings

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any


class PipDeprecationWarning(Warning):
    pass


class Pending(object):
    pass


class RemovedInPip11Warning(PipDeprecationWarning):
    pass


class RemovedInPip12Warning(PipDeprecationWarning, Pending):
    pass


# Warnings <-> Logging Integration


_warnings_showwarning = None  # type: Any


def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
    else:
        if issubclass(category, PipDeprecationWarning):
            # We use a specially named logger which will handle all of the
            # deprecation messages for pip.
            logger = logging.getLogger("pip._internal.deprecations")

            # This is purposely using the % formatter here instead of letting
            # the logging module handle the interpolation. This is because we
            # want it to appear as if someone typed this entire message out.
            log_message = "DEPRECATION: %s" % message

            # PipDeprecationWarnings that are Pending still have at least 2
            # versions to go until they are removed so they can just be
            # warnings. Otherwise, they will be removed in the very next
            # version of pip. We want these to be more obvious so we use the
            # ERROR logging level.
            if issubclass(category, Pending):
                logger.warning(log_message)
            else:
                logger.error(log_message)
        else:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )


def install_warning_logger():
    # Enable our Deprecation Warnings
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _warnings_showwarning

    if _warnings_showwarning is None:
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
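The module above hooks warnings.showwarning so that its own warning class is routed into logging while everything else falls through to the saved original handler. A self-contained sketch of the same hook technique; the class name, logger name, and message here are illustrative, not pip's own:

import logging
import warnings

logging.basicConfig(level=logging.DEBUG)

class PipDeprecationWarning(Warning):
    pass

_original = warnings.showwarning

def _showwarning(message, category, filename, lineno, file=None, line=None):
    # Route our own warning class into logging; defer everything else.
    if file is None and issubclass(category, PipDeprecationWarning):
        logging.getLogger("deprecations").error("DEPRECATION: %s" % message)
    else:
        _original(message, category, filename, lineno, file, line)

warnings.simplefilter("default", PipDeprecationWarning, append=True)
warnings.showwarning = _showwarning

warnings.warn("old flag goes away next release", PipDeprecationWarning)
# -> ERROR:deprecations:DEPRECATION: old flag goes away next release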
@ -0,0 +1,33 @@
import codecs
import locale
import re
import sys

BOMS = [
    (codecs.BOM_UTF8, 'utf8'),
    (codecs.BOM_UTF16, 'utf16'),
    (codecs.BOM_UTF16_BE, 'utf16-be'),
    (codecs.BOM_UTF16_LE, 'utf16-le'),
    (codecs.BOM_UTF32, 'utf32'),
    (codecs.BOM_UTF32_BE, 'utf32-be'),
    (codecs.BOM_UTF32_LE, 'utf32-le'),
]

ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    """Check a bytes string for a BOM to correctly detect the encoding

    Fallback to locale.getpreferredencoding(False) like open() on Python3"""
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Let's check the first two lines as in PEP 263
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#' and ENCODING_RE.search(line):
            encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
            return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
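A quick sketch of auto_decode's two detection paths (illustrative only; module path assumed as above):

    import codecs

    from pip._internal.utils.encoding import auto_decode

    # A BOM wins: it is stripped and the matching codec is used.
    assert auto_decode(codecs.BOM_UTF8 + b'hi') == 'hi'

    # Otherwise a PEP 263 coding comment in the first two lines is honoured.
    data = b'# -*- coding: latin-1 -*-\nname = "caf\xe9"\n'
    assert auto_decode(data) == data.decode('latin-1')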
@ -0,0 +1,28 @@
import os
import os.path

from pip._internal.compat import get_path_uid


def check_path_owner(path):
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            previous, path = path, os.path.dirname(path)
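check_path_owner walks up from a possibly not-yet-existing path to its first existing ancestor before testing ownership; this is how pip decides whether it may write its cache. A hypothetical call (the path is illustrative):

    import os

    from pip._internal.utils.filesystem import check_path_owner

    cache_dir = os.path.expanduser('~/.cache/pip/http')  # may not exist yet
    if not check_path_owner(cache_dir):
        print('cache dir is not owned by the current user; skipping cache')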
@ -0,0 +1,84 @@
from __future__ import absolute_import

import ctypes
import re
import warnings


def glibc_version_string():
    "Returns glibc version string, or None if not using glibc."

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    process_namespace = ctypes.CDLL(None)
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    # Parse string and check against requested version.
    #
    # We use a regexp instead of str.split because we want to discard any
    # random junk that might come after the minor version -- this might happen
    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
    # uses version strings like "2.20-2014.11"). See gh-3588.
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    return (int(m.group("major")) == required_major and
            int(m.group("minor")) >= minimum_minor)


def have_compatible_glibc(required_major, minimum_minor):
    version_str = glibc_version_string()
    if version_str is None:
        return False
    return check_glibc_version(version_str, required_major, minimum_minor)


# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.7')
#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.9')
#
# But the truth is:
#
#   ~$ ldd --version
#   ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    glibc_version = glibc_version_string()
    if glibc_version is None:
        return ("", "")
    else:
        return ("glibc", glibc_version)
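check_glibc_version is a pure function, so it can be exercised directly; note how the regexp deliberately ignores vendor suffixes (illustrative sketch):

    from pip._internal.utils.glibc import check_glibc_version, libc_ver

    assert check_glibc_version('2.20-2014.11', 2, 17) is True  # suffix discarded
    assert check_glibc_version('2.5', 2, 17) is False          # minor too old

    # Probes the running interpreter's libc; ('', '') on non-glibc systems.
    print(libc_ver())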
@ -0,0 +1,94 @@
from __future__ import absolute_import

import hashlib

from pip._vendor.six import iteritems, iterkeys, itervalues

from pip._internal.exceptions import (
    HashMismatch, HashMissing, InstallationError,
)
from pip._internal.utils.misc import read_chunks

# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ['sha256', 'sha384', 'sha512']


class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = {} if hashes is None else hashes

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in iterkeys(self._allowed):
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        for chunk in chunks:
            for hash in itervalues(gots):
                hash.update(chunk)

        for hash_name, got in iteritems(gots):
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        return self.__nonzero__()


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
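A usage sketch for Hashes, mirroring how pip checks --hash requirements (illustrative; HashMismatch comes from pip._internal.exceptions, as imported above):

    import hashlib

    from pip._internal.exceptions import HashMismatch
    from pip._internal.utils.hashes import FAVORITE_HASH, Hashes

    payload = b'example archive bytes'
    good = Hashes({FAVORITE_HASH: [hashlib.sha256(payload).hexdigest()]})
    good.check_against_chunks(iter([payload]))  # returns silently on a match

    bad = Hashes({FAVORITE_HASH: ['0' * 64]})
    try:
        bad.check_against_chunks(iter([payload]))
    except HashMismatch:
        print('digest did not match any allowed value')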
@ -0,0 +1,132 @@
from __future__ import absolute_import

import contextlib
import logging
import logging.handlers
import os

from pip._internal.compat import WINDOWS
from pip._internal.utils.misc import ensure_dir

try:
    import threading
except ImportError:
    import dummy_threading as threading  # type: ignore


try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


_log_state = threading.local()
_log_state.indentation = 0


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation():
    return getattr(_log_state, 'indentation', 0)


class IndentingFormatter(logging.Formatter):

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        formatted = logging.Formatter.format(self, record)
        formatted = "".join([
            (" " * get_indentation()) + line
            for line in formatted.splitlines(True)
        ])
        return formatted


def _color_wrap(*colors):
    def wrapped(inp):
        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
    return wrapped


class ColorizedStreamHandler(logging.StreamHandler):

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None, no_color=None):
        logging.StreamHandler.__init__(self, stream)
        self._no_color = no_color

        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama or if told not to
        if not colorama or self._no_color:
            return False

        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):

    def _open(self):
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)


class MaxLevelFilter(logging.Filter):

    def __init__(self, level):
        self.level = level

    def filter(self, record):
        return record.levelno < self.level
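indent_log and IndentingFormatter cooperate through the thread-local indentation counter; a small sketch of the pairing (illustrative):

    import logging

    from pip._internal.utils.logging import IndentingFormatter, indent_log

    handler = logging.StreamHandler()
    handler.setFormatter(IndentingFormatter('%(message)s'))
    demo = logging.getLogger('demo')
    demo.addHandler(handler)
    demo.setLevel(logging.INFO)

    demo.info('Collecting example')
    with indent_log():
        demo.info('Downloading example-1.0.tar.gz')  # prefixed with 2 spaces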
@ -0,0 +1,851 @@
from __future__ import absolute_import

import contextlib
import errno
import io
import locale
# we have a submodule named 'logging' which would shadow this if we used the
# regular name:
import logging as std_logging
import os
import posixpath
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile
from collections import deque

from pip._vendor import pkg_resources
# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import.
from pip._vendor.retrying import retry  # type: ignore
from pip._vendor.six import PY2
from pip._vendor.six.moves import input

from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs
from pip._internal.exceptions import InstallationError
from pip._internal.locations import (
    running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
    write_delete_marker_file,
)

if PY2:
    from io import BytesIO as StringIO
else:
    from io import StringIO

__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')


def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    try:
        return __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)


def ensure_dir(path):
    """os.path.makedirs without EEXIST."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def get_prog():
    try:
        prog = os.path.basename(sys.argv[0])
        if prog in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
        else:
            return prog
    except (AttributeError, TypeError, IndexError):
        pass
    return 'pip'


# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)


def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    # if file type currently read only
    if os.stat(path).st_mode & stat.S_IREAD:
        # convert to read/write
        os.chmod(path, stat.S_IWRITE)
        # use the original function to repeat the operation
        func(path)
        return
    else:
        raise


def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path


def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension


def ask_path_exists(message, options):
    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
        if action in options:
            return action
    return ask(message, options)


def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print(
                'Your response (%r) was not one of the expected responses: '
                '%s' % (response, ', '.join(options))
            )
        else:
            return response


def format_size(bytes):
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    elif bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    elif bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    else:
        return '%ibytes' % bytes


def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    if not os.path.isdir(path):
        return False
    setup_py = os.path.join(path, 'setup.py')
    if os.path.isfile(setup_py):
        return True
    return False


def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    return (re.search(r'<title>[^<]*Revision \d+:', html) and
            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))


def file_contents(filename):
    with open(filename, 'rb') as fp:
        return fp.read().decode('utf-8')


def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        yield chunk


def split_leading_dir(path):
    path = path.lstrip('/').lstrip('\\')
    if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
                        '\\' not in path):
        return path.split('/', 1)
    elif '\\' in path:
        return path.split('\\', 1)
    else:
        return path, ''


def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True


def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.

    """
    path = expanduser(path)
    if resolve_symlinks:
        path = os.path.realpath(path)
    else:
        path = os.path.abspath(path)
    return os.path.normcase(path)


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass


def is_local(path):
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    """
    if not running_under_virtualenv():
        return True
    return normalize_path(path).startswith(normalize_path(sys.prefix))


def dist_is_local(dist):
    """
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.

    """
    return is_local(dist_location(dist))


def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    norm_path = normalize_path(dist_location(dist))
    return norm_path.startswith(normalize_path(user_site))


def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in
    sysconfig.get_python_lib().
    """
    return normalize_path(
        dist_location(dist)
    ).startswith(normalize_path(site_packages))


def dist_is_editable(dist):
    """Is distribution an editable install?"""
    for path_item in sys.path:
        egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
        if os.path.isfile(egg_link):
            return True
    return False


def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True, only report editables.

    If ``user_only`` is True, only report installations in the user
    site directory.

    """
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(d):
            return True

    if include_editables:
        def editable_test(d):
            return True
    else:
        def editable_test(d):
            return not dist_is_editable(d)

    if editables_only:
        def editables_only_test(d):
            return dist_is_editable(d)
    else:
        def editables_only_test(d):
            return True

    if user_only:
        user_test = dist_in_usersite
    else:
        def user_test(d):
            return True

    return [d for d in pkg_resources.working_set
            if local_test(d) and
            d.key not in skip and
            editable_test(d) and
            editables_only_test(d) and
            user_test(d)
            ]


def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    """
    sites = []
    if running_under_virtualenv():
        if virtualenv_no_global():
            sites.append(site_packages)
        else:
            sites.append(site_packages)
            if user_site:
                sites.append(user_site)
    else:
        if user_site:
            sites.append(user_site)
        sites.append(site_packages)

    for site in sites:
        egglink = os.path.join(site, dist.project_name) + '.egg-link'
        if os.path.isfile(egglink):
            return egglink


def dist_location(dist):
    """
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.

    """
    egg_link = egg_link_path(dist)
    if egg_link:
        return egg_link
    return dist.location


def current_umask():
    """Get the current umask which involves having to set it temporarily."""
    mask = os.umask(0)
    os.umask(mask)
    return mask


def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()


def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # does the member have any execute permissions for
                # user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()


def unpack_file(filename, location, content_type, link):
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip._internal.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )


def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, unset_environ=None, spinner=None):
    """
    Args:
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
    """
    if unset_environ is None:
        unset_environ = []
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
            stdout=stdout, cwd=cwd, env=env,
        )
        proc.stdin.close()
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    if stdout is not None:
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    try:
        proc.wait()
    finally:
        if proc.stdout:
            proc.stdout.close()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)


def read_text_file(filename):
    """Return the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
    for enc in encodings:
        try:
            data = data.decode(enc)
        except UnicodeDecodeError:
            continue
        break

    assert type(data) != bytes  # Latin1 should have worked.
    return data


def _make_build_dir(build_dir):
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)


class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""
    def __init__(self, lines):
        self._gen = (l for l in lines)

    def readline(self):
        try:
            try:
                return next(self._gen)
            except NameError:
                return self._gen.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._gen


class StreamWrapper(StringIO):

    @classmethod
    def from_stream(cls, orig_stream):
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding


@contextlib.contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)


def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')


class cached_property(object):
    """A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # We're being accessed from the class itself, not from an object
            return self
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value


def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Create a requirement that we'll look for inside of setuptools.
    req = pkg_resources.Requirement.parse(dist_name)

    # We want to avoid having this cached, so we need to construct a new
    # working set each time.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # Get the installed distribution from our working set
    dist = working_set.find(req)

    # Check to see if we got an installed distribution or not, if we did
    # we want to return its version.
    return dist.version if dist else None


def consume(iterator):
    """Consume an iterable at C speed."""
    deque(iterator, maxlen=0)


# Simulates an enum
def enum(*sequential, **named):
    enums = dict(zip(sequential, range(len(sequential))), **named)
    reverse = {value: key for key, value in enums.items()}
    enums['reverse_mapping'] = reverse
    return type('Enum', (), enums)
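A few of these helpers are easy to sanity-check in isolation (illustrative sketch):

    from pip._internal.utils.misc import enum, format_size, split_leading_dir

    Level = enum('DEBUG', 'INFO', VERBOSE=99)
    assert Level.INFO == 1 and Level.reverse_mapping[99] == 'VERBOSE'

    assert format_size(2500000) == '2.5MB'
    assert split_leading_dir('pkg-1.0/setup.py') == ['pkg-1.0', 'setup.py']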
@ -0,0 +1,163 @@
from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version

from pip._internal.compat import WINDOWS
from pip._internal.index import PackageFinder
from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, get_installed_version

SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class VirtualenvSelfCheckState(object):
    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        # Attempt to write out our version check file
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                {
                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                    "pypi_version": pypi_version,
                },
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )


class GlobalSelfCheckState(object):
    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            self.state = {}

    def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def load_selfcheck_statefile():
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    else:
        return GlobalSelfCheckState()


def pip_version_check(session, options):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Let's use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
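The weekly throttle in pip_version_check boils down to one timestamp comparison; an illustrative sketch, reusing the format string defined above rather than asserting any module path:

    import datetime

    SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"

    now = datetime.datetime.utcnow()
    last_check = datetime.datetime.strptime(
        now.strftime(SELFCHECK_DATE_FMT), SELFCHECK_DATE_FMT
    )
    # The cached pypi_version is reused while this stays under one week.
    print((now - last_check).total_seconds() < 7 * 24 * 60 * 60)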
@ -0,0 +1,70 @@
from __future__ import absolute_import

import logging
import sys
from email.parser import FeedParser  # type: ignore

from pip._vendor import pkg_resources
from pip._vendor.packaging import specifiers, version

from pip._internal import exceptions

logger = logging.getLogger(__name__)


def check_requires_python(requires_python):
    """
    Check if the python version in use matches the `requires_python` specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not match the
    requirement.

    Raises an InvalidSpecifier if `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)

    # We only use major.minor.micro
    python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
    return python_version in requires_python_specifier


def get_metadata(dist):
    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
            dist.has_metadata('METADATA')):
        return dist.get_metadata('METADATA')
    elif dist.has_metadata('PKG-INFO'):
        return dist.get_metadata('PKG-INFO')


def check_dist_requires_python(dist):
    metadata = get_metadata(dist)
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info_dict = feed_parser.close()
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s",
            dist.project_name, requires_python, e,
        )
        return


def get_installer(dist):
    if dist.has_metadata('INSTALLER'):
        for line in dist.get_metadata_lines('INSTALLER'):
            if line.strip():
                return line.strip()
    return ''
@ -0,0 +1,8 @@
# Shim to wrap setup.py invocation with setuptools
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
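The shim is a %-template whose single %r slot receives the setup.py path; it is typically spliced into a subprocess argument list along these lines (path and extra arguments are hypothetical):

    import sys

    from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM

    setup_py = '/tmp/example/setup.py'  # illustrative path
    args = [sys.executable, '-c', SETUPTOOLS_SHIM % setup_py, 'egg_info']
    print(args)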
@ -0,0 +1,82 @@
from __future__ import absolute_import

import logging
import os.path
import tempfile

from pip._internal.utils.misc import rmtree

logger = logging.getLogger(__name__)


class TempDirectory(object):
    """Helper class that owns and cleans up a temporary directory.

    This class can be used as a context manager or as an OO representation of a
    temporary directory.

    Attributes:
        path
            Location to the created temporary directory or None
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    Methods:
        create()
            Creates a temporary directory and stores its path in the path
            attribute.
        cleanup()
            Deletes the temporary directory and sets path attribute to None

    When used as a context manager, a temporary directory is created on
    entering the context and, if the delete attribute is True, on exiting the
    context the created directory is deleted.
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        if path is None and delete is None:
            # If we were not given an explicit directory, and we were not given
            # an explicit delete option, then we'll default to deleting.
            delete = True

        self.path = path
        self.delete = delete
        self.kind = kind

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.path)

    def __enter__(self):
        self.create()
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def create(self):
        """Create a temporary directory and store its path in self.path
        """
        if self.path is not None:
            logger.debug(
                "Skipped creation of temporary directory: {}".format(self.path)
            )
            return
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        self.path = os.path.realpath(
            tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
        )
        logger.debug("Created temporary directory: {}".format(self.path))

    def cleanup(self):
        """Remove the temporary directory created and reset state
        """
        if self.path is not None and os.path.exists(self.path):
            rmtree(self.path)
        self.path = None
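TempDirectory's context-manager behaviour in one sketch (illustrative):

    import os.path

    from pip._internal.utils.temp_dir import TempDirectory

    with TempDirectory(kind='unpack') as tmp:
        assert os.path.isdir(tmp.path)   # directory prefix is 'pip-unpack-'
    # delete defaulted to True, so cleanup() ran on exit:
    assert tmp.path is None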
@ -0,0 +1,29 @@
"""For neatly implementing static typing in pip.

`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.

Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.

As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.

This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.

In pip, all static-typing related imports should be guarded as follows:

    from pip._internal.utils.typing import MYPY_CHECK_RUNNING

    if MYPY_CHECK_RUNNING:
        from typing import ...

Ref: https://github.com/python/mypy/issues/3216
"""

MYPY_CHECK_RUNNING = False
@@ -0,0 +1,421 @@
from __future__ import absolute_import, division

import contextlib
import itertools
import logging
import sys
import time
from signal import SIGINT, default_int_handler, signal

from pip._vendor import six
from pip._vendor.progress.bar import (
    Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
    ShadyBar,
)
from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
from pip._vendor.progress.spinner import Spinner

from pip._internal.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None

logger = logging.getLogger(__name__)


def _select_progress_class(preferred, fallback):
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file, if this works then we'll assume that we can use the
    # fancier bar and if not we'll fall back to the plaintext bar.
    try:
        six.text_type().join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any


class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class SilentBar(Bar):

    def update(self):
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any


class DownloadProgressMixin(object):

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        if self.eta:
            return "eta %s" % self.eta_td
        return ""

    def iter(self, it, n=1):
        for x in it:
            yield x
            self.next(n)
        self.finish()


class WindowsMixin(object):

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"

# NOTE: The "type: ignore" comments on the following classes are there to
#       work around https://github.com/python/typing/issues/241


class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):  # type: ignore
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    pass


class DownloadIncrementalBar(BaseDownloadProgressBar,  # type: ignore
                             IncrementalBar):
    pass


class DownloadChargingBar(BaseDownloadProgressBar,  # type: ignore
                          ChargingBar):
    pass


class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar):  # type: ignore
    pass


class DownloadFillingSquaresBar(BaseDownloadProgressBar,  # type: ignore
                                FillingSquaresBar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    pass


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}


def DownloadProgressProvider(progress_bar, max=None):
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter


################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################


@contextlib.contextmanager
def hidden_cursor(file):
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        self._last_update = time.time()


class InteractiveSpinner(object):
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
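A minimal sketch of driving the helpers above, assuming the vendored module is importable as pip._internal.utils.ui; the byte chunks are a hypothetical stand-in for a real download:

import time

from pip._internal.utils.ui import DownloadProgressProvider, open_spinner

chunks = [b"\x00" * 1024] * 5  # hypothetical download chunks

# With a known total, "on" selects the default bar; max=None or 0 would
# select the spinner variant instead (see BAR_TYPES above).
progress = DownloadProgressProvider("on", max=len(chunks))
for chunk in progress(chunks):
    time.sleep(0.1)  # pretend to write the chunk somewhere

# open_spinner() guarantees a matching finish() however the block exits.
with open_spinner("Running setup.py bdist_wheel") as spinner:
    for _ in range(3):
        time.sleep(0.2)
        spinner.spin()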
@@ -0,0 +1,471 @@
"""Handles all VCS (version control) support"""
from __future__ import absolute_import

import copy
import errno
import logging
import os
import shutil
import sys

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.exceptions import BadCommand
from pip._internal.utils.misc import (
    display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Dict, Optional, Tuple
    from pip._internal.basecommand import Command

__all__ = ['vcs', 'get_src_requirement']


logger = logging.getLogger(__name__)


class RevOptions(object):

    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Instances of this class should be treated as if immutable.
    """

    def __init__(self, vcs, rev=None, extra_args=None):
        """
        Args:
          vcs: a VersionControl object.
          rev: the name of the revision to install.
          extra_args: a list of extra options.
        """
        if extra_args is None:
            extra_args = []

        self.extra_args = extra_args
        self.rev = rev
        self.vcs = vcs

    def __repr__(self):
        return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)

    @property
    def arg_rev(self):
        if self.rev is None:
            return self.vcs.default_arg_rev

        return self.rev

    def to_args(self):
        """
        Return the VCS-specific command arguments.
        """
        args = []
        rev = self.arg_rev
        if rev is not None:
            args += self.vcs.get_base_rev_args(rev)
        args += self.extra_args

        return args

    def to_display(self):
        if not self.rev:
            return ''

        return ' (to revision {})'.format(self.rev)

    def make_new(self, rev):
        """
        Make a copy of the current instance, but with a new rev.

        Args:
          rev: the name of the revision for the new object.
        """
        return self.vcs.make_rev_options(rev, extra_args=self.extra_args)


class VcsSupport(object):
    _registry = {}  # type: Dict[str, Command]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return self._registry.__iter__()

    @property
    def backends(self):
        return list(self._registry.values())

    @property
    def dirnames(self):
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type.name
        return None

    def get_backend(self, name):
        name = name.lower()
        if name in self._registry:
            return self._registry[name]

    def get_backend_from_location(self, location):
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None


vcs = VcsSupport()


class VersionControl(object):
    name = ''
    dirname = ''
    # List of supported schemes for this Version Control
    schemes = ()  # type: Tuple[str, ...]
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ = ()  # type: Tuple[str, ...]
    default_arg_rev = None  # type: Optional[str]

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def get_base_rev_args(self, rev):
        """
        Return the base revision arguments for a vcs command.

        Args:
          rev: the name of a revision to install.  Cannot be None.
        """
        raise NotImplementedError

    def make_rev_options(self, rev=None, extra_args=None):
        """
        Return a RevOptions object.

        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(self, rev, extra_args=extra_args)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs information
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the id of the current commit equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.

        Args:
          rev_options: a RevOptions object.
        """
        checkout = True
        prompt = False
        rev_display = rev_options.to_display()
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.is_commit_id_equal(dest, rev_options.rev):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs info) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location
        Used in get_info or check_destination
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   unset_environ=self.unset_environ,
                                   spinner=spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand(
                    'Cannot find command %r - do you have '
                    '%r installed and in your '
                    'PATH?' % (self.name, self.name))
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)


def get_src_requirement(dist, location):
    version_control = vcs.get_backend_from_location(location)
    if version_control:
        try:
            return version_control().get_src_requirement(dist,
                                                         location)
        except BadCommand:
            logger.warning(
                'cannot determine version of editable source in %s '
                '(%s command not found in path)',
                location,
                version_control.name,
            )
            return dist.as_requirement()
    logger.warning(
        'cannot determine version of editable source in %s (is not SVN '
        'checkout, Git clone, Mercurial clone or Bazaar branch)',
        location,
    )
    return dist.as_requirement()
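A small sketch of the registry and RevOptions plumbing defined above, using a hypothetical toy backend (FakeVcs and the example.com URL are illustration only, not a real pip backend) so that no VCS executable is invoked:

from pip._internal.vcs import VersionControl, vcs


class FakeVcs(VersionControl):
    # Hypothetical backend, just to exercise the plumbing.
    name = 'fake'
    dirname = '.fake'
    schemes = ('fake+https',)

    def get_base_rev_args(self, rev):
        return ['-r', rev]


vcs.register(FakeVcs)              # the same call the real backends make
backend = vcs.get_backend('fake')  # -> the FakeVcs class

repo = backend('fake+https://example.com/repo@1.2#egg=demo')
url, rev = repo.get_url_rev()      # ('https://example.com/repo', '1.2')
rev_options = repo.make_rev_options(rev)
print(rev_options.to_args())       # ['-r', '1.2']
print(rev_options.to_display())    # ' (to revision 1.2)'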
@@ -0,0 +1,113 @@
from __future__ import absolute_import

import logging
import os

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.download import path_to_url
from pip._internal.utils.misc import display_path, rmtree
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs

logger = logging.getLogger(__name__)


class Bazaar(VersionControl):
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # This is only needed for python <2.7.5
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])

    def get_base_rev_args(self, rev):
        return ['-r', rev]

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path, show_stdout=False,
            )

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        cmd_args = ['pull', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://, re-add it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location,
        )
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
@@ -0,0 +1,311 @@
from __future__ import absolute_import

import logging
import os.path
import re

from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip._internal.compat import samefile
from pip._internal.exceptions import BadCommand
from pip._internal.utils.misc import display_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs

urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit


logger = logging.getLogger(__name__)


HASH_REGEX = re.compile('[a-fA-F0-9]{40}')


def looks_like_hash(sha):
    return bool(HASH_REGEX.match(sha))


class Git(VersionControl):
    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )
    # Prevent the user's environment variables from interfering with pip:
    # https://github.com/pypa/pip/issues/1130
    unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
    default_arg_rev = 'HEAD'

    def __init__(self, url=None, *args, **kwargs):

        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                url = urlunsplit((scheme, netloc, newpath, query, fragment))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )

        super(Git, self).__init__(url, *args, **kwargs)

    def get_base_rev_args(self, rev):
        return [rev]

    def get_git_version(self):
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):].split()[0]
        else:
            version = ''
        # get first 3 positions of the git version because
        # on windows it is x.y.z.windows.t, and this parses as
        # LegacyVersion which is always smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        if not location.endswith('/'):
            location = location + '/'

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir.path
            )

    def get_revision_sha(self, dest, rev):
        """
        Return a commit hash for the given revision if it names a remote
        branch or tag.  Otherwise, return None.

        Args:
          dest: the repository directory.
          rev: the revision name.
        """
        # Pass rev to pre-filter the list.
        output = self.run_command(['show-ref', rev], cwd=dest,
                                  show_stdout=False, on_returncode='ignore')
        refs = {}
        for line in output.strip().splitlines():
            try:
                sha, ref = line.split()
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.
                raise ValueError('unexpected show-ref line: {!r}'.format(line))

            refs[ref] = sha

        branch_ref = 'refs/remotes/origin/{}'.format(rev)
        tag_ref = 'refs/tags/{}'.format(rev)

        return refs.get(branch_ref) or refs.get(tag_ref)

    def check_rev_options(self, dest, rev_options):
        """Check the revision options before checkout.

        Returns a new RevOptions object for the SHA1 of the branch or tag
        if found.

        Args:
          rev_options: a RevOptions object.
        """
        rev = rev_options.arg_rev
        sha = self.get_revision_sha(dest, rev)

        if sha is not None:
            return rev_options.make_new(sha)

        # Do not show a warning for the common case of something that has
        # the form of a Git commit hash.
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )
        return rev_options

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the current commit hash equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False

        return self.get_revision(dest) == name

    def switch(self, dest, url, rev_options):
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        cmd_args = ['checkout', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, rev_options):
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.check_rev_options(dest, rev_options)
        cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(dest, rev_options)
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    rev = rev_options.rev
                    # Only fetch the revision if it's a ref
                    if rev.startswith('refs/'):
                        self.run_command(
                            ['fetch', '-q', url] + rev_options.to_args(),
                            cwd=dest,
                        )
                        # Change the revision to the SHA of the ref we fetched
                        rev = 'FETCH_HEAD'
                    self.run_command(['checkout', '-q', rev], cwd=dest)

            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        """Return URL of the first remote encountered."""
        remotes = self.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            show_stdout=False, cwd=location,
        )
        remotes = remotes.splitlines()
        found_remote = remotes[0]
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    def get_revision(self, location):
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location,
        )
        return current_rev.strip()

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        if not repo:
            return None
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with an ssh:// scheme (e.g. GitHub).  But we need a scheme for
        parsing.  Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()

        return url, rev

    def update_submodules(self, location):
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        if super(Git, cls).controls_location(location):
            return True
        try:
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
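Two quick illustrations of helpers defined above; the repository URL and the hash below are hypothetical:

from pip._internal.vcs.git import Git, looks_like_hash

# A full 40-hex-digit string is treated as a commit hash, so no
# "did not find branch or tag" warning is emitted for it.
print(looks_like_hash('1f5de2e0fd4562a8e1ba99a9328e96a7211ab4c2'))  # True
print(looks_like_hash('stable/1.7.x'))                              # False

# Stub SSH URLs gain a temporary ssh:// scheme for parsing and lose it
# again on the way out:
url, rev = Git('git+git@example.com:user/repo.git@v2.0').get_url_rev()
print(url, rev)  # git@example.com:user/repo.git v2.0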
@@ -0,0 +1,105 @@
from __future__ import absolute_import

import logging
import os

from pip._vendor.six.moves import configparser

from pip._internal.download import path_to_url
from pip._internal.utils.misc import display_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs

logger = logging.getLogger(__name__)


class Mercurial(VersionControl):
    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def get_base_rev_args(self, rev):
        return [rev]

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )

    def switch(self, dest, url, rev_options):
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'], cwd=dest)
        cmd_args = ['update', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def get_url(self, location):
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_revision(self, location):
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        if not repo:
            return None
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Mercurial)
@@ -0,0 +1,271 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||||
|
|
||||||
|
from pip._internal.index import Link
|
||||||
|
from pip._internal.utils.logging import indent_log
|
||||||
|
from pip._internal.utils.misc import display_path, rmtree
|
||||||
|
from pip._internal.vcs import VersionControl, vcs
|
||||||
|
|
||||||
|
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
||||||
|
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
|
||||||
|
_svn_url_re = re.compile(r'URL: (.+)')
|
||||||
|
_svn_revision_re = re.compile(r'Revision: (.+)')
|
||||||
|
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
||||||
|
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Subversion(VersionControl):
|
||||||
|
name = 'svn'
|
||||||
|
dirname = '.svn'
|
||||||
|
repo_name = 'checkout'
|
||||||
|
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
|
||||||
|
|
||||||
|
def get_base_rev_args(self, rev):
|
||||||
|
return ['-r', rev]
|
||||||
|
|
||||||
|
def get_info(self, location):
|
||||||
|
"""Returns (url, revision), where both are strings"""
|
||||||
|
assert not location.rstrip('/').endswith(self.dirname), \
|
||||||
|
'Bad directory: %s' % location
|
||||||
|
output = self.run_command(
|
||||||
|
['info', location],
|
||||||
|
show_stdout=False,
|
||||||
|
extra_environ={'LANG': 'C'},
|
||||||
|
)
|
||||||
|
match = _svn_url_re.search(output)
|
||||||
|
if not match:
|
||||||
|
logger.warning(
|
||||||
|
'Cannot determine URL of svn checkout %s',
|
||||||
|
display_path(location),
|
||||||
|
)
|
||||||
|
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||||
|
return None, None
|
||||||
|
url = match.group(1).strip()
|
||||||
|
match = _svn_revision_re.search(output)
|
||||||
|
if not match:
|
||||||
|
logger.warning(
|
||||||
|
'Cannot determine revision of svn checkout %s',
|
||||||
|
display_path(location),
|
||||||
|
)
|
||||||
|
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||||
|
return url, None
|
||||||
|
return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = ['export'] + rev_options.to_args() + [url, location]
            self.run_command(cmd_args, show_stdout=False)

    def switch(self, dest, url, rev_options):
        cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)

    def update(self, dest, rev_options):
        cmd_args = ['update'] + rev_options.to_args() + [dest]
        self.run_command(cmd_args)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_location(self, dist, dependency_links):
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                base = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py, we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl
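

# A minimal sketch (illustrative only; the URL is hypothetical) of the
# credential stripping performed by remove_auth_from_url() above:
assert Subversion.remove_auth_from_url(
    'svn+https://user:secret@svn.example.org/repo/trunk'
) == 'svn+https://svn.example.org/repo/trunk'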


def get_rev_options(vcs, url, rev):
    """
    Return a RevOptions object.
    """
    r = urllib_parse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None

    extra_args = []
    if username:
        extra_args += ['--username', username]
    if password:
        extra_args += ['--password', password]

    return vcs.make_rev_options(rev, extra_args=extra_args)
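
# A minimal sketch (illustrative only): urlsplit exposes credentials directly,
# which is what get_rev_options() relies on to build the extra svn flags.
_r = urllib_parse.urlsplit('svn+https://user:secret@svn.example.org/repo')
assert (_r.username, _r.password) == ('user', 'secret')
# ...so extra_args becomes ['--username', 'user', '--password', 'secret'].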


vcs.register(Subversion)

@@ -0,0 +1,817 @@
"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import

import collections
import compileall
import copy
import csv
import hashlib
import logging
import os.path
import re
import shutil
import stat
import sys
import warnings
from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six import StringIO

from pip._internal import pep425tags
from pip._internal.build_env import BuildEnvironment
from pip._internal.download import path_to_url, unpack_url
from pip._internal.exceptions import (
    InstallationError, InvalidWheelFilename, UnsupportedWheel,
)
from pip._internal.locations import (
    PIP_DELETE_MARKER_FILENAME, distutils_scheme,
)
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    call_subprocess, captured_stdout, ensure_dir, read_chunks,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner

if MYPY_CHECK_RUNNING:
    from typing import Dict, List, Optional

wheel_ext = '.whl'

VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo)"""
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
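

# A minimal, self-contained sketch (illustrative only) of the RECORD-style
# hash rehash() produces: "sha256=" plus the urlsafe-base64 digest with its
# '=' padding stripped.
_payload = b'hello wheel\n'
_digest = 'sha256=' + urlsafe_b64encode(
    hashlib.sha256(_payload).digest()
).decode('latin1').rstrip('=')
# (_digest, len(_payload)) is what would land in this file's RECORD row.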


def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)


def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True


dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False


def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers
    # which means that they may or may not be valid INI files. The attempt
    # here is to strip leading and trailing whitespace in order to make them
    # valid INI files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _split_ep(s):
        """get the string representation of EntryPoint, remove space and split
        on '='"""
        return str(s).replace(" ", "").split("=")

    # convert the EntryPoint objects into strings with module:function
    console = dict(_split_ep(v) for v in console.values())
    gui = dict(_split_ep(v) for v in gui.values())
    return console, gui
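

# A minimal sketch (illustrative only; the entry point is hypothetical) of the
# mapping get_entrypoints() builds from an entry_points.txt:
_ep_map = pkg_resources.EntryPoint.parse_map(
    "[console_scripts]\ndemo = demo.cli:main\n"
)
assert str(_ep_map['console_scripts']['demo']).replace(' ', '') == \
    'demo=demo.cli:main'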


def message_about_scripts_not_on_PATH(scripts):
    # type: (List[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group scripts by the path they were installed in
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, set]
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
        grouped_by_dir[parent_dir].add(script_name)

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(i) for i in os.environ["PATH"].split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    # This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, scripts in warn_for.items():
        scripts = sorted(scripts)
        if len(scripts) == 1:
            start_text = "script {} is".format(scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(scripts[:-1]) + " and " + scripts[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
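

# A minimal sketch (illustrative only; the path is hypothetical): a script in
# a directory that is not on PATH yields a two-line warning naming both the
# directory and the --no-warn-script-location opt-out; scripts in directories
# that are already on PATH yield None.
_msg = message_about_scripts_not_on_PATH(['/definitely/not/on/path/demo'])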


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     warn_script_location=True):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add to the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically in effect whenever ENSUREPIP_OPTIONS
    #     is set and is not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
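    #
    # A hedged illustration (not pip's code): on CPython 3.7, with the
    # hypothetical entry point 'pip = pip._internal:main', the branches below
    # generate roughly these wrapper names:
    #   ENSUREPIP_OPTIONS unset      -> pip, pip3, pip3.7
    #   ENSUREPIP_OPTIONS=install    -> pip3, pip3.7
    #   ENSUREPIP_OPTIONS=altinstall -> pip3.7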
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        return False
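

# A minimal sketch (illustrative only; the metadata text is hypothetical):
# WHEEL metadata is RFC 822 style, which is why wheel_version() can parse it
# with email.parser.
_wheel_text = "Wheel-Version: 1.0\nGenerator: bdist_wheel (0.31.0)\n"
_parsed = Parser().parsestr(_wheel_text)
assert tuple(map(int, _parsed['Wheel-Version'].strip().split('.'))) == (1, 0)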


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
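

# A minimal sketch (illustrative only) of the three outcomes above, given
# VERSION_COMPATIBLE == (1, 0):
#   check_compatibility((1, 0), 'demo')  -> returns silently
#   check_compatibility((1, 2), 'demo')  -> warns: newer Wheel-Version (1.2)
#   check_compatibility((2, 0), 'demo')  -> raises UnsupportedWheel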


class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported
        tags, and one of the file tags is first in the list, then return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return bool(set(tags).intersection(self.file_tags))
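

# A minimal sketch (illustrative only; the wheel name is hypothetical) of the
# filename parsing done by Wheel above:
_w = Wheel('demo-1.0-py2.py3-none-any.whl')
assert (_w.name, _w.version) == ('demo', '1.0')
assert _w.file_tags == {('py2', 'none', 'any'), ('py3', 'none', 'any')}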


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, finder, preparer, wheel_cache,
                 build_options=None, global_options=None, no_clean=False):
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        self._wheel_dir = preparer.wheel_download_dir

        self.build_options = build_options or []
        self.global_options = global_options or []
        self.no_clean = no_clean

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        with TempDirectory(kind="wheel") as temp_dir:
            if self.__build_one(req, temp_dir.path, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(temp_dir.path)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(
                        os.path.join(temp_dir.path, wheel_name), wheel_path
                    )
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None

    def _base_setup_args(self, req):
        # NOTE: Eventually, we'd want to also -S to the flags here, when we're
        # isolating. Currently, it breaks Python in virtualenvs, because it
        # relies on site.py to find parts of the standard library outside the
        # virtualenv.
        return [
            sys.executable, '-u', '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        building_is_possible = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0

@@ -0,0 +1,109 @@
"""
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.

Files inside of pip._vendor should be considered immutable and should only be
updated to versions from upstream.
"""
from __future__ import absolute_import

import glob
import os.path
import sys

# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip._vendor.six.
DEBUNDLED = False

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))


# Define a small helper function to alias our vendored modules to the real
# ones if the vendored ones do not exist. The idea for this was taken from
# https://github.com/kennethreitz/requests/pull/2567.
def vendored(modulename):
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(vendored_name, globals(), locals(), level=0)
    except ImportError:
        try:
            __import__(modulename, globals(), locals(), level=0)
        except ImportError:
            # We can just silently allow import failures to pass here. If we
            # got to this point it means that ``import pip._vendor.whatever``
            # failed and so did ``import whatever``. Since we're importing
            # this upfront in an attempt to alias imports, not erroring here
            # will just mean we get a regular import error whenever pip
            # *actually* tries to import one of these modules to use it,
            # which actually gives us a better error message than we would
            # have otherwise gotten.
            pass
        else:
            sys.modules[vendored_name] = sys.modules[modulename]
            base, head = vendored_name.rsplit(".", 1)
            setattr(sys.modules[base], head, sys.modules[modulename])


# If we're operating in a debundled setup, then we want to go ahead and
# trigger the aliasing of our vendored libraries as well as looking for
# wheels to add to our sys.path. This will cause all of this code to be a
# no-op typically however downstream redistributors can enable it in a
# consistent way across all platforms.
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to
    # the front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("six.moves.urllib.parse")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("pytoml")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    vendored("requests.packages.urllib3.packages.ordered_dict")
    vendored("requests.packages.urllib3.packages.six")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
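
    # A minimal sketch (illustrative only): after vendored("six") falls back
    # to a system-wide copy, both names resolve to one module object, i.e.
    # ``import pip._vendor.six`` and ``import six`` would then satisfy
    # ``pip._vendor.six is six``.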

@@ -0,0 +1,604 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
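

# A minimal sketch (illustrative only; concrete paths vary by machine):
#   user_data_dir("SuperApp", "Acme", version="1.0")
#     Linux  -> /home/<user>/.local/share/SuperApp/1.0
#     macOS  -> /Users/<user>/Library/Application Support/SuperApp/1.0
#     Win 7  -> C:\Users\<user>\AppData\Local\Acme\SuperApp\1.0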
|
||||||
|
|
||||||
|
|
||||||
|
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
|
||||||
|
r"""Return full path to the user-shared data dir for this application.
|
||||||
|
|
||||||
|
"appname" is the name of application.
|
||||||
|
If None, just the system directory is returned.
|
||||||
|
"appauthor" (only used on Windows) is the name of the
|
||||||
|
appauthor or distributing body for this application. Typically
|
||||||
|
it is the owning company name. This falls back to appname. You may
|
||||||
|
pass False to disable it.
|
||||||
|
"version" is an optional version path element to append to the
|
||||||
|
path. You might want to use this if you want multiple versions
|
||||||
|
of your app to be able to run independently. If used, this
|
||||||
|
would typically be "<major>.<minor>".
|
||||||
|
Only applied when appname is present.
|
||||||
|
"multipath" is an optional parameter only applicable to *nix
|
||||||
|
which indicates that the entire list of data dirs should be
|
||||||
|
returned. By default, the first item from XDG_DATA_DIRS is
|
||||||
|
returned, or '/usr/local/share/<AppName>',
|
||||||
|
if XDG_DATA_DIRS is not set
|
||||||
|
|
||||||
|
Typical site data directories are:
|
||||||
|
Mac OS X: /Library/Application Support/<AppName>
|
||||||
|
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
|
||||||
|
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
|
||||||
|
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
|
||||||
|
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
|
||||||
|
|
||||||
|
For Unix, this is using the $XDG_DATA_DIRS[0] default.
|
||||||
|
|
||||||
|
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
|
||||||
|
"""
|
||||||
|
if system == "win32":
|
||||||
|
if appauthor is None:
|
||||||
|
appauthor = appname
|
||||||
|
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
|
||||||
|
if appname:
|
||||||
|
if appauthor is not False:
|
||||||
|
path = os.path.join(path, appauthor, appname)
|
||||||
|
else:
|
||||||
|
path = os.path.join(path, appname)
|
||||||
|
elif system == 'darwin':
|
||||||
|
path = os.path.expanduser('/Library/Application Support')
|
||||||
|
if appname:
|
||||||
|
path = os.path.join(path, appname)
|
||||||
|
else:
|
||||||
|
# XDG default for $XDG_DATA_DIRS
|
||||||
|
# only first, if multipath is False
|
||||||
|
path = os.getenv('XDG_DATA_DIRS',
|
||||||
|
os.pathsep.join(['/usr/local/share', '/usr/share']))
|
||||||
|
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
|
||||||
|
if appname:
|
||||||
|
if version:
|
||||||
|
appname = os.path.join(appname, version)
|
||||||
|
pathlist = [os.sep.join([x, appname]) for x in pathlist]
|
||||||
|
|
||||||
|
if multipath:
|
||||||
|
path = os.pathsep.join(pathlist)
|
||||||
|
else:
|
||||||
|
path = pathlist[0]
|
||||||
|
return path
|
||||||
|
|
||||||
|
if appname and version:
|
||||||
|
path = os.path.join(path, version)
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||||
|
r"""Return full path to the user-specific config dir for this application.
|
||||||
|
|
||||||
|
"appname" is the name of application.
|
||||||
|
If None, just the system directory is returned.
|
||||||
|
"appauthor" (only used on Windows) is the name of the
|
||||||
|
appauthor or distributing body for this application. Typically
|
||||||
|
it is the owning company name. This falls back to appname. You may
|
||||||
|
pass False to disable it.
|
||||||
|
"version" is an optional version path element to append to the
|
||||||
|
path. You might want to use this if you want multiple versions
|
||||||
|
of your app to be able to run independently. If used, this
|
||||||
|
would typically be "<major>.<minor>".
|
||||||
|
Only applied when appname is present.
|
||||||
|
"roaming" (boolean, default False) can be set True to use the Windows
|
||||||
|
roaming appdata directory. That means that for users on a Windows
|
||||||
|
network setup for roaming profiles, this user data will be
|
||||||
|
sync'd on login. See
|
||||||
|
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||||
|
for a discussion of issues.
|
||||||
|
|
||||||
|
Typical user config directories are:
|
||||||
|
Mac OS X: same as user_data_dir
|
||||||
|
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
|
||||||
|
Win *: same as user_data_dir
|
||||||
|
|
||||||
|
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
|
||||||
|
That means, by default "~/.config/<AppName>".
|
||||||
|
"""
|
||||||
|
if system in ["win32", "darwin"]:
|
||||||
|
path = user_data_dir(appname, appauthor, None, roaming)
|
||||||
|
else:
|
||||||
|
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
|
||||||
|
if appname:
|
||||||
|
path = os.path.join(path, appname)
|
||||||
|
if appname and version:
|
||||||
|
path = os.path.join(path, version)
|
||||||
|
return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of the application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path
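
A companion sketch for the multipath switch (illustrative only; the second directory is invented):

import os

os.environ['XDG_CONFIG_DIRS'] = os.pathsep.join(['/etc/xdg', '/opt/etc/xdg'])
print(site_config_dir('DemoApp'))                  # -> /etc/xdg/DemoApp (first entry only)
print(site_config_dir('DemoApp', multipath=True))  # -> /etc/xdg/DemoApp:/opt/etc/xdg/DemoApp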


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of the application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
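
And one for the "opinion" flag, assuming a Windows host and invented app/author names (illustrative only):

print(user_cache_dir('DemoApp', 'DemoCo'))
# -> C:\Users\<username>\AppData\Local\DemoCo\DemoApp\Cache
print(user_cache_dir('DemoApp', 'DemoCo', opinion=False))
# -> C:\Users\<username>\AppData\Local\DemoCo\DemoApp  (no "Cache" suffix)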


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of the application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.local/state/<AppName>    # or in $XDG_STATE_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of the application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some Windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir


if system == "win32":
    try:
        from ctypes import windll
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        try:
            import com.sun.jna
            _get_win_folder = _get_win_folder_with_jna
        except ImportError:
            _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
@ -0,0 +1,11 @@
"""CacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org'
__version__ = '0.12.4'

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
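
The CacheControl wrapper re-exported here is the usual entry point; a minimal sketch of the documented usage (the URL is a placeholder, and under pip's vendoring the import path would be pip._vendor.cachecontrol instead):

import requests
from cachecontrol import CacheControl

sess = CacheControl(requests.Session())  # wrap the session with a caching adapter
resp = sess.get('https://example.com/')  # repeat GETs within freshness come from the cache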
@ -0,0 +1,60 @@
import logging

from pip._vendor import requests

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger

from argparse import ArgumentParser


def setup_logging():
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    logger.addHandler(handler)


def get_session():
    adapter = CacheControlAdapter(
        DictCache(),
        cache_etags=True,
        serializer=None,
        heuristic=None,
    )
    sess = requests.Session()
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

    sess.cache_controller = adapter.controller
    return sess


def get_args():
    parser = ArgumentParser()
    parser.add_argument('url', help='The URL to try and cache')
    return parser.parse_args()


def main(args=None):
    args = get_args()
    sess = get_session()

    # Make a request to get a response
    resp = sess.get(args.url)

    # Turn on logging
    setup_logging()

    # try setting the cache
    sess.cache_controller.cache_response(resp.request, resp.raw)

    # Now try to get it
    if sess.cache_controller.cached_request(resp.request):
        print('Cached!')
    else:
        print('Not cached :(')


if __name__ == '__main__':
    main()
@ -0,0 +1,134 @@
import types
import functools
import zlib

from pip._vendor.requests.adapters import HTTPAdapter

from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper


class CacheControlAdapter(HTTPAdapter):
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 cacheable_methods=None,
                 *args, **kw):
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic
        self.cacheable_methods = cacheable_methods or ('GET',)

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, cacheable_methods=None, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if request.method in cacheable:
            try:
                cached_response = self.controller.cached_request(request)
            except zlib.error:
                cached_response = None
            if cached_response:
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(
                self.controller.conditional_headers(request)
            )

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False,
                       cacheable_methods=None):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    )
                )
                if response.chunked:
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()
                    response._update_chunk_length = types.MethodType(_update_chunk_length, response)

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        self.cache.close()
        super(CacheControlAdapter, self).close()
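
The adapter can also be mounted by hand, mirroring get_session() in _cmd.py above but with a persistent cache; a minimal sketch (the cache directory name is invented):

import requests
from cachecontrol.adapter import CacheControlAdapter
from cachecontrol.caches import FileCache

sess = requests.Session()
adapter = CacheControlAdapter(cache=FileCache('.web_cache'))  # only GET is cacheable by default
sess.mount('http://', adapter)
sess.mount('https://', adapter)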
@ -0,0 +1,39 @@
"""
The cache object API for implementing caches. The default is a
thread-safe in-memory dictionary.
"""
from threading import Lock


class BaseCache(object):

    def get(self, key):
        # NotImplementedError, not the (non-callable) NotImplemented constant
        raise NotImplementedError()

    def set(self, key, value):
        raise NotImplementedError()

    def delete(self, key):
        raise NotImplementedError()

    def close(self):
        pass


class DictCache(BaseCache):

    def __init__(self, init_dict=None):
        self.lock = Lock()
        self.data = init_dict or {}

    def get(self, key):
        return self.data.get(key, None)

    def set(self, key, value):
        with self.lock:
            self.data.update({key: value})

    def delete(self, key):
        with self.lock:
            if key in self.data:
                self.data.pop(key)
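
BaseCache is the extension point: anything with get/set/delete (and optionally close) can back the adapter. A minimal sketch of a custom backend (illustrative only, not part of the vendored package):

class PrefixedDictCache(BaseCache):
    """DictCache variant that namespaces its keys; purely illustrative."""

    def __init__(self, prefix):
        self.prefix = prefix
        self.data = {}

    def get(self, key):
        return self.data.get(self.prefix + key)

    def set(self, key, value):
        self.data[self.prefix + key] = value

    def delete(self, key):
        self.data.pop(self.prefix + key, None)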
@ -0,0 +1,2 @@
from .file_cache import FileCache  # noqa
from .redis_cache import RedisCache  # noqa
@ -0,0 +1,133 @@
import hashlib
import os
from textwrap import dedent

from ..cache import BaseCache
from ..controller import CacheController

try:
    FileNotFoundError
except NameError:
    # py2.X
    FileNotFoundError = OSError


def _secure_open_write(filename, fmode):
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    # will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except (IOError, OSError):
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file. The use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")
    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise


class FileCache(BaseCache):
    def __init__(self, directory, forever=False, filemode=0o0600,
                 dirmode=0o0700, use_dir_lock=None, lock_class=None):

        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        try:
            from pip._vendor.lockfile import LockFile
            from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
        except ImportError:
            notice = dedent("""
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            """)
            raise ImportError(notice)
        else:
            if use_dir_lock:
                lock_class = MkdirLockFile

            elif lock_class is None:
                lock_class = LockFile

        self.directory = directory
        self.forever = forever
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        # NOTE: This method should not change as some may depend on it.
        #       See: https://github.com/ionrock/cachecontrol/issues/63
        hashed = self.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        name = self._fn(key)
        if not os.path.exists(name):
            return None

        with open(name, 'rb') as fh:
            return fh.read()

    def set(self, key, value):
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        name = self._fn(key)
        if not self.forever:
            try:
                os.remove(name)
            except FileNotFoundError:
                pass


def url_to_file_path(url, filecache):
    """Return the file cache path based on the URL.

    This does not ensure the file exists!
    """
    key = CacheController.cache_url(url)
    return filecache._fn(key)
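
url_to_file_path makes it possible to locate a cache entry on disk; a minimal sketch following the _fn layout above (directory and URL are placeholders):

cache = FileCache('.web_cache')
print(url_to_file_path('https://example.com/', cache))
# -> .web_cache/<h1>/<h2>/<h3>/<h4>/<h5>/<full sha224 hex digest>; the file need not exist yet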
@ -0,0 +1,43 @@
from __future__ import division

from datetime import datetime
from pip._vendor.cachecontrol.cache import BaseCache


def total_seconds(td):
    """Python 2.6 compatibility"""
    if hasattr(td, 'total_seconds'):
        return int(td.total_seconds())

    ms = td.microseconds
    secs = (td.seconds + td.days * 24 * 3600)
    return int((ms + secs * 10**6) / 10**6)


class RedisCache(BaseCache):

    def __init__(self, conn):
        self.conn = conn

    def get(self, key):
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        if not expires:
            self.conn.set(key, value)
        else:
            expires = expires - datetime.utcnow()
            self.conn.setex(key, total_seconds(expires), value)

    def delete(self, key):
        self.conn.delete(key)

    def clear(self):
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        for key in self.conn.keys():
            self.conn.delete(key)

    def close(self):
        """Redis uses connection pooling, no need to close the connection."""
        pass
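
A minimal sketch of wiring RedisCache to a live connection, assuming the redis package is installed (the connection URL is a placeholder):

import redis
from cachecontrol.caches import RedisCache

cache = RedisCache(redis.from_url('redis://localhost:6379/0'))
cache.set('greeting', b'hello')
print(cache.get('greeting'))  # b'hello'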
@ -0,0 +1,29 @@
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


try:
    import cPickle as pickle
except ImportError:
    import pickle


# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
    from pip._vendor.urllib3.response import HTTPResponse

try:
    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
    from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour
try:
    text_type = unicode
except NameError:
    text_type = str
@ -0,0 +1,373 @@
"""
The httplib2 algorithms ported for use with requests.
"""
import logging
import re
import calendar
import time
from email.utils import parsedate_tz

from pip._vendor.requests.structures import CaseInsensitiveDict

from .cache import DictCache
from .serialize import Serializer


logger = logging.getLogger(__name__)

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])


class CacheController(object):
    """An interface to see if a request should be cached or not.
    """
    def __init__(self, cache=None, cache_etags=True, serializer=None,
                 status_codes=None):
        self.cache = cache or DictCache()
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()
        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)

    @classmethod
    def _urlnorm(cls, uri):
        """Normalize the URL to create a safe key for the cache"""
        (scheme, authority, path, query, fragment) = parse_uri(uri)
        if not scheme or not authority:
            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)

        scheme = scheme.lower()
        authority = authority.lower()

        if not path:
            path = "/"

        # Could do syntax based normalization of the URI before
        # computing the digest. See Section 6.2.2 of Std 66.
        request_uri = query and "?".join([path, query]) or path
        defrag_uri = scheme + "://" + authority + request_uri

        return defrag_uri

    @classmethod
    def cache_url(cls, uri):
        return cls._urlnorm(uri)

    def parse_cache_control(self, headers):
        known_directives = {
            # https://tools.ietf.org/html/rfc7234#section-5.2
            'max-age': (int, True,),
            'max-stale': (int, False,),
            'min-fresh': (int, True,),
            'no-cache': (None, False,),
            'no-store': (None, False,),
            'no-transform': (None, False,),
            'only-if-cached': (None, False,),
            'must-revalidate': (None, False,),
            'public': (None, False,),
            'private': (None, False,),
            'proxy-revalidate': (None, False,),
            's-maxage': (int, True,)
        }

        cc_headers = headers.get('cache-control',
                                 headers.get('Cache-Control', ''))

        retval = {}

        for cc_directive in cc_headers.split(','):
            parts = cc_directive.split('=', 1)
            directive = parts[0].strip()

            try:
                typ, required = known_directives[directive]
            except KeyError:
                logger.debug('Ignoring unknown cache-control directive: %s',
                             directive)
                continue

            if not typ or not required:
                retval[directive] = None
            if typ:
                try:
                    retval[directive] = typ(parts[1].strip())
                except IndexError:
                    if required:
                        logger.debug('Missing value for cache-control '
                                     'directive: %s', directive)
                except ValueError:
                    logger.debug('Invalid value for cache-control directive '
                                 '%s, must be %s', directive, typ.__name__)

        return retval

    def cached_request(self, request):
        """
        Return a cached response if it exists in the cache, otherwise
        return False.
        """
        cache_url = self.cache_url(request.url)
        logger.debug('Looking up "%s" in the cache', cache_url)
        cc = self.parse_cache_control(request.headers)

        # Bail out if the request insists on fresh data
        if 'no-cache' in cc:
            logger.debug('Request header has "no-cache", cache bypassed')
            return False

        if 'max-age' in cc and cc['max-age'] == 0:
            logger.debug('Request header has "max-age" as 0, cache bypassed')
            return False

        # Request allows serving from the cache, let's see if we find something
        cache_data = self.cache.get(cache_url)
        if cache_data is None:
            logger.debug('No cache entry available')
            return False

        # Check whether it can be deserialized
        resp = self.serializer.loads(request, cache_data)
        if not resp:
            logger.warning('Cache entry deserialization failed, entry ignored')
            return False

        # If we have a cached 301, return it immediately. We don't
        # need to test our response for other headers b/c it is
        # intrinsically "cacheable" as it is Permanent.
        # See:
        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
        #
        # Client can try to refresh the value by repeating the request
        # with cache busting headers as usual (ie no-cache).
        if resp.status == 301:
            msg = ('Returning cached "301 Moved Permanently" response '
                   '(ignoring date and etag information)')
            logger.debug(msg)
            return resp

        headers = CaseInsensitiveDict(resp.headers)
        if not headers or 'date' not in headers:
            if 'etag' not in headers:
                # Without date or etag, the cached response can never be used
                # and should be deleted.
                logger.debug('Purging cached response: no date or etag')
                self.cache.delete(cache_url)
            logger.debug('Ignoring cached response: no date')
            return False

        now = time.time()
        date = calendar.timegm(
            parsedate_tz(headers['date'])
        )
        current_age = max(0, now - date)
        logger.debug('Current age based on date: %i', current_age)

        # TODO: There is an assumption that the result will be a
        #       urllib3 response object. This may not be best since we
        #       could probably avoid instantiating or constructing the
        #       response until we know we need it.
        resp_cc = self.parse_cache_control(headers)

        # determine freshness
        freshness_lifetime = 0

        # Check the max-age pragma in the cache control header
        if 'max-age' in resp_cc:
            freshness_lifetime = resp_cc['max-age']
            logger.debug('Freshness lifetime from max-age: %i',
                         freshness_lifetime)

        # If there isn't a max-age, check for an expires header
        elif 'expires' in headers:
            expires = parsedate_tz(headers['expires'])
            if expires is not None:
                expire_time = calendar.timegm(expires) - date
                freshness_lifetime = max(0, expire_time)
                logger.debug("Freshness lifetime from expires: %i",
                             freshness_lifetime)

        # Determine if we are setting freshness limit in the
        # request. Note, this overrides what was in the response.
        if 'max-age' in cc:
            freshness_lifetime = cc['max-age']
            logger.debug('Freshness lifetime from request max-age: %i',
                         freshness_lifetime)

        if 'min-fresh' in cc:
            min_fresh = cc['min-fresh']
            # adjust our current age by our min fresh
            current_age += min_fresh
            logger.debug('Adjusted current age from min-fresh: %i',
                         current_age)

        # Return entry if it is fresh enough
        if freshness_lifetime > current_age:
            logger.debug('The response is "fresh", returning cached response')
            logger.debug('%i > %i', freshness_lifetime, current_age)
            return resp

        # we're not fresh. If we don't have an Etag, clear it out
        if 'etag' not in headers:
            logger.debug(
                'The cached response is "stale" with no etag, purging'
            )
            self.cache.delete(cache_url)

        # return the original handler
        return False
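
Worked numbers for the freshness check above (illustrative only):

# Response Date header is 100 s old  -> current_age = 100
# Response sends max-age=300         -> freshness_lifetime = 300
# 300 > 100                          -> entry is "fresh", cached copy returned
# Request adds min-fresh=250         -> current_age becomes 350, 300 > 350 fails,
#                                       so the entry is stale and gets revalidated instead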

    def conditional_headers(self, request):
        cache_url = self.cache_url(request.url)
        resp = self.serializer.loads(request, self.cache.get(cache_url))
        new_headers = {}

        if resp:
            headers = CaseInsensitiveDict(resp.headers)

            if 'etag' in headers:
                new_headers['If-None-Match'] = headers['ETag']

            if 'last-modified' in headers:
                new_headers['If-Modified-Since'] = headers['Last-Modified']

        return new_headers

    def cache_response(self, request, response, body=None,
                       status_codes=None):
        """
        Algorithm for caching requests.

        This assumes a requests Response object.
        """
        # From httplib2: Don't cache 206's since we aren't going to
        #                handle byte range requests
        cacheable_status_codes = status_codes or self.cacheable_status_codes
        if response.status not in cacheable_status_codes:
            logger.debug(
                'Status code %s not in %s',
                response.status,
                cacheable_status_codes
            )
            return

        response_headers = CaseInsensitiveDict(response.headers)

        # If we've been given a body, our response has a Content-Length, and
        # that Content-Length is valid, then we can check whether the body
        # we've been given matches the expected size; if it doesn't, we'll
        # just skip trying to cache it.
        if (body is not None and
                "content-length" in response_headers and
                response_headers["content-length"].isdigit() and
                int(response_headers["content-length"]) != len(body)):
            return

        cc_req = self.parse_cache_control(request.headers)
        cc = self.parse_cache_control(response_headers)

        cache_url = self.cache_url(request.url)
        logger.debug('Updating cache with response from "%s"', cache_url)

        # Delete it from the cache if we happen to have it stored there
        no_store = False
        if 'no-store' in cc:
            no_store = True
            logger.debug('Response header has "no-store"')
        if 'no-store' in cc_req:
            no_store = True
            logger.debug('Request header has "no-store"')
        if no_store and self.cache.get(cache_url):
            logger.debug('Purging existing cache entry to honor "no-store"')
            self.cache.delete(cache_url)

        # If we've been given an etag, then keep the response
        if self.cache_etags and 'etag' in response_headers:
            logger.debug('Caching due to etag')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response, body=body),
            )

        # Add to the cache any 301s. We do this before looking at
        # the Date headers.
        elif response.status == 301:
            logger.debug('Caching permanent redirect')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response)
            )

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif 'date' in response_headers:
            # cache when there is a max-age > 0
            if 'max-age' in cc and cc['max-age'] > 0:
                logger.debug('Caching b/c date exists and max-age > 0')
                self.cache.set(
                    cache_url,
                    self.serializer.dumps(request, response, body=body),
                )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif 'expires' in response_headers:
                if response_headers['expires']:
                    logger.debug('Caching b/c of expires header')
                    self.cache.set(
                        cache_url,
                        self.serializer.dumps(request, response, body=body),
                    )

    def update_cached_response(self, request, response):
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        cache_url = self.cache_url(request.url)

        cached_response = self.serializer.loads(
            request,
            self.cache.get(cache_url)
        )

        if not cached_response:
            # we didn't have a cached response
            return response

        # Let's update our headers with the headers from the new response:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problematic due to
        # typical assumptions.
        excluded_headers = [
            "content-length",
        ]

        cached_response.headers.update(
            dict((k, v) for k, v in response.headers.items()
                 if k.lower() not in excluded_headers)
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self.cache.set(
            cache_url,
            self.serializer.dumps(request, cached_response),
        )

        return cached_response
@ -0,0 +1,78 @@
from io import BytesIO


class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    """

    def __init__(self, fp, callback):
        self.__buf = BytesIO()
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection mean that self.__fp is
        # not always set. Using __getattribute__ with the mangled private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing through getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__('_CallbackFileWrapper__fp')
        return getattr(fp, name)

    def __is_fp_closed(self):
        try:
            return self.__fp.fp is None
        except AttributeError:
            pass

        try:
            return self.__fp.closed
        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        if self.__callback:
            self.__callback(self.__buf.getvalue())

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do its thing normally.
        self.__callback = None

    def read(self, amt=None):
        data = self.__fp.read(amt)
        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        data = self.__fp._safe_read(amt)
        if amt == 2 and data == b'\r\n':
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
@ -0,0 +1,138 @@
import calendar
import time

from email.utils import formatdate, parsedate, parsedate_tz

from datetime import datetime, timedelta

TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"


def expire_after(delta, date=None):
    date = date or datetime.utcnow()
    return date + delta


def datetime_to_header(dt):
    return formatdate(calendar.timegm(dt.timetuple()))


class BaseHeuristic(object):

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided to allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say the response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        """
        return {}

    def apply(self, response):
        updated_headers = self.update_headers(response)

        if updated_headers:
            response.headers.update(updated_headers)
            warning_header_value = self.warning(response)
            if warning_header_value is not None:
                response.headers.update({'Warning': warning_header_value})

        return response


class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """
    def update_headers(self, response):
        headers = {}

        if 'expires' not in response.headers:
            date = parsedate(response.headers['date'])
            expires = expire_after(timedelta(days=1),
                                   date=datetime(*date[:6]))
            headers['expires'] = datetime_to_header(expires)
            headers['cache-control'] = 'public'
        return headers


class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        expires = expire_after(self.delta)
        return {
            'expires': datetime_to_header(expires),
            'cache-control': 'public',
        }

    def warning(self, response):
        tmpl = '110 - Automatically cached for %s. Response might be stale'
        return tmpl % self.delta


class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike Mozilla, we limit this to 24 hours.
    """
    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        headers = resp.headers

        if 'expires' in headers:
            return {}

        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if 'date' not in headers or 'last-modified' not in headers:
            return {}

        date = calendar.timegm(parsedate_tz(headers['date']))
        last_modified = parsedate(headers['last-modified'])
        if date is None or last_modified is None:
            return {}

        now = time.time()
        current_age = max(0, now - date)
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        return None
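
A minimal sketch of plugging one of these heuristics into the adapter, via the heuristic parameter shown in adapter.py above (the one-hour span is invented):

import requests
from cachecontrol.adapter import CacheControlAdapter
from cachecontrol.heuristics import ExpiresAfter

sess = requests.Session()
sess.mount('https://', CacheControlAdapter(heuristic=ExpiresAfter(hours=1)))
# every cacheable response now gets Expires/Cache-Control headers one hour out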
@ -0,0 +1,194 @@

import base64
import io
import json
import zlib

from pip._vendor import msgpack
from pip._vendor.requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type


def _b64_decode_bytes(b):
    return base64.b64decode(b.encode("ascii"))


def _b64_decode_str(s):
    return _b64_decode_bytes(s).decode("utf8")


class Serializer(object):

    def dumps(self, request, response, body=None):
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # NOTE: This is all a bit weird, but it's really important that on
        #       Python 2.x these objects are unicode and not str, even when
        #       they contain only ascii. The problem here is that msgpack
        #       understands the difference between unicode and bytes and we
        #       have it set to differentiate between them, however Python 2
        #       doesn't know the difference. Forcing these to unicode will be
        #       enough to have msgpack know the difference.
        data = {
            u"response": {
                u"body": body,
                u"headers": dict(
                    (text_type(k), text_type(v))
                    for k, v in response.headers.items()
                ),
                u"status": response.status,
                u"version": response.version,
                u"reason": text_type(response.reason),
                u"strict": response.strict,
                u"decode_content": response.decode_content,
            },
        }

        # Construct our vary headers
        data[u"vary"] = {}
        if u"vary" in response_headers:
            varied_headers = response_headers[u'vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = text_type(header_value)
                data[u"vary"][header] = header_value

        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])

    def loads(self, request, data):
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except (ValueError, zlib.error):
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(
            cached["response"]["body"]
        )
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(
            cached["response"]["reason"],
        )
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)

    def _loads_v3(self, request, data):
        # Due to Python 2 encoding issues, it's impossible to know for sure
        # exactly how to load v3 entries, thus we'll treat these as a miss so
        # that they get rewritten out as v4 entries.
        return

    def _loads_v4(self, request, data):
        try:
            cached = msgpack.loads(data, encoding='utf-8')
        except ValueError:
            return

        return self.prepare_response(request, cached)
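As a side note, the cc=N framing that loads() parses above is easy to see in isolation. A small illustrative sketch (the payload bytes are made up):

# Split a framed cache record into its version tag and payload.
record = b"cc=4," + b"\x93\x01\x02\x03"    # hypothetical msgpack payload
ver, payload = record.split(b",", 1)       # ver == b"cc=4"
ver = ver.split(b"=", 1)[-1].decode("ascii")
assert ver == "4"                          # dispatches to _loads_v4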
@ -0,0 +1,27 @@

from .adapter import CacheControlAdapter
from .cache import DictCache


def CacheControl(sess,
                 cache=None,
                 cache_etags=True,
                 serializer=None,
                 heuristic=None,
                 controller_class=None,
                 adapter_class=None,
                 cacheable_methods=None):

    cache = cache or DictCache()
    adapter_class = adapter_class or CacheControlAdapter
    adapter = adapter_class(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods
    )
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

    return sess
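A brief usage sketch of this wrapper, assuming requests is importable; repeated GETs may then be answered from the in-memory DictCache:

import requests
from pip._vendor.cachecontrol import CacheControl

sess = CacheControl(requests.Session())
sess.get('https://example.com/')   # network fetch
sess.get('https://example.com/')   # may be served from cache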
@ -0,0 +1,3 @@

from .core import where, old_where

__version__ = "2018.01.18"
@ -0,0 +1,2 @@

from certifi import where
print(where())
File diff suppressed because it is too large
@ -0,0 +1,37 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem.
"""
import os
import warnings


class DeprecatedBundleWarning(DeprecationWarning):
    """
    The weak security bundle is being deprecated. Please bother your service
    provider to get them to stop using cross-signed roots.
    """


def where():
    f = os.path.dirname(__file__)

    return os.path.join(f, 'cacert.pem')


def old_where():
    warnings.warn(
        "The weak security bundle has been removed. certifi.old_where() is now an alias "
        "of certifi.where(). Please update your code to use certifi.where() instead. "
        "certifi.old_where() will be removed in 2018.",
        DeprecatedBundleWarning
    )
    return where()


if __name__ == '__main__':
    print(where())
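For reference, where() is typically handed to an SSL context; a short sketch using only standard-library calls:

import ssl
import certifi

# Build a TLS context that trusts exactly the bundled cacert.pem.
context = ssl.create_default_context(cafile=certifi.where())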
@ -0,0 +1,39 @@

######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################


from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str:     The byte sequence to examine.
    :type byte_str:      ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()
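A quick usage sketch of detect(); the returned dict carries the guessed encoding and a confidence score:

import chardet

print(chardet.detect(b'Hello, world'))
# e.g. {'encoding': 'ascii', 'confidence': 1.0, 'language': ''}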
@ -0,0 +1,386 @@

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128  --> 0.42261
# 256  --> 0.57851
# 512  --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
# Random Distribution Ratio = 512/(5401-512) = 0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR

BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table
BIG5_TABLE_SIZE = 5376

BIG5_CHAR_TO_FREQ_ORDER = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
)
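To make the table concrete: a two-byte Big5 sequence is mapped to an index (the formula lives in chardistribution.py below), and the table gives that character's frequency rank, where a rank below 512 counts as a frequent character. A small sketch with the lowest valid code point:

# Bytes 0xA4 0xA1 map to index 63 under the Big5 order formula.
order = 157 * (0xA4 - 0xA4) + 0xA1 - 0xA1 + 63
freq_rank = BIG5_CHAR_TO_FREQ_ORDER[order]
is_frequent = freq_rank < 512   # counted into _freq_chars during feed()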
@ -0,0 +1,47 @@

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(Big5Prober, self).__init__()
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"
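A standalone sketch of driving this prober directly, assuming the usual chardet prober interface (feed() then get_confidence()); the sample string is illustrative:

from chardet.big5prober import Big5Prober

prober = Big5Prober()
prober.feed('一些繁體中文的測試文字'.encode('big5'))
print(prober.charset_name, prober.get_confidence())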
@ -0,0 +1,233 @@

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
                      JIS_TYPICAL_DISTRIBUTION_RATIO)


class CharDistributionAnalysis(object):
    ENOUGH_DATA_THRESHOLD = 1024
    SURE_YES = 0.99
    SURE_NO = 0.01
    MINIMUM_DATA_THRESHOLD = 3

    def __init__(self):
        # Mapping table to get frequency order from char order (get from
        # GetOrder())
        self._char_to_freq_order = None
        self._table_size = None  # Size of above table
        # This is a constant value which varies from language to language,
        # used in calculating confidence.  See
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for further detail.
        self.typical_distribution_ratio = None
        self._done = None
        self._total_chars = None
        self._freq_chars = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        # If this flag is set to True, detection is done and conclusion has
        # been made
        self._done = False
        self._total_chars = 0  # Total characters encountered
        # The number of characters whose frequency order is less than 512
        self._freq_chars = 0

    def feed(self, char, char_len):
        """feed a character with known length"""
        if char_len == 2:
            # we only care about 2-byte characters in our distribution analysis
            order = self.get_order(char)
        else:
            order = -1
        if order >= 0:
            self._total_chars += 1
            # order is valid
            if order < self._table_size:
                if 512 > self._char_to_freq_order[order]:
                    self._freq_chars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # if we didn't receive any character in our consideration range,
        # return negative answer
        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
            return self.SURE_NO

        if self._total_chars != self._freq_chars:
            r = (self._freq_chars / ((self._total_chars - self._freq_chars)
                 * self.typical_distribution_ratio))
            if r < self.SURE_YES:
                return r

        # normalize confidence (we don't want to be 100% sure)
        return self.SURE_YES
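A worked example of the confidence formula above, with hypothetical counts: if 800 of 1000 analysed characters fall among the 512 most frequent ones and the typical distribution ratio is 0.75, then r = 800 / (200 * 0.75), roughly 5.33, which is clipped to SURE_YES:

total_chars, freq_chars, ratio = 1000, 800, 0.75   # hypothetical values
r = freq_chars / ((total_chars - freq_chars) * ratio)
confidence = r if r < 0.99 else 0.99               # -> 0.99, very likely a match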
def got_enough_data(self):
|
||||||
|
# It is not necessary to receive all data to draw conclusion.
|
||||||
|
# For charset detection, certain amount of data is enough
|
||||||
|
return self._total_chars > self.ENOUGH_DATA_THRESHOLD
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# We do not handle characters based on the original encoding string,
|
||||||
|
# but convert this encoding string to a number, here called order.
|
||||||
|
# This allows multiple encodings of a language to share one frequency
|
||||||
|
# table.
|
||||||
|
return -1
|
||||||
|
|
||||||
|
|
||||||
|
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
|
||||||
|
def __init__(self):
|
||||||
|
super(EUCTWDistributionAnalysis, self).__init__()
|
||||||
|
self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
|
||||||
|
self._table_size = EUCTW_TABLE_SIZE
|
||||||
|
self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# for euc-TW encoding, we are interested
|
||||||
|
# first byte range: 0xc4 -- 0xfe
|
||||||
|
# second byte range: 0xa1 -- 0xfe
|
||||||
|
# no validation needed here. State machine has done that
|
||||||
|
first_char = byte_str[0]
|
||||||
|
if first_char >= 0xC4:
|
||||||
|
return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
|
||||||
|
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
|
||||||
|
def __init__(self):
|
||||||
|
super(EUCKRDistributionAnalysis, self).__init__()
|
||||||
|
self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
|
||||||
|
self._table_size = EUCKR_TABLE_SIZE
|
||||||
|
self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# for euc-KR encoding, we are interested
|
||||||
|
# first byte range: 0xb0 -- 0xfe
|
||||||
|
# second byte range: 0xa1 -- 0xfe
|
||||||
|
# no validation needed here. State machine has done that
|
||||||
|
first_char = byte_str[0]
|
||||||
|
if first_char >= 0xB0:
|
||||||
|
return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
|
||||||
|
class GB2312DistributionAnalysis(CharDistributionAnalysis):
|
||||||
|
def __init__(self):
|
||||||
|
super(GB2312DistributionAnalysis, self).__init__()
|
||||||
|
self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
|
||||||
|
self._table_size = GB2312_TABLE_SIZE
|
||||||
|
self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# for GB2312 encoding, we are interested
|
||||||
|
# first byte range: 0xb0 -- 0xfe
|
||||||
|
# second byte range: 0xa1 -- 0xfe
|
||||||
|
# no validation needed here. State machine has done that
|
||||||
|
first_char, second_char = byte_str[0], byte_str[1]
|
||||||
|
if (first_char >= 0xB0) and (second_char >= 0xA1):
|
||||||
|
return 94 * (first_char - 0xB0) + second_char - 0xA1
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
|
||||||
|
class Big5DistributionAnalysis(CharDistributionAnalysis):
|
||||||
|
def __init__(self):
|
||||||
|
super(Big5DistributionAnalysis, self).__init__()
|
||||||
|
self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
|
||||||
|
self._table_size = BIG5_TABLE_SIZE
|
||||||
|
self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# for big5 encoding, we are interested
|
||||||
|
# first byte range: 0xa4 -- 0xfe
|
||||||
|
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
|
||||||
|
# no validation needed here. State machine has done that
|
||||||
|
first_char, second_char = byte_str[0], byte_str[1]
|
||||||
|
if first_char >= 0xA4:
|
||||||
|
if second_char >= 0xA1:
|
||||||
|
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
|
||||||
|
else:
|
||||||
|
return 157 * (first_char - 0xA4) + second_char - 0x40
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
|
||||||
|
class SJISDistributionAnalysis(CharDistributionAnalysis):
|
||||||
|
def __init__(self):
|
||||||
|
super(SJISDistributionAnalysis, self).__init__()
|
||||||
|
self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
|
||||||
|
self._table_size = JIS_TABLE_SIZE
|
||||||
|
self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
|
||||||
|
|
||||||
|
def get_order(self, byte_str):
|
||||||
|
# for sjis encoding, we are interested
|
||||||
|
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
|
||||||
|
# second byte range: 0x40 -- 0x7e, 0x81 -- oxfe
|
||||||
|
# no validation needed here. State machine has done that
|
||||||
|
first_char, second_char = byte_str[0], byte_str[1]
|
||||||
|
if (first_char >= 0x81) and (first_char <= 0x9F):
|
||||||
|
order = 188 * (first_char - 0x81)
|
||||||
|
elif (first_char >= 0xE0) and (first_char <= 0xEF):
|
||||||
|
order = 188 * (first_char - 0xE0 + 31)
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
order = order + second_char - 0x40
|
||||||
|
if second_char > 0x7F:
|
||||||
|
order = -1
|
||||||
|
return order
|
||||||
|
|
||||||
|
|
||||||
|
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(EUCJPDistributionAnalysis, self).__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for euc-JP encoding, we are interested
        #   first  byte range: 0xa0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        char = byte_str[0]
        if char >= 0xA0:
            return 94 * (char - 0xA1) + byte_str[1] - 0xA1
        else:
            return -1

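And a sketch for EUCJPDistributionAnalysis (note that get_order tests char >= 0xA0 but offsets from 0xA1, exactly as the source above does; assumptions as before):

# Sketch only -- not part of this commit.
from chardet.chardistribution import EUCJPDistributionAnalysis

eucjp = EUCJPDistributionAnalysis()
assert eucjp.get_order(bytes([0xA1, 0xA1])) == 0    # 94*(0xA1-0xA1) + (0xA1-0xA1)
assert eucjp.get_order(bytes([0xA2, 0xA3])) == 96   # 94*1 + 2
assert eucjp.get_order(bytes([0x7F, 0xA1])) == -1   # first byte below 0xA0
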
@@ -0,0 +1,106 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState
from .charsetprober import CharSetProber


class CharSetGroupProber(CharSetProber):
    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        self._active_num = 0
        self.probers = []
        self._best_guess_prober = None

    def reset(self):
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    @property
    def language(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.language

    def feed(self, byte_str):
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.FOUND_IT:
                # a child prober is certain: adopt it and stop probing
                self._best_guess_prober = prober
                self._state = ProbingState.FOUND_IT
                return self.state
            elif state == ProbingState.NOT_ME:
                # rule this child out; give up once every child has said NOT_ME
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    self._state = ProbingState.NOT_ME
                    return self.state
        return self.state

    def get_confidence(self):
        state = self.state
        if state == ProbingState.FOUND_IT:
            return 0.99
        elif state == ProbingState.NOT_ME:
            return 0.01
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug('%s not active', prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug('%s %s confidence = %s',
                              prober.charset_name, prober.language, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        if not self._best_guess_prober:
            return 0.0
        return best_conf
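A hedged end-to-end sketch of how a group prober is driven. MBCSGroupProber is the CharSetGroupProber subclass that chardet ships for the multi-byte charsets; the sample text and the 4-byte chunking here are hypothetical:

# Sketch only -- not part of this commit.
from chardet.mbcsgroupprober import MBCSGroupProber

prober = MBCSGroupProber()
data = "こんにちは、世界".encode("shift_jis")
# feed() accepts partial input: each child prober votes, NOT_ME children
# are deactivated, and a FOUND_IT child short-circuits the whole group.
for i in range(0, len(data), 4):
    prober.feed(data[i:i + 4])
# get_confidence() returns the best child's confidence and caches that
# child, which the charset_name and language properties then expose.
print(prober.charset_name, prober.get_confidence())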
Some files were not shown because too many files have changed in this diff.