At July 17 C.
This commit is contained in:
parent 3cc4338e1c
commit 85e265e5c1
@@ -3,11 +3,11 @@ from openpyxl.cell import MergedCell
 from openpyxl.utils import get_column_letter, column_index_from_string


-def colIndexFromString(string):
+def col_index_from_string(string):
     return column_index_from_string(string) - 1


-def rowIndexFromString(string):
+def row_index_from_string(string):
     return int(string) - 1

@@ -31,7 +31,7 @@ class Excel:
             raise Exception('文件已经打开 | File has been opened.')
         if filename:
             self.__filename__ = os.path.abspath(filename)
-            self.__workbook__ = openpyxl.load_workbook(filename, read_only=read_only)
+            self.__workbook__ = openpyxl.load_workbook(filename, read_only=False)
             self.__autosave__ = auto_save
             self.select(sheet)
         else:
@@ -181,6 +181,8 @@ class Excel:
         :param filename: 另存为的文件路径,默认为保存文件 | The file path to save as, the default is to save the file.
         :return:
         """
+        if filename is None and self.__readonly__:
+            raise Exception('只读模式 | Read-only')
         self.__workbook__.save(filename or self.__filename__)
         return True

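Taken together, the two Excel changes move read-only enforcement out of openpyxl and into this class: load_workbook() is now always called with read_only=False (openpyxl's read-only mode returns cells that cannot be written or saved), and save() instead refuses an in-place save when the workbook was opened read-only. A minimal sketch of the resulting behavior, assuming open(read_only=True) sets self.__readonly__ as the new guard implies:

excel = Excel().open('example.xlsx', read_only=True)
try:
    excel.save()             # no filename while read-only: raises
except Exception as e:
    print(e)                 # 只读模式 | Read-only
excel.save('copy.xlsx')      # an explicit path bypasses the guard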
@@ -1,8 +1,8 @@
 import json


-def json_encode(data, indent=None):
-    return json.dumps(data, indent=indent)
+def json_encode(data, indent=None, unicode=True):
+    return json.dumps(data, indent=indent, ensure_ascii=unicode)


 def json_decode(data):
@@ -17,4 +17,5 @@ if __name__ == '__main__':
     """
     print(json_encode(data_encode))
     print(json_encode(data_encode, indent=4))
+    print(json_encode(data_encode, indent=4, unicode=False))
     print(json_decode(data_decode))
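The new unicode parameter maps directly onto json.dumps(ensure_ascii=...): the default unicode=True keeps the old \uXXXX escaping, and unicode=False emits raw UTF-8, which is what the added test line exercises. The stdlib behavior this relies on:

import json

data = {'msg': '你好'}
print(json.dumps(data))                      # {"msg": "\u4f60\u597d"}
print(json.dumps(data, ensure_ascii=False))  # {"msg": "你好"}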
@@ -1,8 +1,8 @@
 import yaml


-def yaml_encode(data, indent=None):
-    return yaml.dump(data, indent=indent, allow_unicode=True, sort_keys=False)
+def yaml_encode(data, indent=None, unicode=False):
+    return yaml.dump(data, indent=indent, allow_unicode=not unicode, sort_keys=False)


 def yaml_decode(data):
@@ -23,4 +23,5 @@ if __name__ == '__main__':
     b: 5
     """
     print(yaml_encode(data_encode))
+    print(yaml_encode(data_encode, unicode=True))
     print(yaml_decode(data_decode))
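yaml_encode wires its flag the other way around: unicode=False (the default) becomes allow_unicode=True, so non-ASCII text is emitted as-is, while unicode=True makes PyYAML escape it. The net effect is that unicode=True means "escaped output" for both helpers, but the defaults differ: json_encode escapes by default, yaml_encode does not. The underlying PyYAML behavior:

import yaml

data = {'msg': '你好'}
print(yaml.dump(data, allow_unicode=True))   # msg: 你好
print(yaml.dump(data, allow_unicode=False))  # msg: "\u4F60\u597D"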
@@ -0,0 +1,46 @@
+from Base.Class.Excel import *
+
+
+def read_view_dict(filename=None, sheet=None,
+                   area='', from_col=0, from_row=0, to_col=0, to_row=0, fields=None,
+                   auto_truncate=False):
+    view = Excel().open(filename=filename, read_only=True).select(sheet).cellGetView(
+        area=area, from_col=from_col, from_row=from_row, to_col=to_col, to_row=to_row
+    )
+    tabs = []
+    try:
+        for i in range(len(view[0])):
+            try:
+                if not isinstance(fields[i], str):
+                    raise TypeError()
+                tabs.append(fields[i])
+            except:
+                tabs.append('col' + str(i))
+    except:
+        pass
+    if len(tabs) != len(set(tabs)):
+        raise Exception('字段存在重复项目 | There are duplicates in the field')
+    data = []
+    for line in view:
+        if auto_truncate:
+            none_number = 0
+            for value in line:
+                if value is None:
+                    none_number += 1
+            if len(line) == none_number:
+                break
+        line_dict = {}
+        for i in range(len(line)):
+            line_dict[tabs[i]] = line[i]
+        data.append(line_dict)
+    return data
+
+
+if __name__ == '__main__':
+    from Base.Class.Json import *
+    from Base.Class.Yaml import *
+    print(json_encode(read_view_dict(filename='../../example.xlsx', sheet='表一', area='A2:F530', fields=['id', 'name', 'age', 'city', 'mark1', 'mark2'], auto_truncate=True), 4, False))
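Stripped of the Excel plumbing, read_view_dict is a header-resolution pass followed by a row-to-dict zip; a simplified sketch of just that core (no duplicate-field check, no auto_truncate):

def rows_to_dicts(view, fields=None):
    header = []
    for i in range(len(view[0]) if view else 0):
        try:
            name = fields[i]
            header.append(name if isinstance(name, str) else 'col' + str(i))
        except (TypeError, IndexError):  # fields is None or shorter than the row
            header.append('col' + str(i))
    return [dict(zip(header, row)) for row in view]

print(rows_to_dicts([(1, 'Ann'), (2, 'Bob')], fields=['id', 'name']))
# [{'id': 1, 'name': 'Ann'}, {'id': 2, 'name': 'Bob'}]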
@@ -0,0 +1,12 @@
+from Base.Class.Json import *
+from Base.Class.Yaml import *
+
+
+def auto_decode(data):
+    try:
+        try:
+            return json_decode(data)
+        except:
+            return yaml_decode(data)
+    except:
+        raise Exception('非Json或Yaml数据格式 | String is not a Json or Yaml format.')
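Since any JSON document is also valid YAML, trying json_decode first matters mainly for speed and fidelity; the outer except only fires when yaml_decode fails as well. Expected behavior, assuming the json_decode and yaml_decode helpers from the files above:

print(auto_decode('{"a": 1}'))  # json_decode succeeds -> {'a': 1}
print(auto_decode('a: 1'))      # JSON parse fails, yaml_decode -> {'a': 1}
auto_decode('{ not valid')      # both fail -> raises the bilingual Exception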
File diff suppressed because it is too large
@@ -0,0 +1,138 @@
+from Business.Class.ExcelUtils import *
+from Business.Class.JsonOrYaml import *
+from Base.Class.Http import *
+
+from Base.Debug.Decorator import *
+import pandas as pd
+from pandas.core.frame import DataFrame
+
+
+class RunTest:
+    # Global variable list
+    _g_ = {}
+    # Local variable list
+    _l_ = {}
+    _r_ = None
+    # Log/report output paths
+    fileLst = {"log": None, "report": None}
+    # List of case levels to run
+    caseLvl = None
+    viewLst = None
+    cellLst = None
+
+    def __init__(self, *args, **kwargs):
+        self._init(*args, **kwargs)
+
+    def _setRequestMode(self, mode):
+        self._r_ = [Request, Session][mode]()
+        return True
+
+    def _setOutput(self, **kwargs):
+        import os
+        for key in kwargs:
+            self.fileLst[key] = kwargs[key] and os.path.abspath(kwargs[key])
+        return True
+
+    def _matLevelThatRun(self, lv=None):
+        if lv is None:
+            return False
+        if self.caseLvl is None:
+            return True
+        return lv in self.caseLvl
+
+    def _init(self, workbook, data='数据配置', case='测试用例', level=None, log=None, report=None):
+        init = Excel().open(workbook, read_only=True)
+        init_data = auto_decode(init.select(data).cellGet(0, 0))
+        init_case = auto_decode(init.select(case).cellGet(0, 0))
+        self.cellLst = {
+            "ITEM":
+                auto_decode(init.select(case).cellGet(cell=init_case["fixeds"]["item"])),
+            "MODE":
+                auto_decode(init.select(case).cellGet(cell=init_case["fixeds"]["mode"]))
+        }
+        self.viewLst = {
+            "HTTP":
+                DataFrame(read_view_dict(filename=workbook, sheet=data,
+                                         area=init_data["tables"]["http"]["views"], fields=init_data["tables"]["http"]["field"],
+                                         auto_truncate=True)),
+            "BASE":
+                DataFrame(read_view_dict(filename=workbook, sheet=data,
+                                         area=init_data["tables"]["base"]["views"],
+                                         fields=init_data["tables"]["base"]["field"],
+                                         auto_truncate=True)),
+            "DATA":
+                DataFrame(read_view_dict(filename=workbook, sheet=data,
+                                         area=init_data["tables"]["data"]["views"],
+                                         fields=init_data["tables"]["data"]["field"],
+                                         auto_truncate=True)),
+            "CASE":
+                DataFrame(read_view_dict(filename=workbook, sheet=case,
+                                         area=init_case["tables"]["case"]["views"],
+                                         fields=init_case["tables"]["case"]["field"],
+                                         auto_truncate=True)),
+        }
+        init.exit()
+        self.levels(level)
+        self.report(log=log, report=report)
+        self._setRequestMode(self.cellLst["MODE"] in ['会话模式', 'Session'])
+        return True
+
+    def _run_mini(self, case_dict):
+        print(case_dict)
+
+    def report(self, log=None, report=None):
+        self._setOutput(log=log, report=report)
+        return self
+
+    def levels(self, level=None):
+        if isinstance(level, (list, tuple, str)):
+            self.caseLvl = level
+        return self
+
+
+if __name__ == '__main__':
+    r = RunTest('D:/Desktop/接口自动化测试用例.xlsx', '数据配置', '测试用例', level=['P0'], log='./1.txt', report='./1.html')
+    r._run_mini(r.viewLst["CASE"].loc[0].to_dict())
+
+    # print(tables)
+    # print(json_encode(tables, indent=4, unicode=False))
+    # print(json_encode(tables["HTTP"], indent=4, unicode=False))
+    # data = tables["HTTP"]
+    # data = data.where((data["Env"] == '测试环境') & (data["Name"] == 'HTTP0'), inplace=False).dropna(how='all')
+    # print(data.loc[:,"Name"])
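The _setRequestMode dispatch relies on bool being a subclass of int: the expression self.cellLst["MODE"] in ['会话模式', 'Session'] yields False (0) or True (1), which then indexes the two-element class list before it is called. The same trick with stdlib types:

classes = [dict, list]
print(classes[False]())  # {} (index 0)
print(classes[True]())   # [] (index 1)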
BIN  example.xlsx  (Binary file not shown.)
2    main7.py
@@ -9,6 +9,6 @@ if __name__ == '__main__':
     # print(excel.cellGetView('A1:C9'))
     # print(excel.cellGetView('1:9'))

-    print(excel.cellGetView('1:9'))
+    print(excel.cellGetView('D5:F12'))
     # excel.save()
     # print(excel.cellGetView(area='F6:F11'))
@@ -0,0 +1,24 @@
+import os, time
+from HTMLTestRunner import HTMLTestRunner
+from test_case import TestDemo
+import unittest
+
+
+base_path = os.path.dirname(__file__)
+report_path = base_path
+report_filename = os.path.join(report_path, 'report.html')
+
+case_suite = unittest.TestSuite()
+case_suite.addTest(TestDemo('test_one'))
+case_suite.addTest(TestDemo('test_two'))
+case_suite.addTest(TestDemo('test_tre'))
+
+
+def start():
+    with open(report_filename, 'wb') as f:
+        runner = HTMLTestRunner(stream=f, title='自动化测试报告', verbosity=2, description='描述', tester='Tester')
+        runner.run(case_suite)
+
+
+if __name__ == '__main__':
+    start()
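HTMLTestRunner is a third-party module, and constructor keywords such as title, description and tester vary between its forks, so that call is specific to whichever copy this project vendors. The same suite runs unchanged under the stdlib runner (console output only, no HTML report):

import unittest

runner = unittest.TextTestRunner(verbosity=2)
runner.run(case_suite)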
@@ -0,0 +1,23 @@
+import unittest
+
+
+class TestDemo(unittest.TestCase):
+    def test_one(self):
+        '''
+        哈哈
+        :return:
+        '''
+        assert 1 == 1
+
+    def test_two(self):
+        '''
+        呵呵
+        :return:
+        '''
+        assert 'H' in 'Hello!'
+
+    def test_tre(self):
+        assert 5 == 10, '断言失败'
+
+
+TestDemo.test_tre.__doc__ = "测试吖"
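The trailing TestDemo.test_tre.__doc__ assignment works because unittest labels a test with the first line of the test method's docstring (via shortDescription()), and a function's __doc__ is writable after the class body, so test_tre gets a display name without defining a docstring inline. A quick stdlib check:

import unittest

class Demo(unittest.TestCase):
    def test_doc(self):
        """original label"""
        self.assertTrue(True)

Demo.test_doc.__doc__ = "patched label"
print(Demo('test_doc').shortDescription())  # patched label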
@@ -1 +0,0 @@
-pip
@@ -1,20 +0,0 @@
-Copyright (c) 2008-2021 The pip developers (see AUTHORS.txt file)
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,91 +0,0 @@
(pip-21.1.2.dist-info/METADATA deleted: 91 lines of vendored pip 21.1.2 package metadata and README; diff omitted)
@@ -1,854 +0,0 @@
(pip-21.1.2.dist-info/RECORD deleted: the 854-line manifest of vendored pip 21.1.2 file paths and sha256 hashes, plus locally generated __pycache__/.pyc entries; diff omitted)
|
|
||||||
pip\_vendor\pep517\in_process\__pycache__,,
|
|
||||||
pip\_vendor\chardet\eucjpprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\webencodings\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\deprecation.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\status_codes.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\resources.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\progress_bars.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\filesystem.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\api.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\dom.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\parallel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\compat.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\euckrfreq.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\completion.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\envbuild.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\models.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\compat.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\tornadoweb.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\ssl_match_hostname\__pycache__,,
|
|
||||||
pip\_internal\network\cache.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\legacy\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\big5prober.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\check.cpython-310.pyc,,
|
|
||||||
pip-21.1.2.virtualenv,,
|
|
||||||
pip\_vendor\pep517\check.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\etree_lxml.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\pkg_resources.cpython-310.pyc,,
|
|
||||||
pip\_internal\metadata\pkg_resources.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\compat.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\core.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_trie\__pycache__,,
|
|
||||||
pip\_internal\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\_trie\_base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\url.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\index.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\euckrprober.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\_internal_utils.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\session.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\ntlmpool.cpython-310.pyc,,
|
|
||||||
pip\_internal\main.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\install\wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\download.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langhungarianmodel.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\autocompletion.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\backports\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\pyopenssl.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\build\wheel_legacy.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\virtualenv.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\caches\__pycache__,,
|
|
||||||
pip\_vendor\webencodings\labels.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\_structures.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\contrib\_securetransport\bindings.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\logging.cpython-310.pyc,,
|
|
||||||
pip\_internal\metadata\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\idnadata.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\subprocess.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\utils.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\__pycache__,,
|
|
||||||
pip\_vendor\certifi\__main__.cpython-310.pyc,,
|
|
||||||
pip\_internal\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\install.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langthaimodel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\metadata.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\encoding.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\target_python.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\debug.cpython-310.pyc,,
|
|
||||||
pip\_internal\index\collector.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\caches\redis_cache.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\charsetgroupprober.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\spinners.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\format_control.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_utils.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\__pycache__,,
|
|
||||||
pip\_internal\models\candidate.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\temp_dir.cpython-310.pyc,,
|
|
||||||
..\..\Scripts\pip-3.10.exe,,
|
|
||||||
pip\_internal\models\search_scope.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\locators.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\ssltransport.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\__pycache__,,
|
|
||||||
pip\_internal\resolution\resolvelib\resolver.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\compat.cpython-310.pyc,,
|
|
||||||
pip\_vendor\toml\encoder.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\help.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\filepost.cpython-310.pyc,,
|
|
||||||
pip\_internal\index\package_finder.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\entrypoints.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\sbcharsetprober.cpython-310.pyc,,
|
|
||||||
pip\_internal\configuration.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\__pycache__,,
|
|
||||||
pip\_internal\cli\parser.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\req_set.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\unpacking.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\jisfreq.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\status_codes.cpython-310.pyc,,
|
|
||||||
pip\_internal\distributions\__pycache__,,
|
|
||||||
pip\_internal\network\xmlrpc.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pkg_resources\__pycache__,,
|
|
||||||
pip\_vendor\packaging\specifiers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\caches\file_cache.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\filters\sanitizer.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\cp949prober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\inject_meta_charset.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\check.cpython-310.pyc,,
|
|
||||||
pip\_vendor\webencodings\__pycache__,,
|
|
||||||
pip\_vendor\distlib\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\request.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\wait.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\contrib\socks.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\reporter.cpython-310.pyc,,
|
|
||||||
..\..\Scripts\pip3.10.exe,,
|
|
||||||
pip\_vendor\toml\tz.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\_backport\sysconfig.cpython-310.pyc,,
|
|
||||||
pip\_vendor\progress\counter.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\__pycache__,,
|
|
||||||
pip\_internal\models\direct_url.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\setuptools_build.cpython-310.pyc,,
|
|
||||||
pip\_vendor\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\list.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\intranges.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\colorlog.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\__pycache__,,
|
|
||||||
pip\_internal\resolution\legacy\__pycache__,,
|
|
||||||
pip\_internal\network\download.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\html5parser.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\database.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langbulgarianmodel.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\__about__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pyparsing.cpython-310.pyc,,
|
|
||||||
pip\_vendor\webencodings\mklabels.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\ssl_.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\euctwprober.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\hashes.cpython-310.pyc,,
|
|
||||||
pip\_vendor\appdirs.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\scripts.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\_cmd.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distro.cpython-310.pyc,,
|
|
||||||
pip\_vendor\colorama\__pycache__,,
|
|
||||||
pip\_internal\commands\__pycache__,,
|
|
||||||
pip\_vendor\chardet\enums.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\__pycache__,,
|
|
||||||
pip\_vendor\requests\adapters.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_inputstream.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\build\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\__pycache__,,
|
|
||||||
pip\_internal\locations\__init__.cpython-310.pyc,,
|
|
||||||
pip\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\found_candidates.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\version.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\misc.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\latin1prober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\contrib\securetransport.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\help.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\_backport\tarfile.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\backports\__pycache__,,
|
|
||||||
pip\_vendor\cachecontrol\compat.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\direct_url_helpers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\sjisprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\appengine.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\distutils_args.cpython-310.pyc,,
|
|
||||||
pip\_vendor\progress\bar.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\compat\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\metadata\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\etree.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\markers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\__version__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\uts46data.cpython-310.pyc,,
|
|
||||||
pip\_vendor\webencodings\x_user_defined.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\req_install.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\wait.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\__pycache__,,
|
|
||||||
pip\_vendor\colorama\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\manifest.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langturkishmodel.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\main.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\install\__pycache__,,
|
|
||||||
pip\_vendor\chardet\utf8prober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\cli\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\requirements.cpython-310.pyc,,
|
|
||||||
pip\_vendor\colorama\win32.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\in_process\_in_process.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\build\metadata_legacy.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\_compat.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\self_outdated_check.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langhebrewmodel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\base.cpython-310.pyc,,
|
|
||||||
pip\_internal\index\sources.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\main_parser.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_trie\py.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treeadapters\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\cookies.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\timeout.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\__pycache__,,
|
|
||||||
pip\_internal\utils\glibc.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\base.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\configuration.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\before.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\_securetransport\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\legacy\resolver.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\urls.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\prepare.cpython-310.pyc,,
|
|
||||||
pip\_vendor\progress\spinner.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\install\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\escprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\nap.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\req_uninstall.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\appdirs.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\cmdoptions.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\uninstall.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\auth.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\_asyncio.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\build_env.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\__pycache__,,
|
|
||||||
pip\_internal\models\wheel.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\provider.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\response.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\constructors.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\connection.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\big5freq.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\alphabeticalattributes.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\chardistribution.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\euctwfreq.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\queue.cpython-310.pyc,,
|
|
||||||
pip\_internal\wheel_builder.cpython-310.pyc,,
|
|
||||||
pip\_vendor\colorama\initialise.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\__pycache__,,
|
|
||||||
pip\_vendor\__pycache__,,
|
|
||||||
pip\_internal\utils\datetime.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\__pycache__,,
|
|
||||||
pip\_internal\commands\cache.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\req_tracker.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\compat.cpython-310.pyc,,
|
|
||||||
pip\_vendor\toml\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\metadata\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\serialize.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treeadapters\sax.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\dom.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\models.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\tags.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\bazaar.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\filewrapper.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\exceptions.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\_typing.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\whitespace.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\candidates.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\subversion.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\poolmanager.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\versioncontrol.cpython-310.pyc,,
|
|
||||||
pip\_internal\locations\_sysconfig.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\adapter.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\structures.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\etree_lxml.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\connectionpool.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\build\__pycache__,,
|
|
||||||
pip\_internal\cache.cpython-310.pyc,,
|
|
||||||
pip\_internal\locations\__pycache__,,
|
|
||||||
pip\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\util\retry.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_tokenizer.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langgreekmodel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\lazy_wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\wrappers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\toml\decoder.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\genshi.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\version.cpython-310.pyc,,
|
|
||||||
pip\_vendor\progress\__pycache__,,
|
|
||||||
pip\_internal\index\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\show.cpython-310.pyc,,
|
|
||||||
..\..\Scripts\pip.exe,,
|
|
||||||
pip\_vendor\resolvelib\compat\__pycache__,,
|
|
||||||
pip\_vendor\requests\hooks.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\_utils.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\langrussianmodel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treeadapters\genshi.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\controller.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\resolvers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\fields.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\hash.cpython-310.pyc,,
|
|
||||||
pip\_vendor\certifi\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\lint.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\compat\collections_abc.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\cli\__pycache__,,
|
|
||||||
pip\_vendor\chardet\metadata\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\fallback.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\_securetransport\low_level.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_ihatexml.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\filters\optionaltags.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\__pycache__,,
|
|
||||||
pip\_internal\distributions\installed.cpython-310.pyc,,
|
|
||||||
pip\_vendor\progress\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\treeadapters\__pycache__,,
|
|
||||||
..\..\Scripts\pip3.exe,,
|
|
||||||
pip\_internal\cli\command_context.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\request.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\markers.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\filters\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pkg_resources\py31compat.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\install\editable_legacy.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\link.cpython-310.pyc,,
|
|
||||||
pip\_vendor\colorama\ansi.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\heuristics.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\contrib\_securetransport\__pycache__,,
|
|
||||||
pip\_internal\distributions\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\package_data.cpython-310.pyc,,
|
|
||||||
pip\_internal\vcs\git.cpython-310.pyc,,
|
|
||||||
pip\_vendor\certifi\core.cpython-310.pyc,,
|
|
||||||
pip\_internal\locations\_distutils.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\structs.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\universaldetector.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\exceptions.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\retry.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\response.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\util\connection.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\__pycache__,,
|
|
||||||
pip\_vendor\pep517\meta.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\sessions.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\mbcharsetprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\in_process\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\commands\freeze.cpython-310.pyc,,
|
|
||||||
pip\_vendor\idna\codec.cpython-310.pyc,,
|
|
||||||
pip\_internal\distributions\sdist.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\reporters.cpython-310.pyc,,
|
|
||||||
pip-21.1.2.dist-info\__pycache__,,
|
|
||||||
pip\_internal\locations\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\hebrewprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\index.cpython-310.pyc,,
|
|
||||||
pip\_internal\cli\req_command.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\packages.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\filetypes.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\utils.cpython-310.pyc,,
|
|
||||||
pip\_internal\pyproject.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\ssl_match_hostname\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\wrapper.cpython-310.pyc,,
|
|
||||||
pip\_vendor\toml\ordered.cpython-310.pyc,,
|
|
||||||
pip\_vendor\toml\__pycache__,,
|
|
||||||
pip\_vendor\packaging\utils.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\dirtools.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\__pycache__,,
|
|
||||||
pip\_internal\commands\search.cpython-310.pyc,,
|
|
||||||
pip\_vendor\pep517\build.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\escsm.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\__pycache__,,
|
|
||||||
pip\_vendor\colorama\ansitowin32.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\_version.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\_trie\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\__pycache__,,
|
|
||||||
pip\_vendor\chardet\compat.cpython-310.pyc,,
|
|
||||||
pip\__main__.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\packaging.cpython-310.pyc,,
|
|
||||||
pip\_vendor\msgpack\ext.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\codingstatemachine.cpython-310.pyc,,
|
|
||||||
pip-21.1.2.dist-info\INSTALLER,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\cli\chardetect.cpython-310.pyc,,
|
|
||||||
pip\_internal\network\auth.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\_backport\misc.cpython-310.pyc,,
|
|
||||||
pip\_internal\models\selection_prefs.cpython-310.pyc,,
|
|
||||||
pip\_internal\exceptions.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\sbcsgroupprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\backports\makefile.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\inject_securetransport.cpython-310.pyc,,
|
|
||||||
pip\_vendor\urllib3\packages\ssl_match_hostname\_implementation.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\constants.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\__pycache__,,
|
|
||||||
pip\_internal\models\scheme.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\providers.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\_backport\__pycache__,,
|
|
||||||
pip\_vendor\distlib\_backport\shutil.cpython-310.pyc,,
|
|
||||||
pip\_internal\resolution\resolvelib\base.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\metadata\languages.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\caches\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\index\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\contrib\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\cachecontrol\cache.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\certs.cpython-310.pyc,,
|
|
||||||
pip\_internal\req\req_file.cpython-310.pyc,,
|
|
||||||
pip\_vendor\resolvelib\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\gb2312freq.cpython-310.pyc,,
|
|
||||||
pip\_vendor\html5lib\treebuilders\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\requirements.cpython-310.pyc,,
|
|
||||||
pip\_vendor\tenacity\after.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\mbcsgroupprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\charsetprober.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\mbcssm.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\jpcntx.cpython-310.pyc,,
|
|
||||||
pip\_vendor\six.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\util.cpython-310.pyc,,
|
|
||||||
pip\_internal\utils\compatibility_tags.cpython-310.pyc,,
|
|
||||||
pip\_vendor\certifi\__pycache__,,
|
|
||||||
pip\_vendor\urllib3\contrib\_appengine_environ.cpython-310.pyc,,
|
|
||||||
pip\_vendor\webencodings\tests.cpython-310.pyc,,
|
|
||||||
pip\_vendor\chardet\metadata\__pycache__,,
|
|
||||||
pip\_vendor\html5lib\treewalkers\etree.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\__init__.cpython-310.pyc,,
|
|
||||||
pip\_internal\operations\build\metadata.cpython-310.pyc,,
|
|
||||||
pip\_internal\distributions\wheel.cpython-310.pyc,,
|
|
||||||
pip\_vendor\distlib\_backport\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\packaging\__init__.cpython-310.pyc,,
|
|
||||||
pip\_vendor\colorama\winterm.cpython-310.pyc,,
|
|
||||||
pip\_vendor\requests\exceptions.cpython-310.pyc,,
|
|
|
pip-21.1.2.dist-info/WHEEL
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.36.2)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
pip-21.1.2.dist-info/entry_points.txt
@@ -1,5 +0,0 @@
-[console_scripts]
-pip = pip._internal.cli.main:main
-pip3 = pip._internal.cli.main:main
-pip3.8 = pip._internal.cli.main:main
-
pip-21.1.2.dist-info/top_level.txt
@@ -1 +0,0 @@
-pip
pip/__init__.py
@@ -1,10 +1,9 @@
 from typing import List, Optional

-__version__ = "21.1.2"
+__version__ = "22.1.2"


-def main(args=None):
-    # type: (Optional[List[str]]) -> int
+def main(args: Optional[List[str]] = None) -> int:
     """This is an internal API only meant for use by pip's own console scripts.

     For additional details, see https://github.com/pypa/pip/issues/7498.
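
The version bump aside, this hunk shows the pattern the rest of the commit applies throughout pip's internals: Python-2-era `# type:` comments become inline annotations. A minimal before/after sketch (the function names here are illustrative, not pip's):

    from typing import List, Optional

    # Before: the annotation lives in a comment that only type checkers read.
    def run_old(args=None):
        # type: (Optional[List[str]]) -> int
        return 0

    # After: the same contract, inline and visible at runtime via __annotations__.
    def run_new(args: Optional[List[str]] = None) -> int:
        return 0
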
pip/_internal/__init__.py
@@ -1,10 +1,14 @@
 from typing import List, Optional

 import pip._internal.utils.inject_securetransport  # noqa
+from pip._internal.utils import _log
+
+# init_logging() must be called before any call to logging.getLogger()
+# which happens at import of most modules.
+_log.init_logging()


-def main(args=None):
-    # type: (Optional[List[str]]) -> int
+def main(args: (Optional[List[str]]) = None) -> int:
     """This is preserved for old console scripts that may still be referencing
     it.

pip/_internal/build_env.py
@@ -11,14 +11,16 @@ import zipfile
 from collections import OrderedDict
 from sysconfig import get_paths
 from types import TracebackType
-from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type
+from typing import TYPE_CHECKING, Generator, Iterable, List, Optional, Set, Tuple, Type

 from pip._vendor.certifi import where
-from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.version import Version

 from pip import __file__ as pip_location
 from pip._internal.cli.spinners import open_spinner
 from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
+from pip._internal.metadata import get_default_environment, get_environment
 from pip._internal.utils.subprocess import call_subprocess
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds

@@ -29,20 +31,18 @@ logger = logging.getLogger(__name__)


 class _Prefix:
-
-    def __init__(self, path):
-        # type: (str) -> None
+    def __init__(self, path: str) -> None:
         self.path = path
         self.setup = False
         self.bin_dir = get_paths(
-            'nt' if os.name == 'nt' else 'posix_prefix',
-            vars={'base': path, 'platbase': path}
-        )['scripts']
+            "nt" if os.name == "nt" else "posix_prefix",
+            vars={"base": path, "platbase": path},
+        )["scripts"]
         self.lib_dirs = get_prefixed_libs(path)


 @contextlib.contextmanager
-def _create_standalone_pip() -> Iterator[str]:
+def _create_standalone_pip() -> Generator[str, None, None]:
     """Create a "standalone pip" zip file.

     The zip file's content is identical to the currently-running pip.
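
Why `Iterator[str]` becomes `Generator[str, None, None]`: for a function decorated with `@contextlib.contextmanager`, the `Generator` form spells out the yield, send, and return types explicitly, where `Iterator[str]` leaves the last two implicit. A runnable sketch (the resource name is made up):

    import contextlib
    from typing import Generator

    @contextlib.contextmanager
    def managed() -> Generator[str, None, None]:
        # yields str, accepts nothing via send(), returns nothing
        yield "resource"

    with managed() as r:
        print(r)  # prints: resource
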
@@ -68,22 +68,18 @@ def _create_standalone_pip() -> Iterator[str]:


 class BuildEnvironment:
-    """Creates and manages an isolated environment to install build deps
-    """
+    """Creates and manages an isolated environment to install build deps"""

-    def __init__(self):
-        # type: () -> None
-        temp_dir = TempDirectory(
-            kind=tempdir_kinds.BUILD_ENV, globally_managed=True
-        )
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)

         self._prefixes = OrderedDict(
             (name, _Prefix(os.path.join(temp_dir.path, name)))
-            for name in ('normal', 'overlay')
+            for name in ("normal", "overlay")
         )

-        self._bin_dirs = []  # type: List[str]
-        self._lib_dirs = []  # type: List[str]
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
         for prefix in reversed(list(self._prefixes.values())):
             self._bin_dirs.append(prefix.bin_dir)
             self._lib_dirs.extend(prefix.lib_dirs)
@@ -94,12 +90,15 @@ class BuildEnvironment:
         system_sites = {
             os.path.normcase(site) for site in (get_purelib(), get_platlib())
         }
-        self._site_dir = os.path.join(temp_dir.path, 'site')
+        self._site_dir = os.path.join(temp_dir.path, "site")
         if not os.path.exists(self._site_dir):
             os.mkdir(self._site_dir)
-        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
-            fp.write(textwrap.dedent(
-                '''
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
                 import os, site, sys

                 # First, drop system-sites related paths.
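
The surrounding code relies on a standard interpreter hook: a `sitecustomize.py` found on `PYTHONPATH` is imported automatically at startup, so it can rewrite `sys.path` before any build-backend code runs. A self-contained sketch of that trick (the paths are temporary placeholders, not pip's):

    import os
    import subprocess
    import sys
    import tempfile
    import textwrap

    site_dir = tempfile.mkdtemp()
    with open(os.path.join(site_dir, "sitecustomize.py"), "w", encoding="utf-8") as fp:
        fp.write(textwrap.dedent(
            """
            import sys
            print("sitecustomize ran with", len(sys.path), "sys.path entries")
            """
        ))

    # The child interpreter imports sitecustomize.py from PYTHONPATH at startup.
    env = dict(os.environ, PYTHONPATH=site_dir, PYTHONNOUSERSITE="1")
    subprocess.run([sys.executable, "-c", "pass"], env=env, check=True)
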
@@ -122,89 +121,98 @@ class BuildEnvironment:
                 for path in {lib_dirs!r}:
                     assert not path in sys.path
                     site.addsitedir(path)
-                '''
-            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
+                """
+                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+            )

-    def __enter__(self):
-        # type: () -> None
+    def __enter__(self) -> None:
         self._save_env = {
             name: os.environ.get(name, None)
-            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
+            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
         }

         path = self._bin_dirs[:]
-        old_path = self._save_env['PATH']
+        old_path = self._save_env["PATH"]
         if old_path:
             path.extend(old_path.split(os.pathsep))

         pythonpath = [self._site_dir]

-        os.environ.update({
-            'PATH': os.pathsep.join(path),
-            'PYTHONNOUSERSITE': '1',
-            'PYTHONPATH': os.pathsep.join(pythonpath),
-        })
+        os.environ.update(
+            {
+                "PATH": os.pathsep.join(path),
+                "PYTHONNOUSERSITE": "1",
+                "PYTHONPATH": os.pathsep.join(pythonpath),
+            }
+        )

     def __exit__(
         self,
-        exc_type,  # type: Optional[Type[BaseException]]
-        exc_val,  # type: Optional[BaseException]
-        exc_tb  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
         for varname, old_value in self._save_env.items():
             if old_value is None:
                 os.environ.pop(varname, None)
             else:
                 os.environ[varname] = old_value

-    def check_requirements(self, reqs):
-        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
+    def check_requirements(
+        self, reqs: Iterable[str]
+    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
         """Return 2 sets:
         - conflicting requirements: set of (installed, wanted) reqs tuples
         - missing requirements: set of reqs
         """
         missing = set()
         conflicting = set()
         if reqs:
-            ws = WorkingSet(self._lib_dirs)
-            for req in reqs:
-                try:
-                    if ws.find(Requirement.parse(req)) is None:
-                        missing.add(req)
-                except VersionConflict as e:
-                    conflicting.add((str(e.args[0].as_requirement()),
-                                     str(e.args[1])))
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
+            for req_str in reqs:
+                req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+                    continue
+                dist = env.get_distribution(req.name)
+                if not dist:
+                    missing.add(req_str)
+                    continue
+                if isinstance(dist.version, Version):
+                    installed_req_str = f"{req.name}=={dist.version}"
+                else:
+                    installed_req_str = f"{req.name}==={dist.version}"
+                if not req.specifier.contains(dist.version, prereleases=True):
+                    conflicting.add((installed_req_str, req_str))
+                # FIXME: Consider direct URL?
         return conflicting, missing

     def install_requirements(
         self,
-        finder,  # type: PackageFinder
-        requirements,  # type: Iterable[str]
-        prefix_as_string,  # type: str
-        message  # type: str
-    ):
-        # type: (...) -> None
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
         prefix = self._prefixes[prefix_as_string]
         assert not prefix.setup
         prefix.setup = True
         if not requirements:
             return
         with contextlib.ExitStack() as ctx:
-            # TODO: Remove this block when dropping 3.6 support. Python 3.6
-            # lacks importlib.resources and pep517 has issues loading files in
-            # a zip, so we fallback to the "old" method by adding the current
-            # pip directory to the child process's sys.path.
-            if sys.version_info < (3, 7):
-                pip_runnable = os.path.dirname(pip_location)
-            else:
-                pip_runnable = ctx.enter_context(_create_standalone_pip())
+            pip_runnable = ctx.enter_context(_create_standalone_pip())
             self._install_requirements(
                 pip_runnable,
                 finder,
                 requirements,
                 prefix,
-                message,
+                kind=kind,
             )

     @staticmethod
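
The rewritten `check_requirements` replaces `pkg_resources.WorkingSet` with pip's metadata abstraction plus `packaging` primitives. The core check can be reproduced with the standalone packaging distribution (pip itself uses its vendored copy; the requirement string and installed version below are made-up examples):

    from packaging.requirements import Requirement

    req = Requirement('setuptools>=40.8.0; python_version >= "3.6"')

    # Markers are evaluated with an empty "extra", mirroring the hunk above.
    if req.marker is None or req.marker.evaluate({"extra": ""}):
        installed = "40.0.0"  # stand-in for env.get_distribution(req.name).version
        if not req.specifier.contains(installed, prereleases=True):
            print(f"conflict: {req.name}=={installed} does not satisfy {req.specifier}")
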
@@ -213,74 +221,84 @@ class BuildEnvironment:
         finder: "PackageFinder",
         requirements: Iterable[str],
         prefix: _Prefix,
-        message: str,
+        *,
+        kind: str,
     ) -> None:
-        args = [
-            sys.executable, pip_runnable, 'install',
-            '--ignore-installed', '--no-user', '--prefix', prefix.path,
-            '--no-warn-script-location',
-        ]  # type: List[str]
+        args: List[str] = [
+            sys.executable,
+            pip_runnable,
+            "install",
+            "--ignore-installed",
+            "--no-user",
+            "--prefix",
+            prefix.path,
+            "--no-warn-script-location",
+        ]
         if logger.getEffectiveLevel() <= logging.DEBUG:
-            args.append('-v')
-        for format_control in ('no_binary', 'only_binary'):
+            args.append("-v")
+        for format_control in ("no_binary", "only_binary"):
             formats = getattr(finder.format_control, format_control)
-            args.extend(('--' + format_control.replace('_', '-'),
-                         ','.join(sorted(formats or {':none:'}))))
+            args.extend(
+                (
+                    "--" + format_control.replace("_", "-"),
+                    ",".join(sorted(formats or {":none:"})),
+                )
+            )

         index_urls = finder.index_urls
         if index_urls:
-            args.extend(['-i', index_urls[0]])
+            args.extend(["-i", index_urls[0]])
             for extra_index in index_urls[1:]:
-                args.extend(['--extra-index-url', extra_index])
+                args.extend(["--extra-index-url", extra_index])
         else:
-            args.append('--no-index')
+            args.append("--no-index")
         for link in finder.find_links:
-            args.extend(['--find-links', link])
+            args.extend(["--find-links", link])

         for host in finder.trusted_hosts:
-            args.extend(['--trusted-host', host])
+            args.extend(["--trusted-host", host])
         if finder.allow_all_prereleases:
-            args.append('--pre')
+            args.append("--pre")
         if finder.prefer_binary:
-            args.append('--prefer-binary')
-        args.append('--')
+            args.append("--prefer-binary")
+        args.append("--")
         args.extend(requirements)
         extra_environ = {"_PIP_STANDALONE_CERT": where()}
-        with open_spinner(message) as spinner:
-            call_subprocess(args, spinner=spinner, extra_environ=extra_environ)
+        with open_spinner(f"Installing {kind}") as spinner:
+            call_subprocess(
+                args,
+                command_desc=f"pip subprocess to install {kind}",
+                spinner=spinner,
+                extra_environ=extra_environ,
+            )


 class NoOpBuildEnvironment(BuildEnvironment):
-    """A no-op drop-in replacement for BuildEnvironment
-    """
+    """A no-op drop-in replacement for BuildEnvironment"""

-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
         pass

-    def __enter__(self):
-        # type: () -> None
+    def __enter__(self) -> None:
         pass

     def __exit__(
         self,
-        exc_type,  # type: Optional[Type[BaseException]]
-        exc_val,  # type: Optional[BaseException]
-        exc_tb  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
         pass

-    def cleanup(self):
-        # type: () -> None
+    def cleanup(self) -> None:
         pass

     def install_requirements(
         self,
-        finder,  # type: PackageFinder
-        requirements,  # type: Iterable[str]
-        prefix_as_string,  # type: str
-        message  # type: str
-    ):
-        # type: (...) -> None
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
         raise NotImplementedError()
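
With `message` replaced by the keyword-only `kind`, the spinner text and subprocess description are derived inside the method instead of being passed fully formed, and callers name the argument explicitly (e.g. `kind="build dependencies"`). For reference, the flag list `_install_requirements` assembles comes out roughly like this (every value below is hypothetical):

    import sys

    pip_runnable = "/tmp/standalone-pip.zip"  # placeholder path
    prefix_path = "/tmp/build-env/overlay"    # placeholder path

    args = [
        sys.executable, pip_runnable, "install",
        "--ignore-installed", "--no-user", "--prefix", prefix_path,
        "--no-warn-script-location",
        "-i", "https://pypi.org/simple",
        "--",
        "setuptools>=40.8.0", "wheel",
    ]
    print(" ".join(args))
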
pip/_internal/cache.py
@@ -20,8 +20,7 @@ from pip._internal.utils.urls import path_to_url
 logger = logging.getLogger(__name__)


-def _hash_dict(d):
-    # type: (Dict[str, str]) -> str
+def _hash_dict(d: Dict[str, str]) -> str:
     """Return a stable sha224 of a dictionary."""
     s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
     return hashlib.sha224(s.encode("ascii")).hexdigest()
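
`_hash_dict` itself is unchanged by this hunk and is small enough to exercise directly; `sort_keys=True` is what makes the digest independent of insertion order:

    import hashlib
    import json

    def _hash_dict(d):
        # Stable sha224 of a dictionary (body copied from the hunk above).
        s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
        return hashlib.sha224(s.encode("ascii")).hexdigest()

    # Same digest regardless of key order:
    assert _hash_dict({"a": "1", "b": "2"}) == _hash_dict({"b": "2", "a": "1"})
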
@@ -31,15 +30,16 @@ class Cache:
     """An abstract class - provides cache directories for data from links


     :param cache_dir: The root of the cache.
     :param format_control: An object of FormatControl class to limit
         binaries being read from the cache.
     :param allowed_formats: which formats of files the cache should store.
         ('binary' and 'source' are the only allowed values)
     """

-    def __init__(self, cache_dir, format_control, allowed_formats):
-        # type: (str, FormatControl, Set[str]) -> None
+    def __init__(
+        self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
+    ) -> None:
         super().__init__()
         assert not cache_dir or os.path.isabs(cache_dir)
         self.cache_dir = cache_dir or None
@@ -49,10 +49,8 @@ class Cache:
         _valid_formats = {"source", "binary"}
         assert self.allowed_formats.union(_valid_formats) == _valid_formats

-    def _get_cache_path_parts(self, link):
-        # type: (Link) -> List[str]
-        """Get parts of part that must be os.path.joined with cache_dir
-        """
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of part that must be os.path.joined with cache_dir"""

         # We want to generate an url to use as our cache key, we don't want to
         # just re-use the URL because it might have other items in the fragment
@@ -84,19 +82,12 @@ class Cache:

         return parts

-    def _get_candidates(self, link, canonical_package_name):
-        # type: (Link, str) -> List[Any]
-        can_not_cache = (
-            not self.cache_dir or
-            not canonical_package_name or
-            not link
-        )
+    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+        can_not_cache = not self.cache_dir or not canonical_package_name or not link
         if can_not_cache:
             return []

-        formats = self.format_control.get_allowed_formats(
-            canonical_package_name
-        )
+        formats = self.format_control.get_allowed_formats(canonical_package_name)
         if not self.allowed_formats.intersection(formats):
             return []

@@ -107,19 +98,16 @@ class Cache:
             candidates.append((candidate, path))
         return candidates

-    def get_path_for_link(self, link):
-        # type: (Link) -> str
-        """Return a directory to store cached items in for link.
-        """
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached items in for link."""
         raise NotImplementedError()

     def get(
         self,
-        link,  # type: Link
-        package_name,  # type: Optional[str]
-        supported_tags,  # type: List[Tag]
-    ):
-        # type: (...) -> Link
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
         """Returns a link to a cached item if it exists, otherwise returns the
         passed link.
         """
@@ -127,15 +115,12 @@ class Cache:


 class SimpleWheelCache(Cache):
-    """A cache of wheels for future installs.
-    """
+    """A cache of wheels for future installs."""

-    def __init__(self, cache_dir, format_control):
-        # type: (str, FormatControl) -> None
+    def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
         super().__init__(cache_dir, format_control, {"binary"})

-    def get_path_for_link(self, link):
-        # type: (Link) -> str
+    def get_path_for_link(self, link: Link) -> str:
         """Return a directory to store cached wheels for link

         Because there are M wheels for any one sdist, we provide a directory
@@ -157,20 +142,17 @@ class SimpleWheelCache(Cache):

     def get(
         self,
-        link,  # type: Link
-        package_name,  # type: Optional[str]
-        supported_tags,  # type: List[Tag]
-    ):
-        # type: (...) -> Link
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
         candidates = []

         if not package_name:
             return link

         canonical_package_name = canonicalize_name(package_name)
-        for wheel_name, wheel_dir in self._get_candidates(
-            link, canonical_package_name
-        ):
+        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
             try:
                 wheel = Wheel(wheel_name)
             except InvalidWheelFilename:
@@ -179,7 +161,9 @@ class SimpleWheelCache(Cache):
                 logger.debug(
                     "Ignoring cached wheel %s for %s as it "
                     "does not match the expected distribution name %s.",
-                    wheel_name, link, package_name,
+                    wheel_name,
+                    link,
+                    package_name,
                 )
                 continue
             if not wheel.supported(supported_tags):
@@ -201,11 +185,9 @@ class SimpleWheelCache(Cache):


 class EphemWheelCache(SimpleWheelCache):
-    """A SimpleWheelCache that creates it's own temporary cache directory
-    """
+    """A SimpleWheelCache that creates it's own temporary cache directory"""

-    def __init__(self, format_control):
-        # type: (FormatControl) -> None
+    def __init__(self, format_control: FormatControl) -> None:
         self._temp_dir = TempDirectory(
             kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
             globally_managed=True,
@@ -217,8 +199,8 @@ class EphemWheelCache(SimpleWheelCache):
 class CacheEntry:
     def __init__(
         self,
-        link,  # type: Link
-        persistent,  # type: bool
+        link: Link,
+        persistent: bool,
     ):
         self.link = link
         self.persistent = persistent
@@ -231,27 +213,23 @@ class WheelCache(Cache):
     when a certain link is not found in the simple wheel cache first.
     """

-    def __init__(self, cache_dir, format_control):
-        # type: (str, FormatControl) -> None
-        super().__init__(cache_dir, format_control, {'binary'})
+    def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
+        super().__init__(cache_dir, format_control, {"binary"})
         self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
         self._ephem_cache = EphemWheelCache(format_control)

-    def get_path_for_link(self, link):
-        # type: (Link) -> str
+    def get_path_for_link(self, link: Link) -> str:
         return self._wheel_cache.get_path_for_link(link)

-    def get_ephem_path_for_link(self, link):
-        # type: (Link) -> str
+    def get_ephem_path_for_link(self, link: Link) -> str:
         return self._ephem_cache.get_path_for_link(link)

     def get(
         self,
-        link,  # type: Link
-        package_name,  # type: Optional[str]
-        supported_tags,  # type: List[Tag]
-    ):
-        # type: (...) -> Link
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
         cache_entry = self.get_cache_entry(link, package_name, supported_tags)
         if cache_entry is None:
             return link
@ -259,11 +237,10 @@ class WheelCache(Cache):
|
||||||
|
|
||||||
def get_cache_entry(
|
def get_cache_entry(
|
||||||
self,
|
self,
|
||||||
link, # type: Link
|
link: Link,
|
||||||
package_name, # type: Optional[str]
|
package_name: Optional[str],
|
||||||
supported_tags, # type: List[Tag]
|
supported_tags: List[Tag],
|
||||||
):
|
) -> Optional[CacheEntry]:
|
||||||
# type: (...) -> Optional[CacheEntry]
|
|
||||||
"""Returns a CacheEntry with a link to a cached item if it exists or
|
"""Returns a CacheEntry with a link to a cached item if it exists or
|
||||||
None. The cache entry indicates if the item was found in the persistent
|
None. The cache entry indicates if the item was found in the persistent
|
||||||
or ephemeral cache.
|
or ephemeral cache.
|
||||||
|
|
|
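For orientation, a toy model of the two-tier lookup that get_cache_entry documents; the string keys and tuple return here are illustrative stand-ins, not pip's actual Link/CacheEntry types:

    from typing import Dict, Optional, Tuple

    class TwoTierCache:
        """Toy model: a persistent store consulted first, then an ephemeral
        one; the bool mirrors CacheEntry.persistent in the diff above."""

        def __init__(self) -> None:
            self.persistent: Dict[str, str] = {}
            self.ephemeral: Dict[str, str] = {}

        def get_cache_entry(self, key: str) -> Optional[Tuple[str, bool]]:
            if key in self.persistent:
                return (self.persistent[key], True)
            if key in self.ephemeral:
                return (self.ephemeral[key], False)
            return None

    cache = TwoTierCache()
    cache.ephemeral["pkg"] = "/tmp/pkg.whl"
    print(cache.get_cache_entry("pkg"))  # ('/tmp/pkg.whl', False)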
@@ -9,11 +9,10 @@ from typing import Any, Iterable, List, Optional

 from pip._internal.cli.main_parser import create_main_parser
 from pip._internal.commands import commands_dict, create_command
-from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.metadata import get_default_environment


-def autocomplete():
-    # type: () -> None
+def autocomplete() -> None:
     """Entry Point for completion of main and subcommand options."""
     # Don't complete if user hasn't sourced bash_completion file.
     if "PIP_AUTO_COMPLETE" not in os.environ:

@@ -30,7 +29,7 @@ def autocomplete():
     options = []

     # subcommand
-    subcommand_name = None  # type: Optional[str]
+    subcommand_name: Optional[str] = None
     for word in cwords:
         if word in subcommands:
             subcommand_name = word

@@ -46,11 +45,13 @@ def autocomplete():
             "uninstall",
         ]
         if should_list_installed:
+            env = get_default_environment()
             lc = current.lower()
             installed = [
-                dist.key
-                for dist in get_installed_distributions(local_only=True)
-                if dist.key.startswith(lc) and dist.key not in cwords[1:]
+                dist.canonical_name
+                for dist in env.iter_installed_distributions(local_only=True)
+                if dist.canonical_name.startswith(lc)
+                and dist.canonical_name not in cwords[1:]
             ]
             # if there are no dists installed, fall back to option completion
             if installed:

@@ -58,6 +59,14 @@ def autocomplete():
                 print(dist)
             sys.exit(1)

+        should_list_installables = (
+            not current.startswith("-") and subcommand_name == "install"
+        )
+        if should_list_installables:
+            for path in auto_complete_paths(current, "path"):
+                print(path)
+            sys.exit(1)
+
         subcommand = create_command(subcommand_name)

         for opt in subcommand.parser.option_list_all:
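The completion code now asks an environment object for installed distributions instead of the old get_installed_distributions helper. A rough standalone equivalent using only the standard library (the function name and exact behavior are illustrative, not pip's API):

    import importlib.metadata
    from typing import List

    def complete_installed(prefix: str) -> List[str]:
        """List installed distribution names that start with ``prefix``."""
        prefix = prefix.lower()
        names = {
            (dist.metadata["Name"] or "")
            for dist in importlib.metadata.distributions()
        }
        return sorted(n for n in names if n and n.lower().startswith(prefix))

    print(complete_installed("pi"))  # e.g. ['pip']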
@@ -107,8 +116,9 @@ def autocomplete():
     sys.exit(1)


-def get_path_completion_type(cwords, cword, opts):
-    # type: (List[str], int, Iterable[Any]) -> Optional[str]
+def get_path_completion_type(
+    cwords: List[str], cword: int, opts: Iterable[Any]
+) -> Optional[str]:
     """Get the type of path completion (``file``, ``dir``, ``path`` or None)

     :param cwords: same as the environmental variable ``COMP_WORDS``

@@ -130,14 +140,13 @@ def get_path_completion_type(cwords, cword, opts):
     return None


-def auto_complete_paths(current, completion_type):
-    # type: (str, str) -> Iterable[str]
+def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
     """If ``completion_type`` is ``file`` or ``path``, list all regular files
     and directories starting with ``current``; otherwise only list directories
     starting with ``current``.

     :param current: The word to be completed
-    :param completion_type: path completion type(`file`, `path` or `dir`)i
+    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
     :return: A generator of regular files and/or directories
     """
     directory, filename = os.path.split(current)
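As a sketch of what a path completer like auto_complete_paths does (independent of pip's internals; the os.scandir approach here is my own choice):

    import os
    from typing import Iterator

    def complete_paths(current: str, dirs_only: bool = False) -> Iterator[str]:
        """Yield paths in ``current``'s directory that extend ``current``."""
        directory, prefix = os.path.split(current)
        for entry in os.scandir(directory or "."):
            if not entry.name.startswith(prefix):
                continue
            if entry.is_dir():
                # Trailing separator lets the shell keep completing inside.
                yield os.path.join(directory, entry.name) + os.sep
            elif not dirs_only:
                yield os.path.join(directory, entry.name)

    # Example: complete "./se" to "./setup.py", "./src/", ...
    print(sorted(complete_paths("./se")))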
@@ -1,5 +1,6 @@
 """Base Command class, and related routines"""

+import functools
 import logging
 import logging.config
 import optparse

@@ -7,7 +8,9 @@ import os
 import sys
 import traceback
 from optparse import Values
-from typing import Any, List, Optional, Tuple
+from typing import Any, Callable, List, Optional, Tuple

+from pip._vendor.rich import traceback as rich_traceback

 from pip._internal.cli import cmdoptions
 from pip._internal.cli.command_context import CommandContextMixIn

@@ -21,12 +24,12 @@ from pip._internal.cli.status_codes import (
 from pip._internal.exceptions import (
     BadCommand,
     CommandError,
+    DiagnosticPipError,
     InstallationError,
     NetworkConnectionError,
     PreviousBuildDirError,
     UninstallationError,
 )
-from pip._internal.utils.deprecation import deprecated
 from pip._internal.utils.filesystem import check_path_owner
 from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
 from pip._internal.utils.misc import get_prog, normalize_path

@@ -40,11 +43,10 @@ logger = logging.getLogger(__name__)


 class Command(CommandContextMixIn):
-    usage = None  # type: str
-    ignore_require_venv = False  # type: bool
+    usage: str = ""
+    ignore_require_venv: bool = False

-    def __init__(self, name, summary, isolated=False):
-        # type: (str, str, bool) -> None
+    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
         super().__init__()

         self.name = name

@@ -59,7 +61,7 @@ class Command(CommandContextMixIn):
             isolated=isolated,
         )

-        self.tempdir_registry = None  # type: Optional[TempDirRegistry]
+        self.tempdir_registry: Optional[TempDirRegistry] = None

         # Commands should add options to this option group
         optgroup_name = f"{self.name.capitalize()} Options"

@@ -74,12 +76,10 @@ class Command(CommandContextMixIn):

         self.add_options()

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         pass

-    def handle_pip_version_check(self, options):
-        # type: (Values) -> None
+    def handle_pip_version_check(self, options: Values) -> None:
         """
         This is a no-op so that commands by default do not do the pip version
         check.
@@ -88,25 +88,21 @@ class Command(CommandContextMixIn):
         # are present.
         assert not hasattr(options, "no_index")

-    def run(self, options, args):
-        # type: (Values, List[Any]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         raise NotImplementedError

-    def parse_args(self, args):
-        # type: (List[str]) -> Tuple[Any, Any]
+    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
         # factored out for testability
         return self.parser.parse_args(args)

-    def main(self, args):
-        # type: (List[str]) -> int
+    def main(self, args: List[str]) -> int:
         try:
             with self.main_context():
                 return self._main(args)
         finally:
             logging.shutdown()

-    def _main(self, args):
-        # type: (List[str]) -> int
+    def _main(self, args: List[str]) -> int:
         # We must initialize this before the tempdir manager, otherwise the
         # configuration would not be accessible by the time we clean up the
         # tempdir manager.

@@ -155,20 +151,6 @@ class Command(CommandContextMixIn):
             )
             options.cache_dir = None

-        if getattr(options, "build_dir", None):
-            deprecated(
-                reason=(
-                    "The -b/--build/--build-dir/--build-directory "
-                    "option is deprecated and has no effect anymore."
-                ),
-                replacement=(
-                    "use the TMPDIR/TEMP/TMP environment variable, "
-                    "possibly combined with --no-clean"
-                ),
-                gone_in="21.3",
-                issue=8333,
-            )
-
         if "2020-resolver" in options.features_enabled:
             logger.warning(
                 "--use-feature=2020-resolver no longer has any effect, "

@@ -176,46 +158,66 @@ class Command(CommandContextMixIn):
                 "This will become an error in pip 21.0."
             )

+        def intercepts_unhandled_exc(
+            run_func: Callable[..., int]
+        ) -> Callable[..., int]:
+            @functools.wraps(run_func)
+            def exc_logging_wrapper(*args: Any) -> int:
+                try:
+                    status = run_func(*args)
+                    assert isinstance(status, int)
+                    return status
+                except DiagnosticPipError as exc:
+                    logger.error("[present-rich] %s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except PreviousBuildDirError as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return PREVIOUS_BUILD_DIR_ERROR
+                except (
+                    InstallationError,
+                    UninstallationError,
+                    BadCommand,
+                    NetworkConnectionError,
+                ) as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except CommandError as exc:
+                    logger.critical("%s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BrokenStdoutLoggingError:
+                    # Bypass our logger and write any remaining messages to
+                    # stderr because stdout no longer works.
+                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+                    if level_number <= logging.DEBUG:
+                        traceback.print_exc(file=sys.stderr)
+
+                    return ERROR
+                except KeyboardInterrupt:
+                    logger.critical("Operation cancelled by user")
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BaseException:
+                    logger.critical("Exception:", exc_info=True)
+
+                    return UNKNOWN_ERROR
+
+            return exc_logging_wrapper
+
         try:
-            status = self.run(options, args)
-            assert isinstance(status, int)
-            return status
-        except PreviousBuildDirError as exc:
-            logger.critical(str(exc))
-            logger.debug("Exception information:", exc_info=True)
-
-            return PREVIOUS_BUILD_DIR_ERROR
-        except (
-            InstallationError,
-            UninstallationError,
-            BadCommand,
-            NetworkConnectionError,
-        ) as exc:
-            logger.critical(str(exc))
-            logger.debug("Exception information:", exc_info=True)
-
-            return ERROR
-        except CommandError as exc:
-            logger.critical("%s", exc)
-            logger.debug("Exception information:", exc_info=True)
-
-            return ERROR
-        except BrokenStdoutLoggingError:
-            # Bypass our logger and write any remaining messages to stderr
-            # because stdout no longer works.
-            print("ERROR: Pipe to stdout was broken", file=sys.stderr)
-            if level_number <= logging.DEBUG:
-                traceback.print_exc(file=sys.stderr)
-
-            return ERROR
-        except KeyboardInterrupt:
-            logger.critical("Operation cancelled by user")
-            logger.debug("Exception information:", exc_info=True)
-
-            return ERROR
-        except BaseException:
-            logger.critical("Exception:", exc_info=True)
-
-            return UNKNOWN_ERROR
+            if not options.debug_mode:
+                run = intercepts_unhandled_exc(self.run)
+            else:
+                run = self.run
+                rich_traceback.install(show_locals=True)
+            return run(options, args)
         finally:
             self.handle_pip_version_check(options)
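The new code wraps a command's run function so that expected failures are logged and mapped to exit codes, while --debug lets exceptions propagate to rich's traceback handler. A stripped-down sketch of the same decorator idea (the status-code constant is invented for illustration):

    import functools
    import logging
    from typing import Any, Callable

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("base-command-demo")
    ERROR = 1  # stand-in for pip's status-code constants

    def intercepts_unhandled_exc(run_func: Callable[..., int]) -> Callable[..., int]:
        @functools.wraps(run_func)
        def exc_logging_wrapper(*args: Any) -> int:
            try:
                return run_func(*args)
            except KeyboardInterrupt:
                logger.critical("Operation cancelled by user")
                return ERROR
            except BaseException:
                # Catch-all: log the traceback, return a failure exit code.
                logger.critical("Exception:", exc_info=True)
                return ERROR
        return exc_logging_wrapper

    def run() -> int:
        raise RuntimeError("boom")

    print(intercepts_unhandled_exc(run)())  # logs the traceback, prints 1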
@@ -10,9 +10,10 @@ pass on state. To be consistent, all options will follow this design.
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

+import importlib.util
+import logging
 import os
 import textwrap
-import warnings
 from functools import partial
 from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
 from textwrap import dedent

@@ -21,7 +22,6 @@ from typing import Any, Callable, Dict, Optional, Tuple
 from pip._vendor.packaging.utils import canonicalize_name

 from pip._internal.cli.parser import ConfigOptionParser
-from pip._internal.cli.progress_bars import BAR_TYPES
 from pip._internal.exceptions import CommandError
 from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
 from pip._internal.models.format_control import FormatControl

@@ -30,9 +30,10 @@ from pip._internal.models.target_python import TargetPython
 from pip._internal.utils.hashes import STRONG_HASHES
 from pip._internal.utils.misc import strtobool

+logger = logging.getLogger(__name__)

-def raise_option_error(parser, option, msg):
-    # type: (OptionParser, Option, str) -> None
+
+def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
     """
     Raise an option parsing error using parser.error().

@@ -46,8 +47,7 @@ def raise_option_error(parser, option, msg):
     parser.error(msg)


-def make_option_group(group, parser):
-    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
+def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
     """
     Return an OptionGroup object
     group -- assumed to be dict with 'name' and 'options' keys

@@ -59,8 +59,9 @@ def make_option_group(group, parser):
     return option_group


-def check_install_build_global(options, check_options=None):
-    # type: (Values, Optional[Values]) -> None
+def check_install_build_global(
+    options: Values, check_options: Optional[Values] = None
+) -> None:
     """Disable wheels if per-setup.py call options are set.

     :param options: The OptionParser options to update.

@@ -70,23 +71,20 @@ def check_install_build_global(options, check_options=None):
     if check_options is None:
         check_options = options

-    def getname(n):
-        # type: (str) -> Optional[Any]
+    def getname(n: str) -> Optional[Any]:
         return getattr(check_options, n, None)

     names = ["build_options", "global_options", "install_options"]
     if any(map(getname, names)):
         control = options.format_control
         control.disallow_binaries()
-        warnings.warn(
+        logger.warning(
             "Disabling all use of wheels due to the use of --build-option "
             "/ --global-option / --install-option.",
-            stacklevel=2,
         )


-def check_dist_restriction(options, check_target=False):
-    # type: (Values, bool) -> None
+def check_dist_restriction(options: Values, check_target: bool = False) -> None:
     """Function for determining if custom platform options are allowed.

     :param options: The OptionParser options.

@@ -126,13 +124,11 @@ def check_dist_restriction(options, check_target=False):
     )


-def _path_option_check(option, opt, value):
-    # type: (Option, str, str) -> str
+def _path_option_check(option: Option, opt: str, value: str) -> str:
     return os.path.expanduser(value)


-def _package_name_option_check(option, opt, value):
-    # type: (Option, str, str) -> str
+def _package_name_option_check(option: Option, opt: str, value: str) -> str:
     return canonicalize_name(value)
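One behavioral note on the hunk above: swapping warnings.warn for logger.warning moves the message out of Python's warning machinery (which filters and deduplicates) and into the logging pipeline pip already configures, so the user reliably sees it once. A minimal illustration of the two paths:

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("cmdoptions-demo")

    # warnings.warn: subject to warning filters/dedup, attributed to the caller
    warnings.warn("Disabling all use of wheels ...", stacklevel=2)

    # logger.warning: routed through logging handlers, formatted like other output
    logger.warning("Disabling all use of wheels ...")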
@@ -147,16 +143,28 @@ class PipOption(Option):
 # options #
 ###########

-help_ = partial(
+help_: Callable[..., Option] = partial(
     Option,
     "-h",
     "--help",
     dest="help",
     action="help",
     help="Show help.",
-)  # type: Callable[..., Option]
+)

-isolated_mode = partial(
+debug_mode: Callable[..., Option] = partial(
+    Option,
+    "--debug",
+    dest="debug_mode",
+    action="store_true",
+    default=False,
+    help=(
+        "Let unhandled exceptions propagate outside the main subroutine, "
+        "instead of logging them to stderr."
+    ),
+)
+
+isolated_mode: Callable[..., Option] = partial(
     Option,
     "--isolated",
     dest="isolated_mode",

@@ -166,20 +174,22 @@ isolated_mode = partial(
         "Run pip in an isolated mode, ignoring environment variables and user "
         "configuration."
     ),
-)  # type: Callable[..., Option]
+)

-require_virtualenv = partial(
+require_virtualenv: Callable[..., Option] = partial(
     Option,
-    # Run only if inside a virtualenv, bail if not.
     "--require-virtualenv",
     "--require-venv",
     dest="require_venv",
     action="store_true",
     default=False,
-    help=SUPPRESS_HELP,
-)  # type: Callable[..., Option]
+    help=(
+        "Allow pip to only run in a virtual environment; "
+        "exit with an error otherwise."
+    ),
+)

-verbose = partial(
+verbose: Callable[..., Option] = partial(
     Option,
     "-v",
     "--verbose",

@@ -187,27 +197,27 @@ verbose = partial(
     action="count",
     default=0,
     help="Give more output. Option is additive, and can be used up to 3 times.",
-)  # type: Callable[..., Option]
+)

-no_color = partial(
+no_color: Callable[..., Option] = partial(
     Option,
     "--no-color",
     dest="no_color",
     action="store_true",
     default=False,
     help="Suppress colored output.",
-)  # type: Callable[..., Option]
+)

-version = partial(
+version: Callable[..., Option] = partial(
     Option,
     "-V",
     "--version",
     dest="version",
     action="store_true",
     help="Show version and exit.",
-)  # type: Callable[..., Option]
+)

-quiet = partial(
+quiet: Callable[..., Option] = partial(
     Option,
     "-q",
     "--quiet",
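These module-level names are option factories: functools.partial pre-binds an Option's constructor arguments, and each command calls the factory to get a fresh Option instance, so parsers never share mutable option objects. A self-contained sketch of the pattern:

    from functools import partial
    from optparse import Option, OptionParser
    from typing import Callable

    # Pre-bind the option definition once; call it per-parser for fresh Options.
    verbose: Callable[..., Option] = partial(
        Option,
        "-v",
        "--verbose",
        dest="verbose",
        action="count",
        default=0,
        help="Give more output.",
    )

    parser = OptionParser()
    parser.add_option(verbose())  # each call builds a new Option instance
    opts, _ = parser.parse_args(["-vv"])
    print(opts.verbose)  # 2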
@@ -219,23 +229,19 @@ quiet = partial(
         " times (corresponding to WARNING, ERROR, and CRITICAL logging"
         " levels)."
     ),
-)  # type: Callable[..., Option]
+)

-progress_bar = partial(
+progress_bar: Callable[..., Option] = partial(
     Option,
     "--progress-bar",
     dest="progress_bar",
     type="choice",
-    choices=list(BAR_TYPES.keys()),
+    choices=["on", "off"],
     default="on",
-    help=(
-        "Specify type of progress to be displayed ["
-        + "|".join(BAR_TYPES.keys())
-        + "] (default: %default)"
-    ),
-)  # type: Callable[..., Option]
+    help="Specify whether the progress bar should be used [on, off] (default: on)",
+)

-log = partial(
+log: Callable[..., Option] = partial(
     PipOption,
     "--log",
     "--log-file",

@@ -244,9 +250,9 @@ log = partial(
     metavar="path",
     type="path",
     help="Path to a verbose appending log.",
-)  # type: Callable[..., Option]
+)

-no_input = partial(
+no_input: Callable[..., Option] = partial(
     Option,
     # Don't ask for input
     "--no-input",

@@ -254,18 +260,18 @@ no_input = partial(
     action="store_true",
     default=False,
     help="Disable prompting for input.",
-)  # type: Callable[..., Option]
+)

-proxy = partial(
+proxy: Callable[..., Option] = partial(
     Option,
     "--proxy",
     dest="proxy",
     type="str",
     default="",
-    help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
-)  # type: Callable[..., Option]
+    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
+)

-retries = partial(
+retries: Callable[..., Option] = partial(
     Option,
     "--retries",
     dest="retries",

@@ -273,9 +279,9 @@ retries = partial(
     default=5,
     help="Maximum number of retries each connection should attempt "
     "(default %default times).",
-)  # type: Callable[..., Option]
+)

-timeout = partial(
+timeout: Callable[..., Option] = partial(
     Option,
     "--timeout",
     "--default-timeout",
@@ -284,11 +290,10 @@ timeout = partial(
     type="float",
     default=15,
     help="Set the socket timeout (default %default seconds).",
-)  # type: Callable[..., Option]
+)


-def exists_action():
-    # type: () -> Option
+def exists_action() -> Option:
     return Option(
         # Option when path already exist
         "--exists-action",

@@ -303,7 +308,7 @@ def exists_action():
     )


-cert = partial(
+cert: Callable[..., Option] = partial(
     PipOption,
     "--cert",
     dest="cert",

@@ -315,9 +320,9 @@ cert = partial(
         "See 'SSL Certificate Verification' in pip documentation "
         "for more information."
     ),
-)  # type: Callable[..., Option]
+)

-client_cert = partial(
+client_cert: Callable[..., Option] = partial(
     PipOption,
     "--client-cert",
     dest="client_cert",

@@ -326,9 +331,9 @@ client_cert = partial(
     metavar="path",
     help="Path to SSL client certificate, a single file containing the "
     "private key and the certificate in PEM format.",
-)  # type: Callable[..., Option]
+)

-index_url = partial(
+index_url: Callable[..., Option] = partial(
     Option,
     "-i",
     "--index-url",

@@ -340,11 +345,10 @@ index_url = partial(
     "This should point to a repository compliant with PEP 503 "
     "(the simple repository API) or a local directory laid out "
     "in the same format.",
-)  # type: Callable[..., Option]
+)


-def extra_index_url():
-    # type: () -> Option
+def extra_index_url() -> Option:
     return Option(
         "--extra-index-url",
         dest="extra_index_urls",

@@ -357,18 +361,17 @@ def extra_index_url():
     )


-no_index = partial(
+no_index: Callable[..., Option] = partial(
     Option,
     "--no-index",
     dest="no_index",
     action="store_true",
     default=False,
     help="Ignore package index (only looking at --find-links URLs instead).",
-)  # type: Callable[..., Option]
+)


-def find_links():
-    # type: () -> Option
+def find_links() -> Option:
     return Option(
         "-f",
         "--find-links",

@@ -378,14 +381,13 @@ def find_links():
         metavar="url",
         help="If a URL or path to an html file, then parse for links to "
         "archives such as sdist (.tar.gz) or wheel (.whl) files. "
         "If a local path or file:// URL that's a directory, "
         "then look for archives in the directory listing. "
         "Links to VCS project URLs are not supported.",
     )


-def trusted_host():
-    # type: () -> Option
+def trusted_host() -> Option:
     return Option(
         "--trusted-host",
         dest="trusted_hosts",

@@ -397,8 +399,7 @@ def trusted_host():
     )


-def constraints():
-    # type: () -> Option
+def constraints() -> Option:
     return Option(
         "-c",
         "--constraint",

@@ -411,8 +412,7 @@ def constraints():
     )


-def requirements():
-    # type: () -> Option
+def requirements() -> Option:
     return Option(
         "-r",
         "--requirement",

@@ -425,8 +425,7 @@ def requirements():
     )


-def editable():
-    # type: () -> Option
+def editable() -> Option:
     return Option(
         "-e",
         "--editable",

@@ -441,13 +440,12 @@ def editable():
     )


-def _handle_src(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
     value = os.path.abspath(value)
     setattr(parser.values, option.dest, value)


-src = partial(
+src: Callable[..., Option] = partial(
     PipOption,
     "--src",
     "--source",
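_handle_src above is an optparse callback: instead of storing the raw value, it normalizes it first. A runnable sketch of the same callback mechanism (the option name and dest here are illustrative):

    import os
    from optparse import OptionParser

    def store_abspath(option, opt_str, value, parser):
        """Callback: make the given path absolute before storing it."""
        setattr(parser.values, option.dest, os.path.abspath(value))

    parser = OptionParser()
    parser.add_option(
        "--src", dest="src_dir", type=str, action="callback", callback=store_abspath
    )
    opts, _ = parser.parse_args(["--src", "projects/demo"])
    print(opts.src_dir)  # absolute form of projects/demo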
@@ -462,17 +460,17 @@ src = partial(
     help="Directory to check out editable projects into. "
     'The default in a virtualenv is "<venv path>/src". '
     'The default for global installs is "<current dir>/src".',
-)  # type: Callable[..., Option]
+)


-def _get_format_control(values, option):
-    # type: (Values, Option) -> Any
+def _get_format_control(values: Values, option: Option) -> Any:
     """Get a format_control object."""
     return getattr(values, option.dest)


-def _handle_no_binary(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_no_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
     existing = _get_format_control(parser.values, option)
     FormatControl.handle_mutual_excludes(
         value,

@@ -481,8 +479,9 @@ def _handle_no_binary(option, opt_str, value, parser):
     )


-def _handle_only_binary(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_only_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
     existing = _get_format_control(parser.values, option)
     FormatControl.handle_mutual_excludes(
         value,

@@ -491,8 +490,7 @@ def _handle_only_binary(option, opt_str, value, parser):
     )


-def no_binary():
-    # type: () -> Option
+def no_binary() -> Option:
     format_control = FormatControl(set(), set())
     return Option(
         "--no-binary",

@@ -510,8 +508,7 @@ def no_binary():
     )


-def only_binary():
-    # type: () -> Option
+def only_binary() -> Option:
     format_control = FormatControl(set(), set())
     return Option(
         "--only-binary",

@@ -529,7 +526,7 @@ def only_binary():
     )


-platforms = partial(
+platforms: Callable[..., Option] = partial(
     Option,
     "--platform",
     dest="platforms",

@@ -541,12 +538,11 @@ platforms = partial(
         "platform of the running system. Use this option multiple times to "
         "specify multiple platforms supported by the target interpreter."
     ),
-)  # type: Callable[..., Option]
+)


 # This was made a separate function for unit-testing purposes.
-def _convert_python_version(value):
-    # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
+def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
     """
     Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
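A toy reimplementation of the version-string conversion this docstring describes, under the assumption that a bare multi-digit string like "37" means one digit per component; pip's real parser handles more edge cases:

    from typing import Optional, Tuple

    def convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
        """'3' -> (3,), '37' -> (3, 7), '3.7.3' -> (3, 7, 3);
        returns (version_info, error-message-or-None)."""
        if not value:
            return ((), None)
        if "." not in value:
            # "37" is treated as one int per character: (3, 7)
            parts = list(value)
        else:
            parts = value.split(".")
        try:
            return (tuple(int(p) for p in parts), None)
        except ValueError:
            return ((), f"invalid version: {value!r}")

    print(convert_python_version("37"))     # ((3, 7), None)
    print(convert_python_version("3.7.3"))  # ((3, 7, 3), None)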
@@ -575,8 +571,9 @@ def _convert_python_version(value):
     return (version_info, None)


-def _handle_python_version(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_python_version(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
     """
     Handle a provided --python-version value.
     """

@@ -591,7 +588,7 @@ def _handle_python_version(option, opt_str, value, parser):
     parser.values.python_version = version_info


-python_version = partial(
+python_version: Callable[..., Option] = partial(
     Option,
     "--python-version",
     dest="python_version",

@@ -609,10 +606,10 @@ python_version = partial(
     version can also be given as a string without dots (e.g. "37" for 3.7.0).
     """
     ),
-)  # type: Callable[..., Option]
+)

-implementation = partial(
+implementation: Callable[..., Option] = partial(
     Option,
     "--implementation",
     dest="implementation",

@@ -625,10 +622,10 @@ implementation = partial(
         "interpreter implementation is used. Use 'py' to force "
         "implementation-agnostic wheels."
     ),
-)  # type: Callable[..., Option]
+)

-abis = partial(
+abis: Callable[..., Option] = partial(
     Option,
     "--abi",
     dest="abis",

@@ -643,19 +640,17 @@ abis = partial(
         "--implementation, --platform, and --python-version when using this "
         "option."
     ),
-)  # type: Callable[..., Option]
+)


-def add_target_python_options(cmd_opts):
-    # type: (OptionGroup) -> None
+def add_target_python_options(cmd_opts: OptionGroup) -> None:
     cmd_opts.add_option(platforms())
     cmd_opts.add_option(python_version())
     cmd_opts.add_option(implementation())
     cmd_opts.add_option(abis())


-def make_target_python(options):
-    # type: (Values) -> TargetPython
+def make_target_python(options: Values) -> TargetPython:
     target_python = TargetPython(
         platforms=options.platforms,
         py_version_info=options.python_version,

@@ -666,8 +661,7 @@ def make_target_python(options):
     return target_python


-def prefer_binary():
-    # type: () -> Option
+def prefer_binary() -> Option:
     return Option(
         "--prefer-binary",
         dest="prefer_binary",

@@ -677,7 +671,7 @@ def prefer_binary():
     )


-cache_dir = partial(
+cache_dir: Callable[..., Option] = partial(
     PipOption,
     "--cache-dir",
     dest="cache_dir",
@@ -685,11 +679,12 @@ cache_dir = partial(
     metavar="dir",
     type="path",
     help="Store the cache data in <dir>.",
-)  # type: Callable[..., Option]
+)


-def _handle_no_cache_dir(option, opt, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_no_cache_dir(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
     """
     Process a value provided for the --no-cache-dir option.

@@ -716,16 +711,16 @@ def _handle_no_cache_dir(option, opt, value, parser):
     parser.values.cache_dir = False


-no_cache = partial(
+no_cache: Callable[..., Option] = partial(
     Option,
     "--no-cache-dir",
     dest="cache_dir",
     action="callback",
     callback=_handle_no_cache_dir,
     help="Disable the cache.",
-)  # type: Callable[..., Option]
+)

-no_deps = partial(
+no_deps: Callable[..., Option] = partial(
     Option,
     "--no-deps",
     "--no-dependencies",

@@ -733,29 +728,17 @@ no_deps = partial(
     action="store_true",
     default=False,
     help="Don't install package dependencies.",
-)  # type: Callable[..., Option]
+)

-build_dir = partial(
-    PipOption,
-    "-b",
-    "--build",
-    "--build-dir",
-    "--build-directory",
-    dest="build_dir",
-    type="path",
-    metavar="dir",
-    help=SUPPRESS_HELP,
-)  # type: Callable[..., Option]
-
-ignore_requires_python = partial(
+ignore_requires_python: Callable[..., Option] = partial(
     Option,
     "--ignore-requires-python",
     dest="ignore_requires_python",
     action="store_true",
     help="Ignore the Requires-Python information.",
-)  # type: Callable[..., Option]
+)

-no_build_isolation = partial(
+no_build_isolation: Callable[..., Option] = partial(
     Option,
     "--no-build-isolation",
     dest="build_isolation",

@@ -764,11 +747,21 @@ no_build_isolation = partial(
     help="Disable isolation when building a modern source distribution. "
     "Build dependencies specified by PEP 518 must be already installed "
     "if this option is used.",
-)  # type: Callable[..., Option]
+)
+
+check_build_deps: Callable[..., Option] = partial(
+    Option,
+    "--check-build-dependencies",
+    dest="check_build_deps",
+    action="store_true",
+    default=False,
+    help="Check the build dependencies when PEP517 is used.",
+)


-def _handle_no_use_pep517(option, opt, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_no_use_pep517(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
     """
     Process a value provided for the --no-use-pep517 option.
@@ -787,11 +780,17 @@ def _handle_no_use_pep517(option, opt, value, parser):
         """
         raise_option_error(parser, option=option, msg=msg)

+    # If user doesn't wish to use pep517, we check if setuptools is installed
+    # and raise error if it is not.
+    if not importlib.util.find_spec("setuptools"):
+        msg = "It is not possible to use --no-use-pep517 without setuptools installed."
+        raise_option_error(parser, option=option, msg=msg)
+
     # Otherwise, --no-use-pep517 was passed via the command-line.
     parser.values.use_pep517 = False


-use_pep517 = partial(
+use_pep517: Any = partial(
     Option,
     "--use-pep517",
     dest="use_pep517",

@@ -799,9 +798,9 @@ use_pep517 = partial(
     default=None,
     help="Use PEP 517 for building source distributions "
     "(use --no-use-pep517 to force legacy behaviour).",
-)  # type: Any
+)

-no_use_pep517 = partial(
+no_use_pep517: Any = partial(
     Option,
     "--no-use-pep517",
     dest="use_pep517",

@@ -809,9 +808,36 @@ no_use_pep517 = partial(
     callback=_handle_no_use_pep517,
     default=None,
     help=SUPPRESS_HELP,
-)  # type: Any
+)

-install_options = partial(
+
+def _handle_config_settings(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    key, sep, val = value.partition("=")
+    if sep != "=":
+        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")  # noqa
+    dest = getattr(parser.values, option.dest)
+    if dest is None:
+        dest = {}
+        setattr(parser.values, option.dest, dest)
+    dest[key] = val
+
+
+config_settings: Callable[..., Option] = partial(
+    Option,
+    "--config-settings",
+    dest="config_settings",
+    type=str,
+    action="callback",
+    callback=_handle_config_settings,
+    metavar="settings",
+    help="Configuration settings to be passed to the PEP 517 build backend. "
+    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
+    "to pass multiple keys to the backend.",
+)
+
+install_options: Callable[..., Option] = partial(
     Option,
     "--install-option",
     dest="install_options",
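_handle_config_settings accumulates repeated KEY=VAL options into a dict on the parsed values. The same mechanism, runnable in isolation (option name kept from the diff, everything else pared down):

    from optparse import OptionParser

    def handle_kv(option, opt_str, value, parser):
        """Collect repeated KEY=VAL options into a dict on parser.values."""
        key, sep, val = value.partition("=")
        if sep != "=":
            parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
        dest = getattr(parser.values, option.dest) or {}
        setattr(parser.values, option.dest, dest)
        dest[key] = val

    parser = OptionParser()
    parser.add_option(
        "--config-settings", dest="config_settings",
        type=str, action="callback", callback=handle_kv,
    )
    opts, _ = parser.parse_args(
        ["--config-settings", "a=1", "--config-settings", "b=2"]
    )
    print(opts.config_settings)  # {'a': '1', 'b': '2'}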
@@ -822,18 +848,18 @@ install_options = partial(
     'bin"). Use multiple --install-option options to pass multiple '
     "options to setup.py install. If you are using an option with a "
     "directory path, be sure to use absolute path.",
-)  # type: Callable[..., Option]
+)

-build_options = partial(
+build_options: Callable[..., Option] = partial(
     Option,
     "--build-option",
     dest="build_options",
     metavar="options",
     action="append",
     help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
-)  # type: Callable[..., Option]
+)

-global_options = partial(
+global_options: Callable[..., Option] = partial(
     Option,
     "--global-option",
     dest="global_options",

@@ -841,26 +867,26 @@ global_options = partial(
     metavar="options",
     help="Extra global options to be supplied to the setup.py "
     "call before the install or bdist_wheel command.",
-)  # type: Callable[..., Option]
+)

-no_clean = partial(
+no_clean: Callable[..., Option] = partial(
     Option,
     "--no-clean",
     action="store_true",
     default=False,
     help="Don't clean up build directories.",
-)  # type: Callable[..., Option]
+)

-pre = partial(
+pre: Callable[..., Option] = partial(
     Option,
     "--pre",
     action="store_true",
     default=False,
     help="Include pre-release and development versions. By default, "
     "pip only finds stable versions.",
-)  # type: Callable[..., Option]
+)

-disable_pip_version_check = partial(
+disable_pip_version_check: Callable[..., Option] = partial(
     Option,
     "--disable-pip-version-check",
     dest="disable_pip_version_check",

@@ -868,11 +894,21 @@ disable_pip_version_check = partial(
     default=False,
     help="Don't periodically check PyPI to determine whether a new version "
     "of pip is available for download. Implied with --no-index.",
-)  # type: Callable[..., Option]
+)
+
+root_user_action: Callable[..., Option] = partial(
+    Option,
+    "--root-user-action",
+    dest="root_user_action",
+    default="warn",
+    choices=["warn", "ignore"],
+    help="Action if pip is run as a root user. By default, a warning message is shown.",
+)


-def _handle_merge_hash(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
+def _handle_merge_hash(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
     """Given a value spelled "algo:digest", append the digest to a list
     pointed to in a dict by the algo name."""
     if not parser.values.hashes:

@@ -894,7 +930,7 @@ def _handle_merge_hash(option, opt_str, value, parser):
     parser.values.hashes.setdefault(algo, []).append(digest)


-hash = partial(
+hash: Callable[..., Option] = partial(
     Option,
     "--hash",
     # Hash values eventually end up in InstallRequirement.hashes due to

@@ -905,10 +941,10 @@ hash = partial(
     type="string",
     help="Verify that the package's archive matches this "
     "hash before installing. Example: --hash=sha256:abcdef...",
-)  # type: Callable[..., Option]
+)

-require_hashes = partial(
+require_hashes: Callable[..., Option] = partial(
     Option,
     "--require-hashes",
     dest="require_hashes",

@@ -917,10 +953,10 @@ require_hashes = partial(
     help="Require a hash to check each requirement against, for "
     "repeatable installs. This option is implied when any package in a "
     "requirements file has a --hash option.",
-)  # type: Callable[..., Option]
+)

-list_path = partial(
+list_path: Callable[..., Option] = partial(
     PipOption,
     "--path",
     dest="path",

@@ -928,16 +964,15 @@ list_path = partial(
     action="append",
     help="Restrict to the specified installation path for listing "
     "packages (can be used multiple times).",
-)  # type: Callable[..., Option]
+)


-def check_list_path_option(options):
-    # type: (Values) -> None
+def check_list_path_option(options: Values) -> None:
     if options.path and (options.user or options.local):
         raise CommandError("Cannot combine '--path' with '--user' or '--local'")


-list_exclude = partial(
+list_exclude: Callable[..., Option] = partial(
     PipOption,
     "--exclude",
     dest="excludes",
@ -945,50 +980,55 @@ list_exclude = partial(
|
||||||
metavar="package",
|
metavar="package",
|
||||||
type="package_name",
|
type="package_name",
|
||||||
help="Exclude specified package from the output",
|
help="Exclude specified package from the output",
|
||||||
) # type: Callable[..., Option]
|
)
|
||||||
|
|
||||||
|
|
||||||
no_python_version_warning = partial(
|
no_python_version_warning: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--no-python-version-warning",
|
"--no-python-version-warning",
|
||||||
dest="no_python_version_warning",
|
dest="no_python_version_warning",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
default=False,
|
default=False,
|
||||||
help="Silence deprecation warnings for upcoming unsupported Pythons.",
|
help="Silence deprecation warnings for upcoming unsupported Pythons.",
|
||||||
) # type: Callable[..., Option]
|
)
|
||||||
|
|
||||||
|
|
||||||
use_new_feature = partial(
|
use_new_feature: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--use-feature",
|
"--use-feature",
|
||||||
dest="features_enabled",
|
dest="features_enabled",
|
||||||
metavar="feature",
|
metavar="feature",
|
||||||
action="append",
|
action="append",
|
||||||
default=[],
|
default=[],
|
||||||
choices=["2020-resolver", "fast-deps", "in-tree-build"],
|
choices=["2020-resolver", "fast-deps"],
|
||||||
help="Enable new functionality, that may be backward incompatible.",
|
help="Enable new functionality, that may be backward incompatible.",
|
||||||
) # type: Callable[..., Option]
|
)
|
||||||
|
|
||||||
use_deprecated_feature = partial(
|
use_deprecated_feature: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--use-deprecated",
|
"--use-deprecated",
|
||||||
dest="deprecated_features_enabled",
|
dest="deprecated_features_enabled",
|
||||||
metavar="feature",
|
metavar="feature",
|
||||||
action="append",
|
action="append",
|
||||||
default=[],
|
default=[],
|
||||||
choices=["legacy-resolver"],
|
choices=[
|
||||||
|
"legacy-resolver",
|
||||||
|
"backtrack-on-build-failures",
|
||||||
|
"html5lib",
|
||||||
|
],
|
||||||
help=("Enable deprecated functionality, that will be removed in the future."),
|
help=("Enable deprecated functionality, that will be removed in the future."),
|
||||||
) # type: Callable[..., Option]
|
)
|
||||||
|
|
||||||
|
|
||||||
##########
|
##########
|
||||||
# groups #
|
# groups #
|
||||||
##########
|
##########
|
||||||
|
|
||||||
general_group = {
|
general_group: Dict[str, Any] = {
|
||||||
"name": "General Options",
|
"name": "General Options",
|
||||||
"options": [
|
"options": [
|
||||||
help_,
|
help_,
|
||||||
|
debug_mode,
|
||||||
isolated_mode,
|
isolated_mode,
|
||||||
require_virtualenv,
|
require_virtualenv,
|
||||||
verbose,
|
verbose,
|
||||||
|
@ -1011,9 +1051,9 @@ general_group = {
|
||||||
use_new_feature,
|
use_new_feature,
|
||||||
use_deprecated_feature,
|
use_deprecated_feature,
|
||||||
],
|
],
|
||||||
} # type: Dict[str, Any]
|
}
|
||||||
|
|
||||||
index_group = {
|
index_group: Dict[str, Any] = {
|
||||||
"name": "Package Index Options",
|
"name": "Package Index Options",
|
||||||
"options": [
|
"options": [
|
||||||
index_url,
|
index_url,
|
||||||
|
@ -1021,4 +1061,4 @@ index_group = {
|
||||||
no_index,
|
no_index,
|
||||||
find_links,
|
find_links,
|
||||||
],
|
],
|
||||||
} # type: Dict[str, Any]
|
}
|
||||||
|
|
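The edits above are mechanical: each module-level option factory loses its trailing `# type: Callable[..., Option]` comment and gains the same type as an inline annotation. The two styles side by side, sketched on a hypothetical `--dry-run` option that is not part of this commit:

from functools import partial
from optparse import Option
from typing import Callable

# Old style: the type lives in a trailing comment the checker must parse.
dry_run_old = partial(
    Option,
    "--dry-run",
    dest="dry_run",
    action="store_true",
    default=False,
    help="Do not actually install anything.",
)  # type: Callable[..., Option]

# New style: the same information as an inline variable annotation.
dry_run: Callable[..., Option] = partial(
    Option,
    "--dry-run",
    dest="dry_run",
    action="store_true",
    default=False,
    help="Do not actually install anything.",
)

opt = dry_run()  # calling the factory builds the Option
print(opt.dest)  # -> dry_run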
@@ -1,19 +1,17 @@
 from contextlib import ExitStack, contextmanager
-from typing import ContextManager, Iterator, TypeVar
+from typing import ContextManager, Generator, TypeVar

 _T = TypeVar("_T", covariant=True)


 class CommandContextMixIn:
-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
         super().__init__()
         self._in_main_context = False
         self._main_context = ExitStack()

     @contextmanager
-    def main_context(self):
-        # type: () -> Iterator[None]
+    def main_context(self) -> Generator[None, None, None]:
         assert not self._in_main_context

         self._in_main_context = True
@@ -23,8 +21,7 @@ class CommandContextMixIn:
         finally:
             self._in_main_context = False

-    def enter_context(self, context_provider):
-        # type: (ContextManager[_T]) -> _T
+    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
         assert self._in_main_context

         return self._main_context.enter_context(context_provider)
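The mixin above funnels every `enter_context()` call into a single `ExitStack` that unwinds when `main_context()` exits. A minimal standalone sketch of the same pattern (class and variable names here are illustrative, not part of the diff):

from contextlib import ExitStack, contextmanager
from tempfile import TemporaryDirectory
from typing import ContextManager, Generator, TypeVar

_T = TypeVar("_T", covariant=True)


class ContextOwner:
    def __init__(self) -> None:
        self._stack = ExitStack()
        self._active = False

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # Everything entered while this block is active is cleaned up here.
        self._active = True
        try:
            with self._stack:
                yield
        finally:
            self._active = False

    def enter_context(self, provider: ContextManager[_T]) -> _T:
        assert self._active, "enter_context() is only valid inside main_context()"
        return self._stack.enter_context(provider)


owner = ContextOwner()
with owner.main_context():
    tmp = owner.enter_context(TemporaryDirectory())  # removed on exit
    print(tmp)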
@@ -42,8 +42,7 @@ logger = logging.getLogger(__name__)
 # main, this should not be an issue in practice.


-def main(args=None):
-    # type: (Optional[List[str]]) -> int
+def main(args: Optional[List[str]] = None) -> int:
     if args is None:
         args = sys.argv[1:]

@@ -14,8 +14,7 @@ from pip._internal.utils.misc import get_pip_version, get_prog
 __all__ = ["create_main_parser", "parse_command"]


-def create_main_parser():
-    # type: () -> ConfigOptionParser
+def create_main_parser() -> ConfigOptionParser:
     """Creates and returns the main parser for pip's CLI"""

     parser = ConfigOptionParser(
@@ -46,8 +45,7 @@ def create_main_parser():
     return parser


-def parse_command(args):
-    # type: (List[str]) -> Tuple[str, List[str]]
+def parse_command(args: List[str]) -> Tuple[str, List[str]]:
     parser = create_main_parser()

     # Note: parser calls disable_interspersed_args(), so the result of this
@@ -6,7 +6,7 @@ import shutil
 import sys
 import textwrap
 from contextlib import suppress
-from typing import Any, Dict, Iterator, List, Tuple
+from typing import Any, Dict, Generator, List, Tuple

 from pip._internal.cli.status_codes import UNKNOWN_ERROR
 from pip._internal.configuration import Configuration, ConfigurationError
@@ -18,20 +18,19 @@ logger = logging.getLogger(__name__)
 class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
     """A prettier/less verbose help formatter for optparse."""

-    def __init__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         # help position must be aligned with __init__.parseopts.description
         kwargs["max_help_position"] = 30
         kwargs["indent_increment"] = 1
         kwargs["width"] = shutil.get_terminal_size()[0] - 2
         super().__init__(*args, **kwargs)

-    def format_option_strings(self, option):
-        # type: (optparse.Option) -> str
+    def format_option_strings(self, option: optparse.Option) -> str:
         return self._format_option_strings(option)

-    def _format_option_strings(self, option, mvarfmt=" <{}>", optsep=", "):
-        # type: (optparse.Option, str, str) -> str
+    def _format_option_strings(
+        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
+    ) -> str:
         """
         Return a comma-separated list of option strings and metavars.

@@ -55,14 +54,12 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):

         return "".join(opts)

-    def format_heading(self, heading):
-        # type: (str) -> str
+    def format_heading(self, heading: str) -> str:
         if heading == "Options":
             return ""
         return heading + ":\n"

-    def format_usage(self, usage):
-        # type: (str) -> str
+    def format_usage(self, usage: str) -> str:
         """
         Ensure there is only one newline between usage and the first heading
         if there is no description.
@@ -70,8 +67,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
         msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
         return msg

-    def format_description(self, description):
-        # type: (str) -> str
+    def format_description(self, description: str) -> str:
         # leave full control over description to us
         if description:
             if hasattr(self.parser, "main"):
@@ -89,16 +85,14 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
         else:
             return ""

-    def format_epilog(self, epilog):
-        # type: (str) -> str
+    def format_epilog(self, epilog: str) -> str:
         # leave full control over epilog to us
         if epilog:
             return epilog
         else:
             return ""

-    def indent_lines(self, text, indent):
-        # type: (str, str) -> str
+    def indent_lines(self, text: str, indent: str) -> str:
         new_lines = [indent + line for line in text.split("\n")]
         return "\n".join(new_lines)

@@ -112,8 +106,7 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
     Also redact auth from url type options
     """

-    def expand_default(self, option):
-        # type: (optparse.Option) -> str
+    def expand_default(self, option: optparse.Option) -> str:
         default_values = None
         if self.parser is not None:
             assert isinstance(self.parser, ConfigOptionParser)
@@ -137,8 +130,9 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):


 class CustomOptionParser(optparse.OptionParser):
-    def insert_option_group(self, idx, *args, **kwargs):
-        # type: (int, Any, Any) -> optparse.OptionGroup
+    def insert_option_group(
+        self, idx: int, *args: Any, **kwargs: Any
+    ) -> optparse.OptionGroup:
         """Insert an OptionGroup at a given position."""
         group = self.add_option_group(*args, **kwargs)

@@ -148,8 +142,7 @@ class CustomOptionParser(optparse.OptionParser):
         return group

     @property
-    def option_list_all(self):
-        # type: () -> List[optparse.Option]
+    def option_list_all(self) -> List[optparse.Option]:
         """Get a list of all options, including those in option groups."""
         res = self.option_list[:]
         for i in self.option_groups:
@@ -164,35 +157,34 @@ class ConfigOptionParser(CustomOptionParser):

     def __init__(
         self,
-        *args,  # type: Any
-        name,  # type: str
-        isolated=False,  # type: bool
-        **kwargs,  # type: Any
-    ):
-        # type: (...) -> None
+        *args: Any,
+        name: str,
+        isolated: bool = False,
+        **kwargs: Any,
+    ) -> None:
         self.name = name
         self.config = Configuration(isolated)

         assert self.name
         super().__init__(*args, **kwargs)

-    def check_default(self, option, key, val):
-        # type: (optparse.Option, str, Any) -> Any
+    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
         try:
             return option.check_value(key, val)
         except optparse.OptionValueError as exc:
             print(f"An error occurred during configuration: {exc}")
             sys.exit(3)

-    def _get_ordered_configuration_items(self):
-        # type: () -> Iterator[Tuple[str, Any]]
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
         # Configuration gives keys in an unordered manner. Order them.
         override_order = ["global", self.name, ":env:"]

         # Pool the options into different groups
-        section_items = {
+        section_items: Dict[str, List[Tuple[str, Any]]] = {
             name: [] for name in override_order
-        }  # type: Dict[str, List[Tuple[str, Any]]]
+        }
         for section_key, val in self.config.items():
             # ignore empty values
             if not val:
@@ -211,8 +203,7 @@ class ConfigOptionParser(CustomOptionParser):
             for key, val in section_items[section]:
                 yield key, val

-    def _update_defaults(self, defaults):
-        # type: (Dict[str, Any]) -> Dict[str, Any]
+    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
         """Updates the given defaults with values from the config files and
         the environ. Does a little special handling for certain types of
         options (lists)."""
@@ -276,8 +267,7 @@ class ConfigOptionParser(CustomOptionParser):
         self.values = None
         return defaults

-    def get_default_values(self):
-        # type: () -> optparse.Values
+    def get_default_values(self) -> optparse.Values:
         """Overriding to make updating the defaults after instantiation of
         the option parser possible, _update_defaults() does the dirty work."""
         if not self.process_default_values:
@@ -299,7 +289,6 @@ class ConfigOptionParser(CustomOptionParser):
             defaults[option.dest] = option.check_value(opt_str, default)
         return optparse.Values(defaults)

-    def error(self, msg):
-        # type: (str) -> None
+    def error(self, msg: str) -> None:
         self.print_usage(sys.stderr)
         self.exit(UNKNOWN_ERROR, f"{msg}\n")
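A recurring edit in this commit is retyping generator functions from `Iterator[X]` to `Generator[X, None, None]`. Both annotations are accepted for a function that uses `yield`; the sketch below shows what the longer form spells out:

from typing import Generator, Iterator


def count_up_old(n: int) -> Iterator[int]:
    # Iterator[int] is legal for a generator function, but says nothing
    # about the send() or return types.
    yield from range(n)


def count_up(n: int) -> Generator[int, None, None]:
    # Generator[yield_type, send_type, return_type] spells out all three
    # channels, which is the convention the diff above switches to.
    yield from range(n)


assert list(count_up(3)) == [0, 1, 2]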
@@ -1,261 +1,68 @@
-import itertools
-import sys
-from signal import SIGINT, default_int_handler, signal
-from typing import Any, Dict, List
+import functools
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple

-from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
-from pip._vendor.progress.spinner import Spinner
+from pip._vendor.rich.progress import (
+    BarColumn,
+    DownloadColumn,
+    FileSizeColumn,
+    Progress,
+    ProgressColumn,
+    SpinnerColumn,
+    TextColumn,
+    TimeElapsedColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)

-from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.logging import get_indentation
-from pip._internal.utils.misc import format_size

-try:
-    from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
-    colorama = None
+DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]


-def _select_progress_class(preferred, fallback):
-    # type: (Bar, Bar) -> Bar
-    encoding = getattr(preferred.file, "encoding", None)
+def _rich_progress_bar(
+    iterable: Iterable[bytes],
+    *,
+    bar_type: str,
+    size: int,
+) -> Generator[bytes, None, None]:
+    assert bar_type == "on", "This should only be used in the default mode."

-    # If we don't know what encoding this file is in, then we'll just assume
-    # that it doesn't support unicode and use the ASCII bar.
-    if not encoding:
-        return fallback
-
-    # Collect all of the possible characters we want to use with the preferred
-    # bar.
-    characters = [
-        getattr(preferred, "empty_fill", ""),
-        getattr(preferred, "fill", ""),
-    ]
-    characters += list(getattr(preferred, "phases", []))
-
-    # Try to decode the characters we're using for the bar using the encoding
-    # of the given file, if this works then we'll assume that we can use the
-    # fancier bar and if not we'll fall back to the plaintext bar.
-    try:
-        "".join(characters).encode(encoding)
-    except UnicodeEncodeError:
-        return fallback
+    if not size:
+        total = float("inf")
+        columns: Tuple[ProgressColumn, ...] = (
+            TextColumn("[progress.description]{task.description}"),
+            SpinnerColumn("line", speed=1.5),
+            FileSizeColumn(),
+            TransferSpeedColumn(),
+            TimeElapsedColumn(),
+        )
     else:
-        return preferred
-
-
-_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any
-
-
-class InterruptibleMixin:
-    """
-    Helper to ensure that self.finish() gets called on keyboard interrupt.
-
-    This allows downloads to be interrupted without leaving temporary state
-    (like hidden cursors) behind.
-
-    This class is similar to the progress library's existing SigIntMixin
-    helper, but as of version 1.2, that helper has the following problems:
-
-    1. It calls sys.exit().
-    2. It discards the existing SIGINT handler completely.
-    3. It leaves its own handler in place even after an uninterrupted finish,
-       which will have unexpected delayed effects if the user triggers an
-       unrelated keyboard interrupt some time after a progress-displaying
-       download has already completed, for example.
-    """
-
-    def __init__(self, *args, **kwargs):
-        # type: (List[Any], Dict[Any, Any]) -> None
-        """
-        Save the original SIGINT handler for later.
-        """
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-
-        self.original_handler = signal(SIGINT, self.handle_sigint)
-
-        # If signal() returns None, the previous handler was not installed from
-        # Python, and we cannot restore it. This probably should not happen,
-        # but if it does, we must restore something sensible instead, at least.
-        # The least bad option should be Python's default SIGINT handler, which
-        # just raises KeyboardInterrupt.
-        if self.original_handler is None:
-            self.original_handler = default_int_handler
-
-    def finish(self):
-        # type: () -> None
-        """
-        Restore the original SIGINT handler after finishing.
-
-        This should happen regardless of whether the progress display finishes
-        normally, or gets interrupted.
-        """
-        super().finish()  # type: ignore
-        signal(SIGINT, self.original_handler)
-
-    def handle_sigint(self, signum, frame):  # type: ignore
-        """
-        Call self.finish() before delegating to the original SIGINT handler.
-
-        This handler should only be in place while the progress display is
-        active.
-        """
-        self.finish()
-        self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-    def update(self):
-        # type: () -> None
-        pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
-    suffix = "%(percent)d%%"
-    bar_prefix = " "
-    bar_suffix = " "
-    phases = ("\U0001F539", "\U0001F537", "\U0001F535")
-
-
-class DownloadProgressMixin:
-    def __init__(self, *args, **kwargs):
-        # type: (List[Any], Dict[Any, Any]) -> None
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-        self.message = (" " * (get_indentation() + 2)) + self.message  # type: str
-
-    @property
-    def downloaded(self):
-        # type: () -> str
-        return format_size(self.index)  # type: ignore
-
-    @property
-    def download_speed(self):
-        # type: () -> str
-        # Avoid zero division errors...
-        if self.avg == 0.0:  # type: ignore
-            return "..."
-        return format_size(1 / self.avg) + "/s"  # type: ignore
-
-    @property
-    def pretty_eta(self):
-        # type: () -> str
-        if self.eta:  # type: ignore
-            return f"eta {self.eta_td}"  # type: ignore
-        return ""
-
-    def iter(self, it):  # type: ignore
-        for x in it:
-            yield x
-            # B305 is incorrectly raised here
-            # https://github.com/PyCQA/flake8-bugbear/issues/59
-            self.next(len(x))  # noqa: B305
-        self.finish()
-
-
-class WindowsMixin:
-    def __init__(self, *args, **kwargs):
-        # type: (List[Any], Dict[Any, Any]) -> None
-        # The Windows terminal does not support the hide/show cursor ANSI codes
-        # even with colorama. So we'll ensure that hide_cursor is False on
-        # Windows.
-        # This call needs to go before the super() call, so that hide_cursor
-        # is set in time. The base progress bar class writes the "hide cursor"
-        # code to the terminal in its init, so if we don't set this soon
-        # enough, we get a "hide" with no corresponding "show"...
-        if WINDOWS and self.hide_cursor:  # type: ignore
-            self.hide_cursor = False
-
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-
-        # Check if we are running on Windows and we have the colorama module,
-        # if we do then wrap our file with it.
-        if WINDOWS and colorama:
-            self.file = colorama.AnsiToWin32(self.file)  # type: ignore
-            # The progress code expects to be able to call self.file.isatty()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.isatty = lambda: self.file.wrapped.isatty()
-            # The progress code expects to be able to call self.file.flush()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
-
-    file = sys.stdout
-    message = "%(percent)d%%"
-    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
-    pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
-    pass
-
-
-class DownloadBar(BaseDownloadProgressBar, Bar):
-    pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
-    pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
-    pass
-
-
-class DownloadProgressSpinner(
-    WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
-):
-
-    file = sys.stdout
-    suffix = "%(downloaded)s %(download_speed)s"
-
-    def next_phase(self):
-        # type: () -> str
-        if not hasattr(self, "_phaser"):
-            self._phaser = itertools.cycle(self.phases)
-        return next(self._phaser)
-
-    def update(self):
-        # type: () -> None
-        message = self.message % self
-        phase = self.next_phase()
-        suffix = self.suffix % self
-        line = "".join(
-            [
-                message,
-                " " if message else "",
-                phase,
-                " " if suffix else "",
-                suffix,
-            ]
+        total = size
+        columns = (
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TextColumn("eta"),
+            TimeRemainingColumn(),
         )

-        self.writeln(line)
+    progress = Progress(*columns, refresh_per_second=30)
+    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
+    with progress:
+        for chunk in iterable:
+            yield chunk
+            progress.update(task_id, advance=len(chunk))


-BAR_TYPES = {
-    "off": (DownloadSilentBar, DownloadSilentBar),
-    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
-    "ascii": (DownloadBar, DownloadProgressSpinner),
-    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
-    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
-}
+def get_download_progress_renderer(
+    *, bar_type: str, size: Optional[int] = None
+) -> DownloadProgressRenderer:
+    """Get an object that can be used to render the download progress.

-def DownloadProgressProvider(progress_bar, max=None):  # type: ignore
-    if max is None or max == 0:
-        return BAR_TYPES[progress_bar][1]().iter
+    Returns a callable, that takes an iterable to "wrap".
+    """
+    if bar_type == "on":
+        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
     else:
-        return BAR_TYPES[progress_bar][0](max=max).iter
+        return iter  # no-op, when passed an iterator
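The rewrite above drops the vendored `progress` bars in favour of `rich`. The renderer's shape can be tried outside pip; this sketch assumes a `rich` install and fakes the download with generated chunks (`rich_wrap` and `fake_chunks` are illustrative names, not part of the diff):

import time
from typing import Iterable, Iterator

from rich.progress import BarColumn, DownloadColumn, Progress, TextColumn


def rich_wrap(chunks: Iterable[bytes], size: int) -> Iterator[bytes]:
    # Mirrors _rich_progress_bar: advance a task by the length of each
    # chunk as it passes through the generator.
    progress = Progress(TextColumn("download"), BarColumn(), DownloadColumn())
    with progress:
        task_id = progress.add_task("download", total=size)
        for chunk in chunks:
            yield chunk
            progress.update(task_id, advance=len(chunk))


fake_chunks = (b"x" * 1024 for _ in range(64))
for _ in rich_wrap(fake_chunks, size=64 * 1024):
    time.sleep(0.01)  # simulate consuming the download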
@@ -22,6 +22,7 @@ from pip._internal.index.package_finder import PackageFinder
 from pip._internal.models.selection_prefs import SelectionPreferences
 from pip._internal.models.target_python import TargetPython
 from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
 from pip._internal.operations.prepare import RequirementPreparer
 from pip._internal.req.constructors import (
     install_req_from_editable,
@@ -31,9 +32,9 @@ from pip._internal.req.constructors import (
 )
 from pip._internal.req.req_file import parse_requirements
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import RequirementTracker
 from pip._internal.resolution.base import BaseResolver
 from pip._internal.self_outdated_check import pip_self_version_check
+from pip._internal.utils.deprecation import deprecated
 from pip._internal.utils.temp_dir import (
     TempDirectory,
     TempDirectoryTypeRegistry,
@@ -50,14 +51,12 @@ class SessionCommandMixin(CommandContextMixIn):
     A class mixin for command classes needing _build_session().
     """

-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
         super().__init__()
-        self._session = None  # Optional[PipSession]
+        self._session: Optional[PipSession] = None

     @classmethod
-    def _get_index_urls(cls, options):
-        # type: (Values) -> Optional[List[str]]
+    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
         """Return a list of index urls from user-provided options."""
         index_urls = []
         if not getattr(options, "no_index", False):
@@ -70,8 +69,7 @@ class SessionCommandMixin(CommandContextMixIn):
         # Return None rather than an empty list
         return index_urls or None

-    def get_default_session(self, options):
-        # type: (Values) -> PipSession
+    def get_default_session(self, options: Values) -> PipSession:
         """Get a default-managed session."""
         if self._session is None:
             self._session = self.enter_context(self._build_session(options))
@@ -81,8 +79,12 @@ class SessionCommandMixin(CommandContextMixIn):
         assert self._session is not None
         return self._session

-    def _build_session(self, options, retries=None, timeout=None):
-        # type: (Values, Optional[int], Optional[int]) -> PipSession
+    def _build_session(
+        self,
+        options: Values,
+        retries: Optional[int] = None,
+        timeout: Optional[int] = None,
+    ) -> PipSession:
         assert not options.cache_dir or os.path.isabs(options.cache_dir)
         session = PipSession(
             cache=(
@@ -126,8 +128,7 @@ class IndexGroupCommand(Command, SessionCommandMixin):
     This also corresponds to the commands that permit the pip version check.
     """

-    def handle_pip_version_check(self, options):
-        # type: (Values) -> None
+    def handle_pip_version_check(self, options: Values) -> None:
         """
         Do the pip version check if not disabled.

@@ -154,8 +155,7 @@ KEEPABLE_TEMPDIR_TYPES = [
 ]


-def warn_if_run_as_root():
-    # type: () -> None
+def warn_if_run_as_root() -> None:
     """Output a warning for sudo users on Unix.

     In a virtual environment, sudo pip still writes to virtualenv.
@@ -173,29 +173,30 @@ def warn_if_run_as_root():
     # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
     if sys.platform == "win32" or sys.platform == "cygwin":
         return
-    if sys.platform == "darwin" or sys.platform == "linux":
     if os.getuid() != 0:
         return

     logger.warning(
-        "Running pip as root will break packages and permissions. "
-        "You should install packages reliably by using venv: "
+        "Running pip as the 'root' user can result in broken permissions and "
+        "conflicting behaviour with the system package manager. "
+        "It is recommended to use a virtual environment instead: "
         "https://pip.pypa.io/warnings/venv"
     )


-def with_cleanup(func):
-    # type: (Any) -> Any
+def with_cleanup(func: Any) -> Any:
     """Decorator for common logic related to managing temporary
     directories.
     """

-    def configure_tempdir_registry(registry):
-        # type: (TempDirectoryTypeRegistry) -> None
+    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
         for t in KEEPABLE_TEMPDIR_TYPES:
             registry.set_delete(t, False)

-    def wrapper(self, options, args):
-        # type: (RequirementCommand, Values, List[Any]) -> Optional[int]
+    def wrapper(
+        self: RequirementCommand, options: Values, args: List[Any]
+    ) -> Optional[int]:
         assert self.tempdir_registry is not None
         if options.no_clean:
             configure_tempdir_registry(self.tempdir_registry)
@@ -213,33 +214,56 @@ def with_cleanup(func):


 class RequirementCommand(IndexGroupCommand):
-    def __init__(self, *args, **kw):
-        # type: (Any, Any) -> None
+    def __init__(self, *args: Any, **kw: Any) -> None:
         super().__init__(*args, **kw)

         self.cmd_opts.add_option(cmdoptions.no_clean())

     @staticmethod
-    def determine_resolver_variant(options):
-        # type: (Values) -> str
+    def determine_resolver_variant(options: Values) -> str:
         """Determines which resolver should be used, based on the given options."""
         if "legacy-resolver" in options.deprecated_features_enabled:
             return "legacy"

         return "2020-resolver"

+    @staticmethod
+    def determine_build_failure_suppression(options: Values) -> bool:
+        """Determines whether build failures should be suppressed and backtracked on."""
+        if "backtrack-on-build-failures" not in options.deprecated_features_enabled:
+            return False
+
+        if "legacy-resolver" in options.deprecated_features_enabled:
+            raise CommandError("Cannot backtrack with legacy resolver.")
+
+        deprecated(
+            reason=(
+                "Backtracking on build failures can mask issues related to how "
+                "a package generates metadata or builds a wheel. This flag will "
+                "be removed in pip 22.2."
+            ),
+            gone_in=None,
+            replacement=(
+                "avoiding known-bad versions by explicitly telling pip to ignore them "
+                "(either directly as requirements, or via a constraints file)"
+            ),
+            feature_flag=None,
+            issue=10655,
+        )
+        return True
+
     @classmethod
     def make_requirement_preparer(
         cls,
-        temp_build_dir,  # type: TempDirectory
-        options,  # type: Values
-        req_tracker,  # type: RequirementTracker
-        session,  # type: PipSession
-        finder,  # type: PackageFinder
-        use_user_site,  # type: bool
-        download_dir=None,  # type: str
-    ):
-        # type: (...) -> RequirementPreparer
+        temp_build_dir: TempDirectory,
+        options: Values,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        finder: PackageFinder,
+        use_user_site: bool,
+        download_dir: Optional[str] = None,
+        verbosity: int = 0,
+    ) -> RequirementPreparer:
         """
         Create a RequirementPreparer instance for the given parameters.
         """
@@ -269,32 +293,32 @@ class RequirementCommand(IndexGroupCommand):
             src_dir=options.src_dir,
             download_dir=download_dir,
             build_isolation=options.build_isolation,
-            req_tracker=req_tracker,
+            check_build_deps=options.check_build_deps,
+            build_tracker=build_tracker,
             session=session,
             progress_bar=options.progress_bar,
             finder=finder,
             require_hashes=options.require_hashes,
             use_user_site=use_user_site,
             lazy_wheel=lazy_wheel,
-            in_tree_build="in-tree-build" in options.features_enabled,
+            verbosity=verbosity,
         )

     @classmethod
     def make_resolver(
         cls,
-        preparer,  # type: RequirementPreparer
-        finder,  # type: PackageFinder
-        options,  # type: Values
-        wheel_cache=None,  # type: Optional[WheelCache]
-        use_user_site=False,  # type: bool
-        ignore_installed=True,  # type: bool
-        ignore_requires_python=False,  # type: bool
-        force_reinstall=False,  # type: bool
-        upgrade_strategy="to-satisfy-only",  # type: str
-        use_pep517=None,  # type: Optional[bool]
-        py_version_info=None,  # type: Optional[Tuple[int, ...]]
-    ):
-        # type: (...) -> BaseResolver
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        options: Values,
+        wheel_cache: Optional[WheelCache] = None,
+        use_user_site: bool = False,
+        ignore_installed: bool = True,
+        ignore_requires_python: bool = False,
+        force_reinstall: bool = False,
+        upgrade_strategy: str = "to-satisfy-only",
+        use_pep517: Optional[bool] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> BaseResolver:
         """
         Create a Resolver instance for the given parameters.
         """
@@ -302,7 +326,9 @@ class RequirementCommand(IndexGroupCommand):
             install_req_from_req_string,
             isolated=options.isolated_mode,
             use_pep517=use_pep517,
+            config_settings=getattr(options, "config_settings", None),
         )
+        suppress_build_failures = cls.determine_build_failure_suppression(options)
         resolver_variant = cls.determine_resolver_variant(options)
         # The long import name and duplicated invocation is needed to convince
         # Mypy into correctly typechecking. Otherwise it would complain the
@@ -322,6 +348,7 @@ class RequirementCommand(IndexGroupCommand):
                 force_reinstall=force_reinstall,
                 upgrade_strategy=upgrade_strategy,
                 py_version_info=py_version_info,
+                suppress_build_failures=suppress_build_failures,
             )
         import pip._internal.resolution.legacy.resolver

@@ -341,16 +368,15 @@ class RequirementCommand(IndexGroupCommand):

     def get_requirements(
         self,
-        args,  # type: List[str]
-        options,  # type: Values
-        finder,  # type: PackageFinder
-        session,  # type: PipSession
-    ):
-        # type: (...) -> List[InstallRequirement]
+        args: List[str],
+        options: Values,
+        finder: PackageFinder,
+        session: PipSession,
+    ) -> List[InstallRequirement]:
         """
         Parse command-line arguments into the corresponding requirements.
         """
-        requirements = []  # type: List[InstallRequirement]
+        requirements: List[InstallRequirement] = []
         for filename in options.constraints:
             for parsed_req in parse_requirements(
                 filename,
@@ -373,6 +399,7 @@ class RequirementCommand(IndexGroupCommand):
                 isolated=options.isolated_mode,
                 use_pep517=options.use_pep517,
                 user_supplied=True,
+                config_settings=getattr(options, "config_settings", None),
             )
             requirements.append(req_to_add)

@@ -382,6 +409,7 @@ class RequirementCommand(IndexGroupCommand):
                 user_supplied=True,
                 isolated=options.isolated_mode,
                 use_pep517=options.use_pep517,
+                config_settings=getattr(options, "config_settings", None),
             )
             requirements.append(req_to_add)

@@ -420,8 +448,7 @@ class RequirementCommand(IndexGroupCommand):
         return requirements

     @staticmethod
-    def trace_basic_info(finder):
-        # type: (PackageFinder) -> None
+    def trace_basic_info(finder: PackageFinder) -> None:
         """
         Trace basic information about the provided objects.
         """
@@ -433,12 +460,11 @@ class RequirementCommand(IndexGroupCommand):

     def _build_package_finder(
         self,
-        options,  # type: Values
-        session,  # type: PipSession
-        target_python=None,  # type: Optional[TargetPython]
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> PackageFinder
+        options: Values,
+        session: PipSession,
+        target_python: Optional[TargetPython] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> PackageFinder:
         """
         Create a package finder appropriate to this requirement command.

@@ -458,4 +484,5 @@ class RequirementCommand(IndexGroupCommand):
             link_collector=link_collector,
             selection_prefs=selection_prefs,
             target_python=target_python,
+            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
         )
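The new `determine_build_failure_suppression()` gates its behaviour behind a `--use-deprecated` flag and emits a deprecation notice on the way through. A standalone sketch of that control flow, substituting the stdlib `warnings` module for pip's internal `deprecated()` helper (function name here is illustrative):

import warnings
from optparse import Values


def build_failure_suppression_enabled(options: Values) -> bool:
    # Same flow as determine_build_failure_suppression above: opt-in flag,
    # incompatibility check, deprecation notice, then enable.
    flags = getattr(options, "deprecated_features_enabled", [])
    if "backtrack-on-build-failures" not in flags:
        return False
    if "legacy-resolver" in flags:
        raise ValueError("Cannot backtrack with legacy resolver.")
    warnings.warn(
        "Backtracking on build failures can mask metadata/build issues; "
        "prefer excluding known-bad versions explicitly.",
        DeprecationWarning,
        stacklevel=2,
    )
    return True


opts = Values({"deprecated_features_enabled": ["backtrack-on-build-failures"]})
assert build_failure_suppression_enabled(opts) is True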
@@ -3,9 +3,7 @@ import itertools
 import logging
 import sys
 import time
-from typing import IO, Iterator
+from typing import IO, Generator

-from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR

 from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.logging import get_indentation
@@ -14,25 +12,22 @@ logger = logging.getLogger(__name__)


 class SpinnerInterface:
-    def spin(self):
-        # type: () -> None
+    def spin(self) -> None:
         raise NotImplementedError()

-    def finish(self, final_status):
-        # type: (str) -> None
+    def finish(self, final_status: str) -> None:
         raise NotImplementedError()


 class InteractiveSpinner(SpinnerInterface):
     def __init__(
         self,
-        message,
-        file=None,
-        spin_chars="-\\|/",
+        message: str,
+        file: IO[str] = None,
+        spin_chars: str = "-\\|/",
         # Empirically, 8 updates/second looks nice
-        min_update_interval_seconds=0.125,
+        min_update_interval_seconds: float = 0.125,
     ):
-        # type: (str, IO[str], str, float) -> None
         self._message = message
         if file is None:
             file = sys.stdout
@@ -45,8 +40,7 @@ class InteractiveSpinner(SpinnerInterface):
         self._file.write(" " * get_indentation() + self._message + " ... ")
         self._width = 0

-    def _write(self, status):
-        # type: (str) -> None
+    def _write(self, status: str) -> None:
         assert not self._finished
         # Erase what we wrote before by backspacing to the beginning, writing
         # spaces to overwrite the old text, and then backspacing again
@@ -58,16 +52,14 @@ class InteractiveSpinner(SpinnerInterface):
         self._file.flush()
         self._rate_limiter.reset()

-    def spin(self):
-        # type: () -> None
+    def spin(self) -> None:
         if self._finished:
             return
         if not self._rate_limiter.ready():
             return
         self._write(next(self._spin_cycle))

-    def finish(self, final_status):
-        # type: (str) -> None
+    def finish(self, final_status: str) -> None:
         if self._finished:
             return
         self._write(final_status)
@@ -81,29 +73,25 @@ class InteractiveSpinner(SpinnerInterface):
 # act as a keep-alive for systems like Travis-CI that take lack-of-output as
 # an indication that a task has frozen.
 class NonInteractiveSpinner(SpinnerInterface):
-    def __init__(self, message, min_update_interval_seconds=60):
-        # type: (str, float) -> None
+    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
         self._message = message
         self._finished = False
         self._rate_limiter = RateLimiter(min_update_interval_seconds)
         self._update("started")

-    def _update(self, status):
-        # type: (str) -> None
+    def _update(self, status: str) -> None:
         assert not self._finished
         self._rate_limiter.reset()
         logger.info("%s: %s", self._message, status)

-    def spin(self):
-        # type: () -> None
+    def spin(self) -> None:
         if self._finished:
             return
         if not self._rate_limiter.ready():
             return
         self._update("still running...")

-    def finish(self, final_status):
-        # type: (str) -> None
+    def finish(self, final_status: str) -> None:
         if self._finished:
             return
         self._update(f"finished with status '{final_status}'")
@@ -111,32 +99,28 @@ class NonInteractiveSpinner(SpinnerInterface):


 class RateLimiter:
-    def __init__(self, min_update_interval_seconds):
-        # type: (float) -> None
+    def __init__(self, min_update_interval_seconds: float) -> None:
         self._min_update_interval_seconds = min_update_interval_seconds
-        self._last_update = 0  # type: float
+        self._last_update: float = 0

-    def ready(self):
-        # type: () -> bool
+    def ready(self) -> bool:
         now = time.time()
         delta = now - self._last_update
         return delta >= self._min_update_interval_seconds

-    def reset(self):
-        # type: () -> None
+    def reset(self) -> None:
         self._last_update = time.time()


 @contextlib.contextmanager
-def open_spinner(message):
-    # type: (str) -> Iterator[SpinnerInterface]
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
     # Interactive spinner goes directly to sys.stdout rather than being routed
     # through the logging system, but it acts like it has level INFO,
     # i.e. it's only displayed if we're at level INFO or better.
     # Non-interactive spinner goes through the logging system, so it is always
     # in sync with logging configuration.
     if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
-        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
+        spinner: SpinnerInterface = InteractiveSpinner(message)
     else:
         spinner = NonInteractiveSpinner(message)
     try:
@@ -152,9 +136,12 @@ def open_spinner(message):
         spinner.finish("done")


+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
 @contextlib.contextmanager
-def hidden_cursor(file):
-    # type: (IO[str]) -> Iterator[None]
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
     # The Windows terminal does not support the hide/show cursor ANSI codes,
     # even via colorama. So don't even try.
     if WINDOWS:
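The spinner classes above pair a rate limiter with in-place line rewriting. A condensed, runnable sketch of the same idea (class and method names here are illustrative, not pip's API):

import itertools
import sys
import time


class TinySpinner:
    # Condensed version of InteractiveSpinner: rewrite the same spot on
    # the line, rate-limited to roughly 8 updates per second.
    def __init__(self, message: str, interval: float = 0.125) -> None:
        self._message = message
        self._cycle = itertools.cycle("-\\|/")
        self._interval = interval
        self._last = 0.0

    def spin(self) -> None:
        now = time.time()
        if now - self._last < self._interval:
            return  # too soon since the last redraw
        self._last = now
        sys.stdout.write(f"\r{self._message} ... {next(self._cycle)}")
        sys.stdout.flush()

    def finish(self, status: str) -> None:
        sys.stdout.write(f"\r{self._message} ... {status}\n")


spinner = TinySpinner("working")
for _ in range(40):
    spinner.spin()
    time.sleep(0.02)
spinner.finish("done")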
|
@ -3,87 +3,105 @@ Package containing all pip commands
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import importlib
|
import importlib
|
||||||
from collections import OrderedDict, namedtuple
|
from collections import namedtuple
|
||||||
from typing import Any, Optional
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
from pip._internal.cli.base_command import Command
|
from pip._internal.cli.base_command import Command
|
||||||
|
|
||||||
CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')
|
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
|
||||||
|
|
||||||
# The ordering matters for help display.
|
# This dictionary does a bunch of heavy lifting for help output:
|
||||||
# Also, even though the module path starts with the same
|
# - Enables avoiding additional (costly) imports for presenting `--help`.
|
||||||
# "pip._internal.commands" prefix in each case, we include the full path
|
# - The ordering matters for help display.
|
||||||
# because it makes testing easier (specifically when modifying commands_dict
|
#
|
||||||
# in test setup / teardown by adding info for a FakeCommand class defined
|
# Even though the module path starts with the same "pip._internal.commands"
|
||||||
# in a test-related module).
|
# prefix, the full path makes testing easier (specifically when modifying
|
||||||
# Finally, we need to pass an iterable of pairs here rather than a dict
|
# `commands_dict` in test setup / teardown).
|
||||||
# so that the ordering won't be lost when using Python 2.7.
|
commands_dict: Dict[str, CommandInfo] = {
|
||||||
commands_dict = OrderedDict([
|
"install": CommandInfo(
|
||||||
('install', CommandInfo(
|
"pip._internal.commands.install",
|
||||||
'pip._internal.commands.install', 'InstallCommand',
|
"InstallCommand",
|
||||||
'Install packages.',
|
"Install packages.",
|
||||||
)),
|
),
|
||||||
('download', CommandInfo(
|
"download": CommandInfo(
|
||||||
'pip._internal.commands.download', 'DownloadCommand',
|
"pip._internal.commands.download",
|
||||||
'Download packages.',
|
"DownloadCommand",
|
||||||
)),
|
"Download packages.",
|
||||||
('uninstall', CommandInfo(
|
),
|
||||||
'pip._internal.commands.uninstall', 'UninstallCommand',
|
"uninstall": CommandInfo(
|
||||||
'Uninstall packages.',
|
"pip._internal.commands.uninstall",
|
||||||
)),
|
"UninstallCommand",
|
||||||
('freeze', CommandInfo(
|
"Uninstall packages.",
|
||||||
'pip._internal.commands.freeze', 'FreezeCommand',
|
),
|
||||||
'Output installed packages in requirements format.',
|
"freeze": CommandInfo(
|
||||||
)),
|
"pip._internal.commands.freeze",
|
||||||
('list', CommandInfo(
|
"FreezeCommand",
|
||||||
'pip._internal.commands.list', 'ListCommand',
|
"Output installed packages in requirements format.",
|
||||||
'List installed packages.',
|
),
|
||||||
)),
|
"list": CommandInfo(
|
||||||
('show', CommandInfo(
|
"pip._internal.commands.list",
|
||||||
'pip._internal.commands.show', 'ShowCommand',
|
"ListCommand",
|
||||||
'Show information about installed packages.',
|
"List installed packages.",
|
||||||
)),
|
),
|
||||||
('check', CommandInfo(
|
"show": CommandInfo(
|
||||||
'pip._internal.commands.check', 'CheckCommand',
|
"pip._internal.commands.show",
|
||||||
'Verify installed packages have compatible dependencies.',
|
"ShowCommand",
|
||||||
)),
|
"Show information about installed packages.",
|
||||||
('config', CommandInfo(
|
),
|
||||||
'pip._internal.commands.configuration', 'ConfigurationCommand',
|
"check": CommandInfo(
|
||||||
'Manage local and global configuration.',
|
"pip._internal.commands.check",
|
||||||
)),
|
"CheckCommand",
|
||||||
('search', CommandInfo(
|
"Verify installed packages have compatible dependencies.",
|
||||||
'pip._internal.commands.search', 'SearchCommand',
|
),
|
||||||
'Search PyPI for packages.',
|
"config": CommandInfo(
|
||||||
)),
|
"pip._internal.commands.configuration",
|
||||||
('cache', CommandInfo(
|
"ConfigurationCommand",
|
||||||
'pip._internal.commands.cache', 'CacheCommand',
|
"Manage local and global configuration.",
|
||||||
|
),
|
||||||
|
"search": CommandInfo(
|
||||||
|
"pip._internal.commands.search",
|
||||||
|
"SearchCommand",
|
||||||
|
"Search PyPI for packages.",
|
||||||
|
),
|
||||||
|
"cache": CommandInfo(
|
||||||
|
"pip._internal.commands.cache",
|
||||||
|
"CacheCommand",
|
||||||
"Inspect and manage pip's wheel cache.",
|
"Inspect and manage pip's wheel cache.",
|
||||||
)),
|
),
|
||||||
('wheel', CommandInfo(
|
"index": CommandInfo(
|
||||||
'pip._internal.commands.wheel', 'WheelCommand',
|
"pip._internal.commands.index",
|
||||||
'Build wheels from your requirements.',
|
"IndexCommand",
|
||||||
)),
|
"Inspect information available from package indexes.",
|
||||||
('hash', CommandInfo(
|
),
|
||||||
'pip._internal.commands.hash', 'HashCommand',
|
"wheel": CommandInfo(
|
||||||
'Compute hashes of package archives.',
|
"pip._internal.commands.wheel",
|
||||||
)),
|
"WheelCommand",
|
||||||
('completion', CommandInfo(
|
"Build wheels from your requirements.",
|
||||||
'pip._internal.commands.completion', 'CompletionCommand',
|
),
|
||||||
'A helper command used for command completion.',
|
"hash": CommandInfo(
|
||||||
)),
|
"pip._internal.commands.hash",
|
||||||
('debug', CommandInfo(
|
"HashCommand",
|
||||||
'pip._internal.commands.debug', 'DebugCommand',
|
"Compute hashes of package archives.",
|
||||||
'Show information useful for debugging.',
|
),
|
||||||
)),
|
"completion": CommandInfo(
|
||||||
('help', CommandInfo(
|
"pip._internal.commands.completion",
|
||||||
'pip._internal.commands.help', 'HelpCommand',
|
"CompletionCommand",
|
||||||
'Show help for commands.',
|
"A helper command used for command completion.",
|
||||||
)),
|
),
|
||||||
]) # type: OrderedDict[str, CommandInfo]
|
"debug": CommandInfo(
|
||||||
|
"pip._internal.commands.debug",
|
||||||
|
"DebugCommand",
|
||||||
|
"Show information useful for debugging.",
|
||||||
|
),
|
||||||
|
"help": CommandInfo(
|
||||||
|
"pip._internal.commands.help",
|
||||||
|
"HelpCommand",
|
||||||
|
"Show help for commands.",
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def create_command(name, **kwargs):
|
def create_command(name: str, **kwargs: Any) -> Command:
|
||||||
# type: (str, **Any) -> Command
|
|
||||||
"""
|
"""
|
||||||
Create an instance of the Command class with the given name.
|
Create an instance of the Command class with the given name.
|
||||||
"""
|
"""
|
||||||
|
@ -95,8 +113,7 @@ def create_command(name, **kwargs):
|
||||||
return command
|
return command
|
||||||
|
|
||||||
|
|
||||||
def get_similar_commands(name):
|
def get_similar_commands(name: str) -> Optional[str]:
|
||||||
# type: (str) -> Optional[str]
|
|
||||||
"""Command name auto-correct."""
|
"""Command name auto-correct."""
|
||||||
from difflib import get_close_matches
|
from difflib import get_close_matches
|
||||||
|
|
||||||
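
A minimal sketch of how commands_dict drives lazy command loading, assuming
only the names visible in this hunk (the body of create_command sits outside
it, so this is illustrative rather than copied):

    import importlib

    def create_command_sketch(name: str, **kwargs):
        # Resolve the (module_path, class_name, summary) triple, import the
        # module on demand, and instantiate the command class. Deferring the
        # import is what keeps `pip --help` cheap.
        module_path, class_name, summary = commands_dict[name]
        module = importlib.import_module(module_path)
        command_class = getattr(module, class_name)
        return command_class(name=name, summary=summary, **kwargs)
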

@@ -1,4 +1,3 @@
-import logging
 import os
 import textwrap
 from optparse import Values

@@ -8,8 +7,9 @@ import pip._internal.utils.filesystem as filesystem
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import ERROR, SUCCESS
 from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils.logging import getLogger

-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)


 class CacheCommand(Command):

@@ -36,22 +36,20 @@ class CacheCommand(Command):
         %prog purge
     """

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:

         self.cmd_opts.add_option(
-            '--format',
-            action='store',
-            dest='list_format',
+            "--format",
+            action="store",
+            dest="list_format",
             default="human",
-            choices=('human', 'abspath'),
-            help="Select the output format among: human (default) or abspath"
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
         )

         self.parser.insert_option_group(0, self.cmd_opts)

-    def run(self, options, args):
-        # type: (Values, List[Any]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         handlers = {
             "dir": self.get_cache_dir,
             "info": self.get_cache_info,

@@ -61,8 +59,7 @@ class CacheCommand(Command):
         }

         if not options.cache_dir:
-            logger.error("pip cache commands can not "
-                         "function since cache is disabled.")
+            logger.error("pip cache commands can not function since cache is disabled.")
             return ERROR

         # Determine action

@@ -84,78 +81,77 @@ class CacheCommand(Command):

         return SUCCESS

-    def get_cache_dir(self, options, args):
-        # type: (Values, List[Any]) -> None
+    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
         if args:
-            raise CommandError('Too many arguments')
+            raise CommandError("Too many arguments")

         logger.info(options.cache_dir)

-    def get_cache_info(self, options, args):
-        # type: (Values, List[Any]) -> None
+    def get_cache_info(self, options: Values, args: List[Any]) -> None:
         if args:
-            raise CommandError('Too many arguments')
+            raise CommandError("Too many arguments")

         num_http_files = len(self._find_http_files(options))
-        num_packages = len(self._find_wheels(options, '*'))
+        num_packages = len(self._find_wheels(options, "*"))

-        http_cache_location = self._cache_dir(options, 'http')
-        wheels_cache_location = self._cache_dir(options, 'wheels')
+        http_cache_location = self._cache_dir(options, "http")
+        wheels_cache_location = self._cache_dir(options, "wheels")
         http_cache_size = filesystem.format_directory_size(http_cache_location)
-        wheels_cache_size = filesystem.format_directory_size(
-            wheels_cache_location
-        )
+        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

-        message = textwrap.dedent("""
-            Package index page cache location: {http_cache_location}
-            Package index page cache size: {http_cache_size}
-            Number of HTTP files: {num_http_files}
-            Wheels location: {wheels_cache_location}
-            Wheels size: {wheels_cache_size}
-            Number of wheels: {package_count}
-        """).format(
-            http_cache_location=http_cache_location,
-            http_cache_size=http_cache_size,
-            num_http_files=num_http_files,
-            wheels_cache_location=wheels_cache_location,
-            package_count=num_packages,
-            wheels_cache_size=wheels_cache_size,
-        ).strip()
+        message = (
+            textwrap.dedent(
+                """
+                    Package index page cache location: {http_cache_location}
+                    Package index page cache size: {http_cache_size}
+                    Number of HTTP files: {num_http_files}
+                    Wheels location: {wheels_cache_location}
+                    Wheels size: {wheels_cache_size}
+                    Number of wheels: {package_count}
+                """
+            )
+            .format(
+                http_cache_location=http_cache_location,
+                http_cache_size=http_cache_size,
+                num_http_files=num_http_files,
+                wheels_cache_location=wheels_cache_location,
+                package_count=num_packages,
+                wheels_cache_size=wheels_cache_size,
+            )
+            .strip()
+        )

         logger.info(message)

-    def list_cache_items(self, options, args):
-        # type: (Values, List[Any]) -> None
+    def list_cache_items(self, options: Values, args: List[Any]) -> None:
         if len(args) > 1:
-            raise CommandError('Too many arguments')
+            raise CommandError("Too many arguments")

         if args:
             pattern = args[0]
         else:
-            pattern = '*'
+            pattern = "*"

         files = self._find_wheels(options, pattern)
-        if options.list_format == 'human':
+        if options.list_format == "human":
             self.format_for_human(files)
         else:
             self.format_for_abspath(files)

-    def format_for_human(self, files):
-        # type: (List[str]) -> None
+    def format_for_human(self, files: List[str]) -> None:
         if not files:
-            logger.info('Nothing cached.')
+            logger.info("Nothing cached.")
             return

         results = []
         for filename in files:
             wheel = os.path.basename(filename)
             size = filesystem.format_file_size(filename)
-            results.append(f' - {wheel} ({size})')
-        logger.info('Cache contents:\n')
-        logger.info('\n'.join(sorted(results)))
+            results.append(f" - {wheel} ({size})")
+        logger.info("Cache contents:\n")
+        logger.info("\n".join(sorted(results)))

-    def format_for_abspath(self, files):
-        # type: (List[str]) -> None
+    def format_for_abspath(self, files: List[str]) -> None:
         if not files:
             return

@@ -163,49 +159,48 @@ class CacheCommand(Command):
         for filename in files:
             results.append(filename)

-        logger.info('\n'.join(sorted(results)))
+        logger.info("\n".join(sorted(results)))

-    def remove_cache_items(self, options, args):
-        # type: (Values, List[Any]) -> None
+    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
         if len(args) > 1:
-            raise CommandError('Too many arguments')
+            raise CommandError("Too many arguments")

         if not args:
-            raise CommandError('Please provide a pattern')
+            raise CommandError("Please provide a pattern")

         files = self._find_wheels(options, args[0])

-        # Only fetch http files if no specific pattern given
-        if args[0] == '*':
+        no_matching_msg = "No matching packages"
+        if args[0] == "*":
+            # Only fetch http files if no specific pattern given
             files += self._find_http_files(options)
+        else:
+            # Add the pattern to the log message
+            no_matching_msg += ' for pattern "{}"'.format(args[0])

         if not files:
-            raise CommandError('No matching packages')
+            logger.warning(no_matching_msg)

         for filename in files:
             os.unlink(filename)
-            logger.debug('Removed %s', filename)
-        logger.info('Files removed: %s', len(files))
+            logger.verbose("Removed %s", filename)
+        logger.info("Files removed: %s", len(files))

-    def purge_cache(self, options, args):
-        # type: (Values, List[Any]) -> None
+    def purge_cache(self, options: Values, args: List[Any]) -> None:
         if args:
-            raise CommandError('Too many arguments')
+            raise CommandError("Too many arguments")

-        return self.remove_cache_items(options, ['*'])
+        return self.remove_cache_items(options, ["*"])

-    def _cache_dir(self, options, subdir):
-        # type: (Values, str) -> str
+    def _cache_dir(self, options: Values, subdir: str) -> str:
         return os.path.join(options.cache_dir, subdir)

-    def _find_http_files(self, options):
-        # type: (Values) -> List[str]
-        http_dir = self._cache_dir(options, 'http')
-        return filesystem.find_files(http_dir, '*')
+    def _find_http_files(self, options: Values) -> List[str]:
+        http_dir = self._cache_dir(options, "http")
+        return filesystem.find_files(http_dir, "*")

-    def _find_wheels(self, options, pattern):
-        # type: (Values, str) -> List[str]
-        wheel_dir = self._cache_dir(options, 'wheels')
+    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
+        wheel_dir = self._cache_dir(options, "wheels")

         # The wheel filename format, as specified in PEP 427, is:
         # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
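
The same mechanical rewrite repeats throughout this commit: PEP 484 type
comments become inline annotations. In miniature, using the run() signature
from the hunk above:

    # Before: comment-style hints, kept while Python 2 was still supported.
    def run(self, options, args):
        # type: (Values, List[str]) -> int
        ...

    # After: native annotations with the same meaning.
    def run(self, options: Values, args: List[str]) -> int:
        ...
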

@@ -1,6 +1,6 @@
 import logging
 from optparse import Values
-from typing import Any, List
+from typing import List

 from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import ERROR, SUCCESS

@@ -19,8 +19,7 @@ class CheckCommand(Command):
     usage = """
       %prog [options]"""

-    def run(self, options, args):
-        # type: (Values, List[Any]) -> int
+    def run(self, options: Values, args: List[str]) -> int:

         package_set, parsing_probs = create_package_set_from_installed()
         missing, conflicting = check_package_set(package_set)

@@ -30,7 +29,9 @@ class CheckCommand(Command):
             for dependency in missing[project_name]:
                 write_output(
                     "%s %s requires %s, which is not installed.",
-                    project_name, version, dependency[0],
+                    project_name,
+                    version,
+                    dependency[0],
                 )

         for project_name in conflicting:

@@ -38,7 +39,11 @@ class CheckCommand(Command):
             for dep_name, dep_version, req in conflicting[project_name]:
                 write_output(
                     "%s %s has requirement %s, but you have %s %s.",
-                    project_name, version, req, dep_name, dep_version,
+                    project_name,
+                    version,
+                    req,
+                    dep_name,
+                    dep_version,
                 )

         if missing or conflicting or parsing_probs:

@@ -12,7 +12,7 @@ BASE_COMPLETION = """
 """

 COMPLETION_SCRIPTS = {
-    'bash': """
+    "bash": """
         _pip_completion()
         {{
             COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\

@@ -21,7 +21,7 @@ COMPLETION_SCRIPTS = {
         }}
         complete -o default -F _pip_completion {prog}
     """,
-    'zsh': """
+    "zsh": """
         function _pip_completion {{
           local words cword
           read -Ac words

@@ -32,7 +32,7 @@ COMPLETION_SCRIPTS = {
         }}
         compctl -K _pip_completion {prog}
     """,
-    'fish': """
+    "fish": """
         function __fish_complete_pip
             set -lx COMP_WORDS (commandline -o) ""
             set -lx COMP_CWORD ( \\

@@ -43,6 +43,28 @@ COMPLETION_SCRIPTS = {
         end
         complete -fa "(__fish_complete_pip)" -c {prog}
     """,
+    "powershell": """
+        if ((Test-Path Function:\\TabExpansion) -and -not `
+            (Test-Path Function:\\_pip_completeBackup)) {{
+            Rename-Item Function:\\TabExpansion _pip_completeBackup
+        }}
+        function TabExpansion($line, $lastWord) {{
+            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+            if ($lastBlock.StartsWith("{prog} ")) {{
+                $Env:COMP_WORDS=$lastBlock
+                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
+                $Env:PIP_AUTO_COMPLETE=1
+                (& {prog}).Split()
+                Remove-Item Env:COMP_WORDS
+                Remove-Item Env:COMP_CWORD
+                Remove-Item Env:PIP_AUTO_COMPLETE
+            }}
+            elseif (Test-Path Function:\\_pip_completeBackup) {{
+                # Fall back on existing tab expansion
+                _pip_completeBackup $line $lastWord
+            }}
+        }}
+    """,
 }


@@ -51,43 +73,54 @@ class CompletionCommand(Command):

     ignore_require_venv = True

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '--bash', '-b',
-            action='store_const',
-            const='bash',
-            dest='shell',
-            help='Emit completion code for bash')
+            "--bash",
+            "-b",
+            action="store_const",
+            const="bash",
+            dest="shell",
+            help="Emit completion code for bash",
+        )
         self.cmd_opts.add_option(
-            '--zsh', '-z',
-            action='store_const',
-            const='zsh',
-            dest='shell',
-            help='Emit completion code for zsh')
+            "--zsh",
+            "-z",
+            action="store_const",
+            const="zsh",
+            dest="shell",
+            help="Emit completion code for zsh",
+        )
         self.cmd_opts.add_option(
-            '--fish', '-f',
-            action='store_const',
-            const='fish',
-            dest='shell',
-            help='Emit completion code for fish')
+            "--fish",
+            "-f",
+            action="store_const",
+            const="fish",
+            dest="shell",
+            help="Emit completion code for fish",
+        )
+        self.cmd_opts.add_option(
+            "--powershell",
+            "-p",
+            action="store_const",
+            const="powershell",
+            dest="shell",
+            help="Emit completion code for powershell",
+        )

         self.parser.insert_option_group(0, self.cmd_opts)

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         """Prints the completion code of the given shell"""
         shells = COMPLETION_SCRIPTS.keys()
-        shell_options = ['--' + shell for shell in sorted(shells)]
+        shell_options = ["--" + shell for shell in sorted(shells)]
         if options.shell in shells:
             script = textwrap.dedent(
-                COMPLETION_SCRIPTS.get(options.shell, '').format(
-                    prog=get_prog())
+                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
             )
             print(BASE_COMPLETION.format(script=script, shell=options.shell))
             return SUCCESS
         else:
             sys.stderr.write(
-                'ERROR: You must pass {}\n' .format(' or '.join(shell_options))
+                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
             )
             return SUCCESS
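
For orientation, run() above reduces to a format-then-dedent of one script
from COMPLETION_SCRIPTS; a sketch of the bash case, assuming get_prog()
returns "pip":

    import textwrap

    script = textwrap.dedent(COMPLETION_SCRIPTS.get("bash", "").format(prog="pip"))
    print(BASE_COMPLETION.format(script=script, shell="bash"))
    # The printed script is meant to be evaluated by the target shell,
    # e.g. in ~/.bashrc:  eval "$(pip completion --bash)"
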

@@ -27,14 +27,20 @@ class ConfigurationCommand(Command):

     - list: List the active configuration (or from the file specified)
     - edit: Edit the configuration file in an editor
-    - get: Get the value associated with name
-    - set: Set the name=value
-    - unset: Unset the value associated with name
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
     - debug: List the configuration files and values defined under them

+    Configuration keys should be dot separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
     If none of --user, --global and --site are passed, a virtual
     environment configuration file is used if one is active and the file
-    exists. Otherwise, all modifications happen on the to the user file by
+    exists. Otherwise, all modifications happen to the user file by
     default.
     """

@@ -43,53 +49,51 @@ class ConfigurationCommand(Command):
         %prog [<file-option>] list
         %prog [<file-option>] [--editor <editor-path>] edit

-        %prog [<file-option>] get name
-        %prog [<file-option>] set name value
-        %prog [<file-option>] unset name
+        %prog [<file-option>] get command.option
+        %prog [<file-option>] set command.option value
+        %prog [<file-option>] unset command.option
         %prog [<file-option>] debug
     """

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '--editor',
-            dest='editor',
-            action='store',
+            "--editor",
+            dest="editor",
+            action="store",
             default=None,
             help=(
-                'Editor to use to edit the file. Uses VISUAL or EDITOR '
-                'environment variables if not provided.'
-            )
+                "Editor to use to edit the file. Uses VISUAL or EDITOR "
+                "environment variables if not provided."
+            ),
         )

         self.cmd_opts.add_option(
-            '--global',
-            dest='global_file',
-            action='store_true',
+            "--global",
+            dest="global_file",
+            action="store_true",
             default=False,
-            help='Use the system-wide configuration file only'
+            help="Use the system-wide configuration file only",
         )

         self.cmd_opts.add_option(
-            '--user',
-            dest='user_file',
-            action='store_true',
+            "--user",
+            dest="user_file",
+            action="store_true",
             default=False,
-            help='Use the user configuration file only'
+            help="Use the user configuration file only",
         )

         self.cmd_opts.add_option(
-            '--site',
-            dest='site_file',
-            action='store_true',
+            "--site",
+            dest="site_file",
+            action="store_true",
             default=False,
-            help='Use the current environment configuration file only'
+            help="Use the current environment configuration file only",
         )

         self.parser.insert_option_group(0, self.cmd_opts)

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         handlers = {
             "list": self.list_values,
             "edit": self.open_in_editor,

@@ -134,13 +138,16 @@ class ConfigurationCommand(Command):

         return SUCCESS

-    def _determine_file(self, options, need_value):
-        # type: (Values, bool) -> Optional[Kind]
-        file_options = [key for key, value in (
-            (kinds.USER, options.user_file),
-            (kinds.GLOBAL, options.global_file),
-            (kinds.SITE, options.site_file),
-        ) if value]
+    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
+        file_options = [
+            key
+            for key, value in (
+                (kinds.USER, options.user_file),
+                (kinds.GLOBAL, options.global_file),
+                (kinds.SITE, options.site_file),
+            )
+            if value
+        ]

         if not file_options:
             if not need_value:

@@ -161,36 +168,31 @@ class ConfigurationCommand(Command):
             "(--user, --site, --global) to perform."
         )

-    def list_values(self, options, args):
-        # type: (Values, List[str]) -> None
+    def list_values(self, options: Values, args: List[str]) -> None:
         self._get_n_args(args, "list", n=0)

         for key, value in sorted(self.configuration.items()):
             write_output("%s=%r", key, value)

-    def get_name(self, options, args):
-        # type: (Values, List[str]) -> None
+    def get_name(self, options: Values, args: List[str]) -> None:
         key = self._get_n_args(args, "get [name]", n=1)
         value = self.configuration.get_value(key)

         write_output("%s", value)

-    def set_name_value(self, options, args):
-        # type: (Values, List[str]) -> None
+    def set_name_value(self, options: Values, args: List[str]) -> None:
         key, value = self._get_n_args(args, "set [name] [value]", n=2)
         self.configuration.set_value(key, value)

         self._save_configuration()

-    def unset_name(self, options, args):
-        # type: (Values, List[str]) -> None
+    def unset_name(self, options: Values, args: List[str]) -> None:
         key = self._get_n_args(args, "unset [name]", n=1)
         self.configuration.unset_value(key)

         self._save_configuration()

-    def list_config_values(self, options, args):
-        # type: (Values, List[str]) -> None
+    def list_config_values(self, options: Values, args: List[str]) -> None:
         """List config key-value pairs across different config files"""
         self._get_n_args(args, "debug", n=0)

@@ -202,30 +204,25 @@ class ConfigurationCommand(Command):
         for fname in files:
             with indent_log():
                 file_exists = os.path.exists(fname)
-                write_output("%s, exists: %r",
-                             fname, file_exists)
+                write_output("%s, exists: %r", fname, file_exists)
                 if file_exists:
                     self.print_config_file_values(variant)

-    def print_config_file_values(self, variant):
-        # type: (Kind) -> None
+    def print_config_file_values(self, variant: Kind) -> None:
         """Get key-value pairs from the file of a variant"""
-        for name, value in self.configuration.\
-                get_values_in_config(variant).items():
+        for name, value in self.configuration.get_values_in_config(variant).items():
             with indent_log():
                 write_output("%s: %s", name, value)

-    def print_env_var_values(self):
-        # type: () -> None
+    def print_env_var_values(self) -> None:
         """Get key-values pairs present as environment variables"""
-        write_output("%s:", 'env_var')
+        write_output("%s:", "env_var")
         with indent_log():
             for key, value in sorted(self.configuration.get_environ_vars()):
-                env_var = f'PIP_{key.upper()}'
+                env_var = f"PIP_{key.upper()}"
                 write_output("%s=%r", env_var, value)

-    def open_in_editor(self, options, args):
-        # type: (Values, List[str]) -> None
+    def open_in_editor(self, options: Values, args: List[str]) -> None:
         editor = self._determine_editor(options)

         fname = self.configuration.get_file_to_edit()

@@ -234,19 +231,20 @@ class ConfigurationCommand(Command):

         try:
             subprocess.check_call([editor, fname])
+        except FileNotFoundError as e:
+            if not e.filename:
+                e.filename = editor
+            raise
         except subprocess.CalledProcessError as e:
             raise PipError(
-                "Editor Subprocess exited with exit code {}"
-                .format(e.returncode)
+                "Editor Subprocess exited with exit code {}".format(e.returncode)
             )

-    def _get_n_args(self, args, example, n):
-        # type: (List[str], str, int) -> Any
-        """Helper to make sure the command got the right number of arguments
-        """
+    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
+        """Helper to make sure the command got the right number of arguments"""
         if len(args) != n:
             msg = (
-                'Got unexpected number of arguments, expected {}. '
+                "Got unexpected number of arguments, expected {}. "
                 '(example: "{} config {}")'
             ).format(n, get_prog(), example)
             raise PipError(msg)

@@ -256,8 +254,7 @@ class ConfigurationCommand(Command):
         else:
             return args

-    def _save_configuration(self):
-        # type: () -> None
+    def _save_configuration(self) -> None:
         # We successfully ran a modifying command. Need to save the
         # configuration.
         try:

@@ -268,8 +265,7 @@ class ConfigurationCommand(Command):
             )
             raise PipError("Internal Error.")

-    def _determine_editor(self, options):
-        # type: (Values) -> str
+    def _determine_editor(self, options: Values) -> str:
         if options.editor is not None:
             return options.editor
         elif "VISUAL" in os.environ:

@@ -23,61 +23,51 @@ from pip._internal.utils.misc import get_pip_version
 logger = logging.getLogger(__name__)


-def show_value(name, value):
-    # type: (str, Any) -> None
-    logger.info('%s: %s', name, value)
+def show_value(name: str, value: Any) -> None:
+    logger.info("%s: %s", name, value)


-def show_sys_implementation():
-    # type: () -> None
-    logger.info('sys.implementation:')
+def show_sys_implementation() -> None:
+    logger.info("sys.implementation:")
     implementation_name = sys.implementation.name
     with indent_log():
-        show_value('name', implementation_name)
+        show_value("name", implementation_name)


-def create_vendor_txt_map():
-    # type: () -> Dict[str, str]
+def create_vendor_txt_map() -> Dict[str, str]:
     vendor_txt_path = os.path.join(
-        os.path.dirname(pip_location),
-        '_vendor',
-        'vendor.txt'
+        os.path.dirname(pip_location), "_vendor", "vendor.txt"
     )

     with open(vendor_txt_path) as f:
         # Purge non version specifying lines.
         # Also, remove any space prefix or suffixes (including comments).
-        lines = [line.strip().split(' ', 1)[0]
-                 for line in f.readlines() if '==' in line]
+        lines = [
+            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+        ]

         # Transform into "module" -> version dict.
-        return dict(line.split('==', 1) for line in lines)  # type: ignore
+        return dict(line.split("==", 1) for line in lines)


-def get_module_from_module_name(module_name):
-    # type: (str) -> ModuleType
+def get_module_from_module_name(module_name: str) -> ModuleType:
     # Module name can be uppercase in vendor.txt for some reason...
     module_name = module_name.lower()
     # PATCH: setuptools is actually only pkg_resources.
-    if module_name == 'setuptools':
-        module_name = 'pkg_resources'
+    if module_name == "setuptools":
+        module_name = "pkg_resources"

-    __import__(
-        f'pip._vendor.{module_name}',
-        globals(),
-        locals(),
-        level=0
-    )
+    __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
     return getattr(pip._vendor, module_name)


-def get_vendor_version_from_module(module_name):
-    # type: (str) -> Optional[str]
+def get_vendor_version_from_module(module_name: str) -> Optional[str]:
     module = get_module_from_module_name(module_name)
-    version = getattr(module, '__version__', None)
+    version = getattr(module, "__version__", None)

     if not version:
         # Try to find version in debundled module info.
+        assert module.__file__ is not None
         env = get_environment([os.path.dirname(module.__file__)])
         dist = env.get_distribution(module_name)
         if dist:

@@ -86,35 +76,36 @@ def get_vendor_version_from_module(module_name):
     return version


-def show_actual_vendor_versions(vendor_txt_versions):
-    # type: (Dict[str, str]) -> None
+def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
     """Log the actual version and print extra info if there is
     a conflict or if the actual version could not be imported.
     """
     for module_name, expected_version in vendor_txt_versions.items():
-        extra_message = ''
+        extra_message = ""
         actual_version = get_vendor_version_from_module(module_name)
         if not actual_version:
-            extra_message = ' (Unable to locate actual module version, using'\
-                            ' vendor.txt specified version)'
+            extra_message = (
+                " (Unable to locate actual module version, using"
+                " vendor.txt specified version)"
+            )
             actual_version = expected_version
         elif parse_version(actual_version) != parse_version(expected_version):
-            extra_message = ' (CONFLICT: vendor.txt suggests version should'\
-                            ' be {})'.format(expected_version)
-        logger.info('%s==%s%s', module_name, actual_version, extra_message)
+            extra_message = (
+                " (CONFLICT: vendor.txt suggests version should"
+                " be {})".format(expected_version)
+            )
+        logger.info("%s==%s%s", module_name, actual_version, extra_message)


-def show_vendor_versions():
-    # type: () -> None
-    logger.info('vendored library versions:')
+def show_vendor_versions() -> None:
+    logger.info("vendored library versions:")

     vendor_txt_versions = create_vendor_txt_map()
     with indent_log():
         show_actual_vendor_versions(vendor_txt_versions)


-def show_tags(options):
-    # type: (Values) -> None
+def show_tags(options: Values) -> None:
     tag_limit = 10

     target_python = make_target_python(options)

@@ -122,11 +113,11 @@ def show_tags(options):

     # Display the target options that were explicitly provided.
     formatted_target = target_python.format_given()
-    suffix = ''
+    suffix = ""
     if formatted_target:
-        suffix = f' (target: {formatted_target})'
+        suffix = f" (target: {formatted_target})"

-    msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
+    msg = "Compatible tags: {}{}".format(len(tags), suffix)
     logger.info(msg)

     if options.verbose < 1 and len(tags) > tag_limit:

@@ -141,30 +132,28 @@ def show_tags(options):

     if tags_limited:
         msg = (
-            '...\n'
-            '[First {tag_limit} tags shown. Pass --verbose to show all.]'
+            "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
         ).format(tag_limit=tag_limit)
         logger.info(msg)


-def ca_bundle_info(config):
-    # type: (Configuration) -> str
+def ca_bundle_info(config: Configuration) -> str:
     levels = set()
     for key, _ in config.items():
-        levels.add(key.split('.')[0])
+        levels.add(key.split(".")[0])

     if not levels:
         return "Not specified"

-    levels_that_override_global = ['install', 'wheel', 'download']
+    levels_that_override_global = ["install", "wheel", "download"]
     global_overriding_level = [
         level for level in levels if level in levels_that_override_global
     ]
     if not global_overriding_level:
-        return 'global'
+        return "global"

-    if 'global' in levels:
-        levels.remove('global')
+    if "global" in levels:
+        levels.remove("global")
     return ", ".join(levels)


@@ -177,34 +166,33 @@ class DebugCommand(Command):
      %prog <options>"""
    ignore_require_venv = True

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         cmdoptions.add_target_python_options(self.cmd_opts)
         self.parser.insert_option_group(0, self.cmd_opts)
         self.parser.config.load()

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         logger.warning(
             "This command is only meant for debugging. "
             "Do not use this with automation for parsing and getting these "
             "details, since the output and options of this command may "
             "change without notice."
         )
-        show_value('pip version', get_pip_version())
-        show_value('sys.version', sys.version)
-        show_value('sys.executable', sys.executable)
-        show_value('sys.getdefaultencoding', sys.getdefaultencoding())
-        show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
+        show_value("pip version", get_pip_version())
+        show_value("sys.version", sys.version)
+        show_value("sys.executable", sys.executable)
+        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
         show_value(
-            'locale.getpreferredencoding', locale.getpreferredencoding(),
+            "locale.getpreferredencoding",
+            locale.getpreferredencoding(),
         )
-        show_value('sys.platform', sys.platform)
+        show_value("sys.platform", sys.platform)
         show_sys_implementation()

         show_value("'cert' config value", ca_bundle_info(self.parser.config))
-        show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
-        show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
+        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
         show_value("pip._vendor.certifi.where()", where())
         show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

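
As context for create_vendor_txt_map() above: it keeps only ==-pinned lines
and splits them into a name-to-version dict. A worked example on a
hypothetical vendor.txt fragment (the real file is not part of this diff):

    sample = ["CacheControl==0.12.6\n", "# comment, skipped\n", "colorama==0.4.4\n"]
    lines = [line.strip().split(" ", 1)[0] for line in sample if "==" in line]
    assert dict(line.split("==", 1) for line in lines) == {
        "CacheControl": "0.12.6",
        "colorama": "0.4.4",
    }
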

@@ -7,7 +7,7 @@ from pip._internal.cli import cmdoptions
 from pip._internal.cli.cmdoptions import make_target_python
 from pip._internal.cli.req_command import RequirementCommand, with_cleanup
 from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.operations.build.build_tracker import get_build_tracker
 from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
 from pip._internal.utils.temp_dir import TempDirectory

@@ -34,11 +34,9 @@ class DownloadCommand(RequirementCommand):
       %prog [options] <local project path> ...
       %prog [options] <archive url/path> ..."""

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(cmdoptions.constraints())
         self.cmd_opts.add_option(cmdoptions.requirements())
-        self.cmd_opts.add_option(cmdoptions.build_dir())
         self.cmd_opts.add_option(cmdoptions.no_deps())
         self.cmd_opts.add_option(cmdoptions.global_options())
         self.cmd_opts.add_option(cmdoptions.no_binary())

@@ -51,14 +49,18 @@ class DownloadCommand(RequirementCommand):
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
         self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

         self.cmd_opts.add_option(
-            '-d', '--dest', '--destination-dir', '--destination-directory',
-            dest='download_dir',
-            metavar='dir',
+            "-d",
+            "--dest",
+            "--destination-dir",
+            "--destination-directory",
+            dest="download_dir",
+            metavar="dir",
             default=os.curdir,
-            help=("Download packages into <dir>."),
+            help="Download packages into <dir>.",
         )

         cmdoptions.add_target_python_options(self.cmd_opts)

@@ -72,8 +74,7 @@ class DownloadCommand(RequirementCommand):
         self.parser.insert_option_group(0, self.cmd_opts)

     @with_cleanup
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:

         options.ignore_installed = True
         # editable doesn't really make sense for `pip download`, but the bowels

@@ -95,7 +96,7 @@ class DownloadCommand(RequirementCommand):
             ignore_requires_python=options.ignore_requires_python,
         )

-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())

         directory = TempDirectory(
             delete=not options.no_clean,

@@ -108,11 +109,12 @@ class DownloadCommand(RequirementCommand):
         preparer = self.make_requirement_preparer(
             temp_build_dir=directory,
             options=options,
-            req_tracker=req_tracker,
+            build_tracker=build_tracker,
             session=session,
             finder=finder,
             download_dir=options.download_dir,
             use_user_site=False,
+            verbosity=self.verbosity,
         )

         resolver = self.make_resolver(

@@ -125,17 +127,15 @@ class DownloadCommand(RequirementCommand):

         self.trace_basic_info(finder)

-        requirement_set = resolver.resolve(
-            reqs, check_supported_wheels=True
-        )
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

-        downloaded = []  # type: List[str]
+        downloaded: List[str] = []
         for req in requirement_set.requirements.values():
             if req.satisfied_by is None:
                 assert req.name is not None
                 preparer.save_linked_requirement(req)
                 downloaded.append(req.name)
         if downloaded:
-            write_output('Successfully downloaded %s', ' '.join(downloaded))
+            write_output("Successfully downloaded %s", " ".join(downloaded))

         return SUCCESS


@@ -7,9 +7,8 @@ from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import SUCCESS
 from pip._internal.operations.freeze import freeze
 from pip._internal.utils.compat import stdlib_pkgs
-from pip._internal.utils.deprecation import deprecated

-DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
+DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}


 class FreezeCommand(Command):

@@ -23,56 +22,59 @@ class FreezeCommand(Command):
       %prog [options]"""
     log_streams = ("ext://sys.stderr", "ext://sys.stderr")

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
             default=[],
-            metavar='file',
-            help="Use the order in the given requirements file and its "
-                 "comments when generating output. This option can be "
-                 "used multiple times.")
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
         self.cmd_opts.add_option(
-            '-f', '--find-links',
-            dest='find_links',
-            action='append',
-            default=[],
-            metavar='URL',
-            help='URL for finding packages, which will be added to the '
-                 'output.')
-        self.cmd_opts.add_option(
-            '-l', '--local',
-            dest='local',
-            action='store_true',
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
             default=False,
-            help='If in a virtualenv that has global access, do not output '
-                 'globally-installed packages.')
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
         self.cmd_opts.add_option(
-            '--user',
-            dest='user',
-            action='store_true',
+            "--user",
+            dest="user",
+            action="store_true",
             default=False,
-            help='Only output packages installed in user-site.')
+            help="Only output packages installed in user-site.",
+        )
         self.cmd_opts.add_option(cmdoptions.list_path())
         self.cmd_opts.add_option(
-            '--all',
-            dest='freeze_all',
-            action='store_true',
-            help='Do not skip these packages in the output:'
-                 ' {}'.format(', '.join(DEV_PKGS)))
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(DEV_PKGS))
+            ),
+        )
         self.cmd_opts.add_option(
-            '--exclude-editable',
-            dest='exclude_editable',
-            action='store_true',
-            help='Exclude editable package from output.')
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable package from output.",
+        )
         self.cmd_opts.add_option(cmdoptions.list_exclude())

         self.parser.insert_option_group(0, self.cmd_opts)

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         skip = set(stdlib_pkgs)
         if not options.freeze_all:
             skip.update(DEV_PKGS)

@@ -82,17 +84,8 @@ class FreezeCommand(Command):

         cmdoptions.check_list_path_option(options)

-        if options.find_links:
-            deprecated(
-                "--find-links option in pip freeze is deprecated.",
-                replacement=None,
-                gone_in="21.2",
-                issue=9069,
-            )
-
         for line in freeze(
             requirement=options.requirements,
-            find_links=options.find_links,
             local_only=options.local,
             user_only=options.user,
             paths=options.path,

@@ -100,5 +93,5 @@ class FreezeCommand(Command):
             skip=skip,
             exclude_editable=options.exclude_editable,
         ):
-            sys.stdout.write(line + '\n')
+            sys.stdout.write(line + "\n")
         return SUCCESS


@@ -20,38 +20,39 @@ class HashCommand(Command):
     installs.
     """

-    usage = '%prog [options] <file> ...'
+    usage = "%prog [options] <file> ..."
     ignore_require_venv = True

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-a', '--algorithm',
-            dest='algorithm',
+            "-a",
+            "--algorithm",
+            dest="algorithm",
             choices=STRONG_HASHES,
-            action='store',
+            action="store",
             default=FAVORITE_HASH,
-            help='The hash algorithm to use: one of {}'.format(
-                ', '.join(STRONG_HASHES)))
+            help="The hash algorithm to use: one of {}".format(
+                ", ".join(STRONG_HASHES)
+            ),
+        )
         self.parser.insert_option_group(0, self.cmd_opts)

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         if not args:
             self.parser.print_usage(sys.stderr)
             return ERROR

         algorithm = options.algorithm
         for path in args:
-            write_output('%s:\n--hash=%s:%s',
-                         path, algorithm, _hash_of_file(path, algorithm))
+            write_output(
+                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+            )
         return SUCCESS


-def _hash_of_file(path, algorithm):
-    # type: (str, str) -> str
+def _hash_of_file(path: str, algorithm: str) -> str:
     """Return the hash digest of a file."""
-    with open(path, 'rb') as archive:
+    with open(path, "rb") as archive:
         hash = hashlib.new(algorithm)
         for chunk in read_chunks(archive):
             hash.update(chunk)
|
|
|
@@ -13,8 +13,7 @@ class HelpCommand(Command):
       %prog <command>"""
     ignore_require_venv = True

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         from pip._internal.commands import (
             commands_dict,
             create_command,
@@ -34,7 +33,7 @@ class HelpCommand(Command):
             if guess:
                 msg.append(f'maybe you meant "{guess}"')

-            raise CommandError(' - '.join(msg))
+            raise CommandError(" - ".join(msg))

         command = create_command(cmd_name)
         command.parser.print_help()
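
The same signature rewrite repeats across the freeze, hash, and help commands: comment-style hints that only mypy reads become real annotations that Python itself stores. A minimal sketch of the two spellings; the FakeCommand class is a hypothetical stand-in, while Values and List[str] match the imports these files already use:

from optparse import Values
from typing import List

class FakeCommand:  # hypothetical stand-in for a pip Command subclass
    # Before: the signature carries no annotations; only the type checker
    # parses the trailing comment.
    def run_old(self, options, args):
        # type: (Values, List[str]) -> int
        return 0

    # After: the same contract as inline annotations, also visible at
    # runtime via FakeCommand.run_new.__annotations__.
    def run_new(self, options: Values, args: List[str]) -> int:
        return 0
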
@@ -1,5 +1,4 @@
 import errno
-import logging
 import operator
 import os
 import shutil
@@ -22,12 +21,14 @@ from pip._internal.exceptions import CommandError, InstallationError
 from pip._internal.locations import get_scheme
 from pip._internal.metadata import get_environment
 from pip._internal.models.format_control import FormatControl
+from pip._internal.operations.build.build_tracker import get_build_tracker
 from pip._internal.operations.check import ConflictDetails, check_install_conflicts
 from pip._internal.req import install_given_reqs
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.distutils_args import parse_distutils_args
 from pip._internal.utils.filesystem import test_writable_dir
+from pip._internal.utils.logging import getLogger
 from pip._internal.utils.misc import (
     ensure_dir,
     get_pip_version,
@@ -45,13 +46,11 @@ from pip._internal.wheel_builder import (
     should_build_for_install_command,
 )

-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)


-def get_check_binary_allowed(format_control):
-    # type: (FormatControl) -> BinaryAllowedPredicate
-    def check_binary_allowed(req):
-        # type: (InstallRequirement) -> bool
+def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate:
+    def check_binary_allowed(req: InstallRequirement) -> bool:
         canonical_name = canonicalize_name(req.name or "")
         allowed_formats = format_control.get_allowed_formats(canonical_name)
         return "binary" in allowed_formats
@@ -79,8 +78,7 @@ class InstallCommand(RequirementCommand):
       %prog [options] [-e] <local project path> ...
       %prog [options] <archive url/path> ..."""

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(cmdoptions.requirements())
         self.cmd_opts.add_option(cmdoptions.constraints())
         self.cmd_opts.add_option(cmdoptions.no_deps())
@@ -88,94 +86,112 @@ class InstallCommand(RequirementCommand):

         self.cmd_opts.add_option(cmdoptions.editable())
         self.cmd_opts.add_option(
-            '-t', '--target',
-            dest='target_dir',
-            metavar='dir',
+            "-t",
+            "--target",
+            dest="target_dir",
+            metavar="dir",
             default=None,
-            help='Install packages into <dir>. '
-                 'By default this will not replace existing files/folders in '
-                 '<dir>. Use --upgrade to replace existing packages in <dir> '
-                 'with new versions.'
+            help=(
+                "Install packages into <dir>. "
+                "By default this will not replace existing files/folders in "
+                "<dir>. Use --upgrade to replace existing packages in <dir> "
+                "with new versions."
+            ),
         )
         cmdoptions.add_target_python_options(self.cmd_opts)

         self.cmd_opts.add_option(
-            '--user',
-            dest='use_user_site',
-            action='store_true',
-            help="Install to the Python user install directory for your "
-                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
-                 "Windows. (See the Python documentation for site.USER_BASE "
-                 "for full details.)")
+            "--user",
+            dest="use_user_site",
+            action="store_true",
+            help=(
+                "Install to the Python user install directory for your "
+                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+                "Windows. (See the Python documentation for site.USER_BASE "
+                "for full details.)"
+            ),
+        )
         self.cmd_opts.add_option(
-            '--no-user',
-            dest='use_user_site',
-            action='store_false',
-            help=SUPPRESS_HELP)
+            "--no-user",
+            dest="use_user_site",
+            action="store_false",
+            help=SUPPRESS_HELP,
+        )
         self.cmd_opts.add_option(
-            '--root',
-            dest='root_path',
-            metavar='dir',
+            "--root",
+            dest="root_path",
+            metavar="dir",
             default=None,
-            help="Install everything relative to this alternate root "
-                 "directory.")
+            help="Install everything relative to this alternate root directory.",
+        )
         self.cmd_opts.add_option(
-            '--prefix',
-            dest='prefix_path',
-            metavar='dir',
+            "--prefix",
+            dest="prefix_path",
+            metavar="dir",
             default=None,
-            help="Installation prefix where lib, bin and other top-level "
-                 "folders are placed")
-        self.cmd_opts.add_option(cmdoptions.build_dir())
+            help=(
+                "Installation prefix where lib, bin and other top-level "
+                "folders are placed"
+            ),
+        )

         self.cmd_opts.add_option(cmdoptions.src())

         self.cmd_opts.add_option(
-            '-U', '--upgrade',
-            dest='upgrade',
-            action='store_true',
-            help='Upgrade all specified packages to the newest available '
-                 'version. The handling of dependencies depends on the '
-                 'upgrade-strategy used.'
+            "-U",
+            "--upgrade",
+            dest="upgrade",
+            action="store_true",
+            help=(
+                "Upgrade all specified packages to the newest available "
+                "version. The handling of dependencies depends on the "
+                "upgrade-strategy used."
+            ),
         )

         self.cmd_opts.add_option(
-            '--upgrade-strategy',
-            dest='upgrade_strategy',
-            default='only-if-needed',
-            choices=['only-if-needed', 'eager'],
-            help='Determines how dependency upgrading should be handled '
-                 '[default: %default]. '
-                 '"eager" - dependencies are upgraded regardless of '
-                 'whether the currently installed version satisfies the '
-                 'requirements of the upgraded package(s). '
-                 '"only-if-needed" - are upgraded only when they do not '
-                 'satisfy the requirements of the upgraded package(s).'
+            "--upgrade-strategy",
+            dest="upgrade_strategy",
+            default="only-if-needed",
+            choices=["only-if-needed", "eager"],
+            help=(
+                "Determines how dependency upgrading should be handled "
+                "[default: %default]. "
+                '"eager" - dependencies are upgraded regardless of '
+                "whether the currently installed version satisfies the "
+                "requirements of the upgraded package(s). "
+                '"only-if-needed" - are upgraded only when they do not '
+                "satisfy the requirements of the upgraded package(s)."
+            ),
         )

         self.cmd_opts.add_option(
-            '--force-reinstall',
-            dest='force_reinstall',
-            action='store_true',
-            help='Reinstall all packages even if they are already '
-                 'up-to-date.')
+            "--force-reinstall",
+            dest="force_reinstall",
+            action="store_true",
+            help="Reinstall all packages even if they are already up-to-date.",
+        )

         self.cmd_opts.add_option(
-            '-I', '--ignore-installed',
-            dest='ignore_installed',
-            action='store_true',
-            help='Ignore the installed packages, overwriting them. '
-                 'This can break your system if the existing package '
-                 'is of a different version or was installed '
-                 'with a different package manager!'
+            "-I",
+            "--ignore-installed",
+            dest="ignore_installed",
+            action="store_true",
+            help=(
+                "Ignore the installed packages, overwriting them. "
+                "This can break your system if the existing package "
+                "is of a different version or was installed "
+                "with a different package manager!"
+            ),
         )

         self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())

+        self.cmd_opts.add_option(cmdoptions.config_settings())
         self.cmd_opts.add_option(cmdoptions.install_options())
         self.cmd_opts.add_option(cmdoptions.global_options())

@@ -208,12 +224,12 @@ class InstallCommand(RequirementCommand):
             default=True,
             help="Do not warn about broken dependencies",
         )

         self.cmd_opts.add_option(cmdoptions.no_binary())
         self.cmd_opts.add_option(cmdoptions.only_binary())
         self.cmd_opts.add_option(cmdoptions.prefer_binary())
         self.cmd_opts.add_option(cmdoptions.require_hashes())
         self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.root_user_action())

         index_opts = cmdoptions.make_option_group(
             cmdoptions.index_group,
@@ -224,8 +240,7 @@ class InstallCommand(RequirementCommand):
         self.parser.insert_option_group(0, self.cmd_opts)

     @with_cleanup
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         if options.use_user_site and options.target_dir is not None:
             raise CommandError("Can not combine '--user' and '--target'")

@@ -238,7 +253,7 @@ class InstallCommand(RequirementCommand):

         install_options = options.install_options or []

-        logger.debug("Using %s", get_pip_version())
+        logger.verbose("Using %s", get_pip_version())
         options.use_user_site = decide_user_install(
             options.use_user_site,
             prefix_path=options.prefix_path,
@@ -247,16 +262,19 @@ class InstallCommand(RequirementCommand):
             isolated_mode=options.isolated_mode,
         )

-        target_temp_dir = None  # type: Optional[TempDirectory]
-        target_temp_dir_path = None  # type: Optional[str]
+        target_temp_dir: Optional[TempDirectory] = None
+        target_temp_dir_path: Optional[str] = None
         if options.target_dir:
             options.ignore_installed = True
             options.target_dir = os.path.abspath(options.target_dir)
-            if (os.path.exists(options.target_dir) and not
-                    os.path.isdir(options.target_dir)):
+            if (
+                # fmt: off
+                os.path.exists(options.target_dir) and
+                not os.path.isdir(options.target_dir)
+                # fmt: on
+            ):
                 raise CommandError(
-                    "Target path exists but is not a directory, will not "
-                    "continue."
+                    "Target path exists but is not a directory, will not continue."
                 )

             # Create a target directory for using with the target option
@@ -277,7 +295,7 @@ class InstallCommand(RequirementCommand):
         )
         wheel_cache = WheelCache(options.cache_dir, options.format_control)

-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())

         directory = TempDirectory(
             delete=not options.no_clean,
@@ -288,17 +306,22 @@ class InstallCommand(RequirementCommand):
         try:
             reqs = self.get_requirements(args, options, finder, session)

-            reject_location_related_install_options(
-                reqs, options.install_options
-            )
+            # Only when installing is it permitted to use PEP 660.
+            # In other circumstances (pip wheel, pip download) we generate
+            # regular (i.e. non editable) metadata and wheels.
+            for req in reqs:
+                req.permit_editable_wheels = True
+
+            reject_location_related_install_options(reqs, options.install_options)

             preparer = self.make_requirement_preparer(
                 temp_build_dir=directory,
                 options=options,
-                req_tracker=req_tracker,
+                build_tracker=build_tracker,
                 session=session,
                 finder=finder,
                 use_user_site=options.use_user_site,
+                verbosity=self.verbosity,
             )
             resolver = self.make_resolver(
                 preparer=preparer,
@@ -327,19 +350,14 @@ class InstallCommand(RequirementCommand):
                 # If we're not replacing an already installed pip,
                 # we're not modifying it.
                 modifying_pip = pip_req.satisfied_by is None
-                protect_pip_from_modification_on_windows(
-                    modifying_pip=modifying_pip
-                )
+                protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

-            check_binary_allowed = get_check_binary_allowed(
-                finder.format_control
-            )
+            check_binary_allowed = get_check_binary_allowed(finder.format_control)

             reqs_to_build = [
-                r for r in requirement_set.requirements.values()
-                if should_build_for_install_command(
-                    r, check_binary_allowed
-                )
+                r
+                for r in requirement_set.requirements.values()
+                if should_build_for_install_command(r, check_binary_allowed)
             ]

             _, build_failures = build(
@@ -350,44 +368,40 @@ class InstallCommand(RequirementCommand):
                 global_options=[],
             )

-            # If we're using PEP 517, we cannot do a direct install
+            # If we're using PEP 517, we cannot do a legacy setup.py install
             # so we fail here.
-            pep517_build_failure_names = [
-                r.name  # type: ignore
-                for r in build_failures if r.use_pep517
-            ]  # type: List[str]
+            pep517_build_failure_names: List[str] = [
+                r.name for r in build_failures if r.use_pep517  # type: ignore
+            ]
             if pep517_build_failure_names:
                 raise InstallationError(
-                    "Could not build wheels for {} which use"
-                    " PEP 517 and cannot be installed directly".format(
+                    "Could not build wheels for {}, which is required to "
+                    "install pyproject.toml-based projects".format(
                         ", ".join(pep517_build_failure_names)
                     )
                 )

             # For now, we just warn about failures building legacy
-            # requirements, as we'll fall through to a direct
-            # install for those.
+            # requirements, as we'll fall through to a setup.py install for
+            # those.
             for r in build_failures:
                 if not r.use_pep517:
                     r.legacy_install_reason = 8368

-            to_install = resolver.get_installation_order(
-                requirement_set
-            )
+            to_install = resolver.get_installation_order(requirement_set)

             # Check for conflicts in the package set we're installing.
-            conflicts = None  # type: Optional[ConflictDetails]
+            conflicts: Optional[ConflictDetails] = None
             should_warn_about_conflicts = (
-                not options.ignore_dependencies and
-                options.warn_about_conflicts
+                not options.ignore_dependencies and options.warn_about_conflicts
             )
             if should_warn_about_conflicts:
                 conflicts = self._determine_conflicts(to_install)

             # Don't warn about script install locations if
-            # --target has been specified
+            # --target or --prefix has been specified
             warn_script_location = options.warn_script_location
-            if options.target_dir:
+            if options.target_dir or options.prefix_path:
                 warn_script_location = False

             installed = install_given_reqs(
@@ -411,7 +425,7 @@ class InstallCommand(RequirementCommand):
             )
             env = get_environment(lib_locations)

-            installed.sort(key=operator.attrgetter('name'))
+            installed.sort(key=operator.attrgetter("name"))
             items = []
             for result in installed:
                 item = result.name
@@ -429,16 +443,19 @@ class InstallCommand(RequirementCommand):
                     resolver_variant=self.determine_resolver_variant(options),
                 )

-            installed_desc = ' '.join(items)
+            installed_desc = " ".join(items)
             if installed_desc:
                 write_output(
-                    'Successfully installed %s', installed_desc,
+                    "Successfully installed %s",
+                    installed_desc,
                 )
         except OSError as error:
-            show_traceback = (self.verbosity >= 1)
+            show_traceback = self.verbosity >= 1

             message = create_os_error_message(
-                error, show_traceback, options.use_user_site,
+                error,
+                show_traceback,
+                options.use_user_site,
             )
             logger.error(message, exc_info=show_traceback)  # noqa

@@ -449,12 +466,13 @@ class InstallCommand(RequirementCommand):
             self._handle_target_dir(
                 options.target_dir, target_temp_dir, options.upgrade
             )
+        if options.root_user_action == "warn":
             warn_if_run_as_root()
         return SUCCESS

-    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
-        # type: (str, TempDirectory, bool) -> None
+    def _handle_target_dir(
+        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
+    ) -> None:
         ensure_dir(target_dir)

         # Checking both purelib and platlib directories for installed
@@ -463,7 +481,7 @@ class InstallCommand(RequirementCommand):

         # Checking both purelib and platlib directories for installed
         # packages to be moved to target directory
-        scheme = get_scheme('', home=target_temp_dir.path)
+        scheme = get_scheme("", home=target_temp_dir.path)
         purelib_dir = scheme.purelib
         platlib_dir = scheme.platlib
         data_dir = scheme.data
@@ -485,18 +503,18 @@ class InstallCommand(RequirementCommand):
             if os.path.exists(target_item_dir):
                 if not upgrade:
                     logger.warning(
-                        'Target directory %s already exists. Specify '
-                        '--upgrade to force replacement.',
-                        target_item_dir
+                        "Target directory %s already exists. Specify "
+                        "--upgrade to force replacement.",
+                        target_item_dir,
                     )
                     continue
                 if os.path.islink(target_item_dir):
                     logger.warning(
-                        'Target directory %s already exists and is '
-                        'a link. pip will not automatically replace '
-                        'links, please remove if replacement is '
-                        'desired.',
-                        target_item_dir
+                        "Target directory %s already exists and is "
+                        "a link. pip will not automatically replace "
+                        "links, please remove if replacement is "
+                        "desired.",
+                        target_item_dir,
                     )
                     continue
                 if os.path.isdir(target_item_dir):
@@ -504,13 +522,11 @@ class InstallCommand(RequirementCommand):
                 else:
                     os.remove(target_item_dir)

-                shutil.move(
-                    os.path.join(lib_dir, item),
-                    target_item_dir
-                )
+                shutil.move(os.path.join(lib_dir, item), target_item_dir)

-    def _determine_conflicts(self, to_install):
-        # type: (List[InstallRequirement]) -> Optional[ConflictDetails]
+    def _determine_conflicts(
+        self, to_install: List[InstallRequirement]
+    ) -> Optional[ConflictDetails]:
         try:
             return check_install_conflicts(to_install)
         except Exception:
@@ -520,13 +536,14 @@ class InstallCommand(RequirementCommand):
             )
             return None

-    def _warn_about_conflicts(self, conflict_details, resolver_variant):
-        # type: (ConflictDetails, str) -> None
+    def _warn_about_conflicts(
+        self, conflict_details: ConflictDetails, resolver_variant: str
+    ) -> None:
         package_set, (missing, conflicting) = conflict_details
         if not missing and not conflicting:
             return

-        parts = []  # type: List[str]
+        parts: List[str] = []
         if resolver_variant == "legacy":
             parts.append(
                 "pip's legacy dependency resolver does not consider dependency "
@@ -567,7 +584,7 @@ class InstallCommand(RequirementCommand):
                     requirement=req,
                     dep_name=dep_name,
                     dep_version=dep_version,
-                    you=("you" if resolver_variant == "2020-resolver" else "you'll")
+                    you=("you" if resolver_variant == "2020-resolver" else "you'll"),
                 )
                 parts.append(message)

@@ -575,15 +592,14 @@ class InstallCommand(RequirementCommand):


 def get_lib_location_guesses(
-    user=False,  # type: bool
-    home=None,  # type: Optional[str]
-    root=None,  # type: Optional[str]
-    isolated=False,  # type: bool
-    prefix=None  # type: Optional[str]
-):
-    # type:(...) -> List[str]
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> List[str]:
     scheme = get_scheme(
-        '',
+        "",
         user=user,
         home=home,
         root=root,
@@ -593,22 +609,20 @@ def get_lib_location_guesses(
     return [scheme.purelib, scheme.platlib]


-def site_packages_writable(root, isolated):
-    # type: (Optional[str], bool) -> bool
+def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
     return all(
-        test_writable_dir(d) for d in set(
-            get_lib_location_guesses(root=root, isolated=isolated))
+        test_writable_dir(d)
+        for d in set(get_lib_location_guesses(root=root, isolated=isolated))
     )


 def decide_user_install(
-    use_user_site,  # type: Optional[bool]
-    prefix_path=None,  # type: Optional[str]
-    target_dir=None,  # type: Optional[str]
-    root_path=None,  # type: Optional[str]
-    isolated_mode=False,  # type: bool
-):
-    # type: (...) -> bool
+    use_user_site: Optional[bool],
+    prefix_path: Optional[str] = None,
+    target_dir: Optional[str] = None,
+    root_path: Optional[str] = None,
+    isolated_mode: bool = False,
+) -> bool:
     """Determine whether to do a user install based on the input options.

     If use_user_site is False, no additional checks are done.
@@ -656,18 +670,21 @@ def decide_user_install(
         logger.debug("Non-user install because site-packages writeable")
         return False

-    logger.info("Defaulting to user installation because normal site-packages "
-                "is not writeable")
+    logger.info(
+        "Defaulting to user installation because normal site-packages "
+        "is not writeable"
+    )
     return True


-def reject_location_related_install_options(requirements, options):
-    # type: (List[InstallRequirement], Optional[List[str]]) -> None
+def reject_location_related_install_options(
+    requirements: List[InstallRequirement], options: Optional[List[str]]
+) -> None:
     """If any location-changing --install-option arguments were passed for
     requirements or on the command-line, then show a deprecation warning.
     """
-    def format_options(option_names):
-        # type: (Iterable[str]) -> List[str]
+
+    def format_options(option_names: Iterable[str]) -> List[str]:
         return ["--{}".format(name.replace("_", "-")) for name in option_names]

     offenders = []
@@ -686,9 +703,7 @@ def reject_location_related_install_options(requirements, options):
     location_options = parse_distutils_args(options)
     if location_options:
         offenders.append(
-            "{!r} from command line".format(
-                format_options(location_options.keys())
-            )
+            "{!r} from command line".format(format_options(location_options.keys()))
         )

     if not offenders:
@@ -697,14 +712,13 @@ def reject_location_related_install_options(requirements, options):
     raise CommandError(
         "Location-changing options found in --install-option: {}."
         " This is unsupported, use pip-level options like --user,"
-        " --prefix, --root, and --target instead.".format(
-            "; ".join(offenders)
-        )
+        " --prefix, --root, and --target instead.".format("; ".join(offenders))
     )


-def create_os_error_message(error, show_traceback, using_user_site):
-    # type: (OSError, bool, bool) -> str
+def create_os_error_message(
+    error: OSError, show_traceback: bool, using_user_site: bool
+) -> str:
     """Format an error message for an OSError

     It may occur anytime during the execution of the install command.
@@ -729,12 +743,31 @@ def create_os_error_message(error, show_traceback, using_user_site):
         permissions_part = "Check the permissions"

     if not running_under_virtualenv() and not using_user_site:
-        parts.extend([
-            user_option_part, " or ",
-            permissions_part.lower(),
-        ])
+        parts.extend(
+            [
+                user_option_part,
+                " or ",
+                permissions_part.lower(),
+            ]
+        )
     else:
         parts.append(permissions_part)
     parts.append(".\n")

+    # Suggest the user to enable Long Paths if path length is
+    # more than 260
+    if (
+        WINDOWS
+        and error.errno == errno.ENOENT
+        and error.filename
+        and len(error.filename) > 260
+    ):
+        parts.append(
+            "HINT: This error might have occurred since "
+            "this system does not have Windows Long Path "
+            "support enabled. You can find information on "
+            "how to enable this at "
+            "https://pip.pypa.io/warnings/enable-long-paths\n"
+        )
+
     return "".join(parts).strip() + "\n"
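
The long-path hint added above fires only on a narrow ENOENT case: on Windows, a missing-file error whose path exceeds the classic 260-character MAX_PATH limit more likely means Long Path support is disabled than that the file is truly absent. A standalone sketch of the same predicate; the _needs_long_path_hint helper name is ours, not pip's, which inlines the condition in create_os_error_message:

import errno
import sys

WINDOWS = sys.platform == "win32"  # mirrors pip._internal.utils.compat.WINDOWS

def _needs_long_path_hint(error: OSError) -> bool:
    # hypothetical helper: same checks, in the same order, as the diff above;
    # error.filename can be None, so it is truth-tested before len().
    return (
        WINDOWS
        and error.errno == errno.ENOENT
        and bool(error.filename)
        and len(error.filename) > 260
    )
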
@@ -1,9 +1,9 @@
 import json
 import logging
 from optparse import Values
-from typing import Iterator, List, Set, Tuple
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast

-from pip._vendor.pkg_resources import Distribution
+from pip._vendor.packaging.utils import canonicalize_name

 from pip._internal.cli import cmdoptions
 from pip._internal.cli.req_command import IndexGroupCommand
@@ -11,17 +11,27 @@ from pip._internal.cli.status_codes import SUCCESS
 from pip._internal.exceptions import CommandError
 from pip._internal.index.collector import LinkCollector
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_environment
 from pip._internal.models.selection_prefs import SelectionPreferences
 from pip._internal.network.session import PipSession
 from pip._internal.utils.compat import stdlib_pkgs
-from pip._internal.utils.misc import (
-    dist_is_editable,
-    get_installed_distributions,
-    tabulate,
-    write_output,
-)
-from pip._internal.utils.packaging import get_installer
-from pip._internal.utils.parallel import map_multithread
+from pip._internal.utils.misc import tabulate, write_output
+
+if TYPE_CHECKING:
+    from pip._internal.metadata.base import DistributionVersion
+
+    class _DistWithLatestInfo(BaseDistribution):
+        """Give the distribution object a couple of extra fields.
+
+        These will be populated during ``get_outdated()``. This is dirty but
+        makes the rest of the code much cleaner.
+        """
+
+        latest_version: DistributionVersion
+        latest_filetype: str
+
+    _ProcessedDists = Sequence[_DistWithLatestInfo]

 logger = logging.getLogger(__name__)

@@ -37,86 +47,94 @@ class ListCommand(IndexGroupCommand):
     usage = """
      %prog [options]"""

-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-o', '--outdated',
-            action='store_true',
+            "-o",
+            "--outdated",
+            action="store_true",
             default=False,
-            help='List outdated packages')
-        self.cmd_opts.add_option(
-            '-u', '--uptodate',
-            action='store_true',
-            default=False,
-            help='List uptodate packages')
-        self.cmd_opts.add_option(
-            '-e', '--editable',
-            action='store_true',
-            default=False,
-            help='List editable projects.')
-        self.cmd_opts.add_option(
-            '-l', '--local',
-            action='store_true',
-            default=False,
-            help=('If in a virtualenv that has global access, do not list '
-                  'globally-installed packages.'),
+            help="List outdated packages",
         )
         self.cmd_opts.add_option(
-            '--user',
-            dest='user',
-            action='store_true',
+            "-u",
+            "--uptodate",
+            action="store_true",
             default=False,
-            help='Only output packages installed in user-site.')
+            help="List uptodate packages",
+        )
+        self.cmd_opts.add_option(
+            "-e",
+            "--editable",
+            action="store_true",
+            default=False,
+            help="List editable projects.",
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
         self.cmd_opts.add_option(cmdoptions.list_path())
         self.cmd_opts.add_option(
-            '--pre',
-            action='store_true',
+            "--pre",
+            action="store_true",
             default=False,
-            help=("Include pre-release and development versions. By default, "
-                  "pip only finds stable versions."),
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
         )

         self.cmd_opts.add_option(
-            '--format',
-            action='store',
-            dest='list_format',
+            "--format",
+            action="store",
+            dest="list_format",
             default="columns",
-            choices=('columns', 'freeze', 'json'),
-            help="Select the output format among: columns (default), freeze, "
-                 "or json",
+            choices=("columns", "freeze", "json"),
+            help="Select the output format among: columns (default), freeze, or json",
         )

         self.cmd_opts.add_option(
-            '--not-required',
-            action='store_true',
-            dest='not_required',
-            help="List packages that are not dependencies of "
-                 "installed packages.",
+            "--not-required",
+            action="store_true",
+            dest="not_required",
+            help="List packages that are not dependencies of installed packages.",
         )

         self.cmd_opts.add_option(
-            '--exclude-editable',
-            action='store_false',
-            dest='include_editable',
-            help='Exclude editable package from output.',
+            "--exclude-editable",
+            action="store_false",
+            dest="include_editable",
+            help="Exclude editable package from output.",
         )
         self.cmd_opts.add_option(
-            '--include-editable',
-            action='store_true',
-            dest='include_editable',
-            help='Include editable package from output.',
+            "--include-editable",
+            action="store_true",
+            dest="include_editable",
+            help="Include editable package from output.",
             default=True,
         )
         self.cmd_opts.add_option(cmdoptions.list_exclude())
-        index_opts = cmdoptions.make_option_group(
-            cmdoptions.index_group, self.parser
-        )
+        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

         self.parser.insert_option_group(0, index_opts)
         self.parser.insert_option_group(0, self.cmd_opts)

-    def _build_package_finder(self, options, session):
-        # type: (Values, PipSession) -> PackageFinder
+    def _build_package_finder(
+        self, options: Values, session: PipSession
+    ) -> PackageFinder:
         """
         Create a package finder appropriate to this list command.
         """
@@ -131,28 +149,29 @@ class ListCommand(IndexGroupCommand):
         return PackageFinder.create(
             link_collector=link_collector,
             selection_prefs=selection_prefs,
+            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
         )

-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         if options.outdated and options.uptodate:
-            raise CommandError(
-                "Options --outdated and --uptodate cannot be combined.")
+            raise CommandError("Options --outdated and --uptodate cannot be combined.")

         cmdoptions.check_list_path_option(options)

         skip = set(stdlib_pkgs)
         if options.excludes:
-            skip.update(options.excludes)
+            skip.update(canonicalize_name(n) for n in options.excludes)

-        packages = get_installed_distributions(
-            local_only=options.local,
-            user_only=options.user,
-            editables_only=options.editable,
-            include_editables=options.include_editable,
-            paths=options.path,
-            skip=skip,
-        )
+        packages: "_ProcessedDists" = [
+            cast("_DistWithLatestInfo", d)
+            for d in get_environment(options.path).iter_installed_distributions(
+                local_only=options.local,
+                user_only=options.user,
+                editables_only=options.editable,
+                include_editables=options.include_editable,
+                skip=skip,
+            )
+        ]

         # get_not_required must be called firstly in order to find and
         # filter out all dependencies correctly. Otherwise a package
@@ -169,46 +188,58 @@ class ListCommand(IndexGroupCommand):
         self.output_package_listing(packages, options)
         return SUCCESS

-    def get_outdated(self, packages, options):
-        # type: (List[Distribution], Values) -> List[Distribution]
+    def get_outdated(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
         return [
-            dist for dist in self.iter_packages_latest_infos(packages, options)
-            if dist.latest_version > dist.parsed_version
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if dist.latest_version > dist.version
         ]

-    def get_uptodate(self, packages, options):
-        # type: (List[Distribution], Values) -> List[Distribution]
+    def get_uptodate(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
         return [
-            dist for dist in self.iter_packages_latest_infos(packages, options)
-            if dist.latest_version == dist.parsed_version
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if dist.latest_version == dist.version
         ]

-    def get_not_required(self, packages, options):
-        # type: (List[Distribution], Values) -> List[Distribution]
-        dep_keys = set()  # type: Set[Distribution]
-        for dist in packages:
-            dep_keys.update(requirement.key for requirement in dist.requires())
+    def get_not_required(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        dep_keys = {
+            canonicalize_name(dep.name)
+            for dist in packages
+            for dep in (dist.iter_dependencies() or ())
+        }

         # Create a set to remove duplicate packages, and cast it to a list
         # to keep the return type consistent with get_outdated and
         # get_uptodate
-        return list({pkg for pkg in packages if pkg.key not in dep_keys})
+        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})

-    def iter_packages_latest_infos(self, packages, options):
-        # type: (List[Distribution], Values) -> Iterator[Distribution]
+    def iter_packages_latest_infos(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> Generator["_DistWithLatestInfo", None, None]:
         with self._build_session(options) as session:
             finder = self._build_package_finder(options, session)

-            def latest_info(dist):
-                # type: (Distribution) -> Distribution
-                all_candidates = finder.find_all_candidates(dist.key)
+            def latest_info(
+                dist: "_DistWithLatestInfo",
+            ) -> Optional["_DistWithLatestInfo"]:
+                all_candidates = finder.find_all_candidates(dist.canonical_name)
                 if not options.pre:
                     # Remove prereleases
-                    all_candidates = [candidate for candidate in all_candidates
-                                      if not candidate.version.is_prerelease]
+                    all_candidates = [
+                        candidate
+                        for candidate in all_candidates
+                        if not candidate.version.is_prerelease
+                    ]

                 evaluator = finder.make_candidate_evaluator(
-                    project_name=dist.project_name,
+                    project_name=dist.canonical_name,
                 )
                 best_candidate = evaluator.sort_best_candidate(all_candidates)
                 if best_candidate is None:
@@ -216,39 +247,41 @@ class ListCommand(IndexGroupCommand):

                 remote_version = best_candidate.version
                 if best_candidate.link.is_wheel:
-                    typ = 'wheel'
+                    typ = "wheel"
                 else:
-                    typ = 'sdist'
-                # This is dirty but makes the rest of the code much cleaner
+                    typ = "sdist"
                 dist.latest_version = remote_version
                 dist.latest_filetype = typ
                 return dist

-            for dist in map_multithread(latest_info, packages):
+            for dist in map(latest_info, packages):
                 if dist is not None:
                     yield dist

-    def output_package_listing(self, packages, options):
-        # type: (List[Distribution], Values) -> None
+    def output_package_listing(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> None:
         packages = sorted(
             packages,
-            key=lambda dist: dist.project_name.lower(),
+            key=lambda dist: dist.canonical_name,
         )
-        if options.list_format == 'columns' and packages:
+        if options.list_format == "columns" and packages:
             data, header = format_for_columns(packages, options)
             self.output_package_listing_columns(data, header)
-        elif options.list_format == 'freeze':
+        elif options.list_format == "freeze":
             for dist in packages:
                 if options.verbose >= 1:
-                    write_output("%s==%s (%s)", dist.project_name,
-                                 dist.version, dist.location)
+                    write_output(
+                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
+                    )
                 else:
-                    write_output("%s==%s", dist.project_name, dist.version)
-        elif options.list_format == 'json':
+                    write_output("%s==%s", dist.raw_name, dist.version)
+        elif options.list_format == "json":
             write_output(format_for_json(packages, options))

-    def output_package_listing_columns(self, data, header):
-        # type: (List[List[str]], List[str]) -> None
+    def output_package_listing_columns(
+        self, data: List[List[str]], header: List[str]
+    ) -> None:
         # insert the header first: we need to know the size of column names
         if len(data) > 0:
             data.insert(0, header)
@@ -257,63 +290,72 @@ class ListCommand(IndexGroupCommand):

         # Create and add a separator.
         if len(data) > 0:
-            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
+            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))

         for val in pkg_strings:
             write_output(val)


-def format_for_columns(pkgs, options):
-    # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]]
+def format_for_columns(
+    pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
     """
     Convert the package data into something usable
     by output_package_listing_columns.
     """
-    running_outdated = options.outdated
-    # Adjust the header for the `pip list --outdated` case.
-    if running_outdated:
-        header = ["Package", "Version", "Latest", "Type"]
-    else:
-        header = ["Package", "Version"]
+    header = ["Package", "Version"]

-    data = []
-    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
+    running_outdated = options.outdated
+    if running_outdated:
+        header.extend(["Latest", "Type"])
+
+    has_editables = any(x.editable for x in pkgs)
+    if has_editables:
+        header.append("Editable project location")
+
+    if options.verbose >= 1:
         header.append("Location")
     if options.verbose >= 1:
         header.append("Installer")

+    data = []
     for proj in pkgs:
         # if we're working on the 'outdated' list, separate out the
         # latest_version and type
-        row = [proj.project_name, proj.version]
+        row = [proj.raw_name, str(proj.version)]

         if running_outdated:
-            row.append(proj.latest_version)
+            row.append(str(proj.latest_version))
             row.append(proj.latest_filetype)

-        if options.verbose >= 1 or dist_is_editable(proj):
-            row.append(proj.location)
+        if has_editables:
+            row.append(proj.editable_project_location or "")

         if options.verbose >= 1:
-            row.append(get_installer(proj))
+            row.append(proj.location or "")
+        if options.verbose >= 1:
+            row.append(proj.installer)

         data.append(row)

     return data, header


-def format_for_json(packages, options):
-    # type: (List[Distribution], Values) -> str
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
     data = []
     for dist in packages:
         info = {
-            'name': dist.project_name,
-            'version': str(dist.version),
+            "name": dist.raw_name,
+            "version": str(dist.version),
         }
         if options.verbose >= 1:
-            info['location'] = dist.location
-            info['installer'] = get_installer(dist)
+            info["location"] = dist.location or ""
+            info["installer"] = dist.installer
         if options.outdated:
-            info['latest_version'] = str(dist.latest_version)
-            info['latest_filetype'] = dist.latest_filetype
+            info["latest_version"] = str(dist.latest_version)
+            info["latest_filetype"] = dist.latest_filetype
+        editable_project_location = dist.editable_project_location
+        if editable_project_location:
+            info["editable_project_location"] = editable_project_location
         data.append(info)
     return json.dumps(data)
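
The list command above attaches latest_version and latest_filetype to distribution objects at runtime, and declares those fields on a subclass that exists only under TYPE_CHECKING, so the checker accepts them at zero runtime cost; cast() then labels each plain distribution as the enriched type. A reduced sketch of the pattern with generic names of our own (Base, _WithExtra, annotate):

from typing import TYPE_CHECKING, List, Sequence, cast

class Base:  # stand-in for BaseDistribution
    name = "pkg"

if TYPE_CHECKING:
    # Never executed: exists only so the type checker knows the extra field.
    class _WithExtra(Base):
        latest_version: str

def annotate(items: Sequence[Base]) -> "List[_WithExtra]":
    # cast() is an identity function at runtime; the attribute is simply
    # assigned dynamically, exactly as get_outdated() does above.
    out = [cast("_WithExtra", i) for i in items]
    for i in out:
        i.latest_version = "1.0"
    return out
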
@@ -27,6 +27,7 @@ if TYPE_CHECKING:
         summary: str
         versions: List[str]
 
+
 logger = logging.getLogger(__name__)
 
 
@@ -37,21 +38,21 @@ class SearchCommand(Command, SessionCommandMixin):
     %prog [options] <query>"""
     ignore_require_venv = True
 
-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-i', '--index',
-            dest='index',
-            metavar='URL',
+            "-i",
+            "--index",
+            dest="index",
+            metavar="URL",
             default=PyPI.pypi_url,
-            help='Base URL of Python Package Index (default %default)')
+            help="Base URL of Python Package Index (default %default)",
+        )
 
         self.parser.insert_option_group(0, self.cmd_opts)
 
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         if not args:
-            raise CommandError('Missing required argument (search query).')
+            raise CommandError("Missing required argument (search query).")
         query = args
         pypi_hits = self.search(query, options)
         hits = transform_hits(pypi_hits)
@@ -65,8 +66,7 @@ class SearchCommand(Command, SessionCommandMixin):
             return SUCCESS
         return NO_MATCHES_FOUND
 
-    def search(self, query, options):
-        # type: (List[str], Values) -> List[Dict[str, str]]
+    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
         index_url = options.index
 
         session = self.get_default_session(options)
@@ -74,7 +74,7 @@ class SearchCommand(Command, SessionCommandMixin):
         transport = PipXmlrpcTransport(index_url, session)
         pypi = xmlrpc.client.ServerProxy(index_url, transport)
         try:
-            hits = pypi.search({'name': query, 'summary': query}, 'or')
+            hits = pypi.search({"name": query, "summary": query}, "or")
         except xmlrpc.client.Fault as fault:
             message = "XMLRPC request failed [code: {code}]\n{string}".format(
                 code=fault.faultCode,
@@ -85,78 +85,90 @@ class SearchCommand(Command, SessionCommandMixin):
     return hits
 
 
-def transform_hits(hits):
-    # type: (List[Dict[str, str]]) -> List[TransformedHit]
+def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
     """
     The list from pypi is really a list of versions. We want a list of
     packages with the list of versions stored inline. This converts the
     list from pypi into one we can use.
     """
-    packages = OrderedDict()  # type: OrderedDict[str, TransformedHit]
+    packages: Dict[str, "TransformedHit"] = OrderedDict()
     for hit in hits:
-        name = hit['name']
-        summary = hit['summary']
-        version = hit['version']
+        name = hit["name"]
+        summary = hit["summary"]
+        version = hit["version"]
 
         if name not in packages.keys():
             packages[name] = {
-                'name': name,
-                'summary': summary,
-                'versions': [version],
+                "name": name,
+                "summary": summary,
+                "versions": [version],
             }
         else:
-            packages[name]['versions'].append(version)
+            packages[name]["versions"].append(version)
 
             # if this is the highest version, replace summary and score
-            if version == highest_version(packages[name]['versions']):
-                packages[name]['summary'] = summary
+            if version == highest_version(packages[name]["versions"]):
+                packages[name]["summary"] = summary
 
     return list(packages.values())
 
 
-def print_results(hits, name_column_width=None, terminal_width=None):
-    # type: (List[TransformedHit], Optional[int], Optional[int]) -> None
+def print_dist_installation_info(name: str, latest: str) -> None:
+    env = get_default_environment()
+    dist = env.get_distribution(name)
+    if dist is not None:
+        with indent_log():
+            if dist.version == latest:
+                write_output("INSTALLED: %s (latest)", dist.version)
+            else:
+                write_output("INSTALLED: %s", dist.version)
+                if parse_version(latest).pre:
+                    write_output(
+                        "LATEST: %s (pre-release; install"
+                        " with `pip install --pre`)",
+                        latest,
+                    )
+                else:
+                    write_output("LATEST: %s", latest)
+
+
+def print_results(
+    hits: List["TransformedHit"],
+    name_column_width: Optional[int] = None,
+    terminal_width: Optional[int] = None,
+) -> None:
     if not hits:
         return
     if name_column_width is None:
-        name_column_width = max([
-            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
-            for hit in hits
-        ]) + 4
+        name_column_width = (
+            max(
+                [
+                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
+                    for hit in hits
+                ]
+            )
+            + 4
+        )
 
-    env = get_default_environment()
     for hit in hits:
-        name = hit['name']
-        summary = hit['summary'] or ''
-        latest = highest_version(hit.get('versions', ['-']))
+        name = hit["name"]
+        summary = hit["summary"] or ""
+        latest = highest_version(hit.get("versions", ["-"]))
         if terminal_width is not None:
             target_width = terminal_width - name_column_width - 5
             if target_width > 10:
                 # wrap and indent summary to fit terminal
                 summary_lines = textwrap.wrap(summary, target_width)
-                summary = ('\n' + ' ' * (name_column_width + 3)).join(
-                    summary_lines)
+                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
 
-        name_latest = f'{name} ({latest})'
-        line = f'{name_latest:{name_column_width}} - {summary}'
+        name_latest = f"{name} ({latest})"
+        line = f"{name_latest:{name_column_width}} - {summary}"
         try:
             write_output(line)
-            dist = env.get_distribution(name)
-            if dist is not None:
-                with indent_log():
-                    if dist.version == latest:
-                        write_output('INSTALLED: %s (latest)', dist.version)
-                    else:
-                        write_output('INSTALLED: %s', dist.version)
-                        if parse_version(latest).pre:
-                            write_output('LATEST: %s (pre-release; install'
-                                         ' with "pip install --pre")', latest)
-                        else:
-                            write_output('LATEST: %s', latest)
+            print_dist_installation_info(name, latest)
         except UnicodeEncodeError:
             pass
 
 
-def highest_version(versions):
-    # type: (List[str]) -> str
+def highest_version(versions: List[str]) -> str:
     return max(versions, key=parse_version)
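Note: `transform_hits()` itself is only restyled here (double quotes and an inline variable annotation); its grouping behaviour is unchanged. A standalone restatement for trying that behaviour outside pip, assuming the `packaging` library is installed:

from collections import OrderedDict
from packaging.version import parse as parse_version  # assumed available

def demo_transform(hits):
    # Standalone restatement of transform_hits() for experimentation.
    packages = OrderedDict()
    for hit in hits:
        name, summary, version = hit["name"], hit["summary"], hit["version"]
        if name not in packages:
            packages[name] = {"name": name, "summary": summary, "versions": [version]}
        else:
            packages[name]["versions"].append(version)
            # keep the summary belonging to the highest version seen so far
            if version == max(packages[name]["versions"], key=parse_version):
                packages[name]["summary"] = summary
    return list(packages.values())

hits = [
    {"name": "foo", "summary": "old summary", "version": "1.0"},
    {"name": "foo", "summary": "new summary", "version": "2.0"},
]
print(demo_transform(hits))
# [{'name': 'foo', 'summary': 'new summary', 'versions': ['1.0', '2.0']}]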
@@ -1,14 +1,12 @@
 import logging
-import os
-from email.parser import FeedParser
 from optparse import Values
-from typing import Dict, Iterator, List
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
 
-from pip._vendor import pkg_resources
 from pip._vendor.packaging.utils import canonicalize_name
 
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
 from pip._internal.utils.misc import write_output
 
 logger = logging.getLogger(__name__)
@@ -25,123 +23,124 @@ class ShowCommand(Command):
     %prog [options] <package> ..."""
     ignore_require_venv = True
 
-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-f', '--files',
-            dest='files',
-            action='store_true',
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
             default=False,
-            help='Show the full list of installed files for each package.')
+            help="Show the full list of installed files for each package.",
+        )
 
         self.parser.insert_option_group(0, self.cmd_opts)
 
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         if not args:
-            logger.warning('ERROR: Please provide a package name or names.')
+            logger.warning("ERROR: Please provide a package name or names.")
             return ERROR
         query = args
 
         results = search_packages_info(query)
         if not print_results(
-                results, list_files=options.files, verbose=options.verbose):
+            results, list_files=options.files, verbose=options.verbose
+        ):
             return ERROR
         return SUCCESS
 
 
-def search_packages_info(query):
-    # type: (List[str]) -> Iterator[Dict[str, str]]
+class _PackageInfo(NamedTuple):
+    name: str
+    version: str
+    location: str
+    requires: List[str]
+    required_by: List[str]
+    installer: str
+    metadata_version: str
+    classifiers: List[str]
+    summary: str
+    homepage: str
+    project_urls: List[str]
+    author: str
+    author_email: str
+    license: str
+    entry_points: List[str]
+    files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
     """
     Gather details from installed distributions. Print distribution name,
     version, location, and installed files. Installed files requires a
     pip generated 'installed-files.txt' in the distributions '.egg-info'
     directory.
     """
-    installed = {}
-    for p in pkg_resources.working_set:
-        installed[canonicalize_name(p.project_name)] = p
-
+    env = get_default_environment()
+
+    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
     query_names = [canonicalize_name(name) for name in query]
     missing = sorted(
         [name for name, pkg in zip(query, query_names) if pkg not in installed]
     )
     if missing:
-        logger.warning('Package(s) not found: %s', ', '.join(missing))
+        logger.warning("Package(s) not found: %s", ", ".join(missing))
 
-    def get_requiring_packages(package_name):
-        # type: (str) -> List[str]
-        canonical_name = canonicalize_name(package_name)
-        return [
-            pkg.project_name for pkg in pkg_resources.working_set
-            if canonical_name in
-               [canonicalize_name(required.name) for required in
-                pkg.requires()]
-        ]
-
-    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
-        package = {
-            'name': dist.project_name,
-            'version': dist.version,
-            'location': dist.location,
-            'requires': [dep.project_name for dep in dist.requires()],
-            'required_by': get_requiring_packages(dist.project_name)
-        }
-        file_list = None
-        metadata = ''
-        if isinstance(dist, pkg_resources.DistInfoDistribution):
-            # RECORDs should be part of .dist-info metadatas
-            if dist.has_metadata('RECORD'):
-                lines = dist.get_metadata_lines('RECORD')
-                paths = [line.split(',')[0] for line in lines]
-                paths = [os.path.join(dist.location, p) for p in paths]
-                file_list = [os.path.relpath(p, dist.location) for p in paths]
-
-            if dist.has_metadata('METADATA'):
-                metadata = dist.get_metadata('METADATA')
-        else:
-            # Otherwise use pip's log for .egg-info's
-            if dist.has_metadata('installed-files.txt'):
-                paths = dist.get_metadata_lines('installed-files.txt')
-                paths = [os.path.join(dist.egg_info, p) for p in paths]
-                file_list = [os.path.relpath(p, dist.location) for p in paths]
-
-            if dist.has_metadata('PKG-INFO'):
-                metadata = dist.get_metadata('PKG-INFO')
-
-        if dist.has_metadata('entry_points.txt'):
-            entry_points = dist.get_metadata_lines('entry_points.txt')
-            package['entry_points'] = entry_points
-
-        if dist.has_metadata('INSTALLER'):
-            for line in dist.get_metadata_lines('INSTALLER'):
-                if line.strip():
-                    package['installer'] = line.strip()
-                    break
-
-        # @todo: Should pkg_resources.Distribution have a
-        # `get_pkg_info` method?
-        feed_parser = FeedParser()
-        feed_parser.feed(metadata)
-        pkg_info_dict = feed_parser.close()
-        for key in ('metadata-version', 'summary',
-                    'home-page', 'author', 'author-email', 'license'):
-            package[key] = pkg_info_dict.get(key)
-
-        # It looks like FeedParser cannot deal with repeated headers
-        classifiers = []
-        for line in metadata.splitlines():
-            if line.startswith('Classifier: '):
-                classifiers.append(line[len('Classifier: '):])
-        package['classifiers'] = classifiers
-
-        if file_list:
-            package['files'] = sorted(file_list)
-        yield package
+    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
+        return (
+            dist.metadata["Name"] or "UNKNOWN"
+            for dist in installed.values()
+            if current_dist.canonical_name
+            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
+        )
+
+    for query_name in query_names:
+        try:
+            dist = installed[query_name]
+        except KeyError:
+            continue
+
+        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
+        required_by = sorted(_get_requiring_packages(dist), key=str.lower)
+
+        try:
+            entry_points_text = dist.read_text("entry_points.txt")
+            entry_points = entry_points_text.splitlines(keepends=False)
+        except FileNotFoundError:
+            entry_points = []
+
+        files_iter = dist.iter_declared_entries()
+        if files_iter is None:
+            files: Optional[List[str]] = None
+        else:
+            files = sorted(files_iter)
+
+        metadata = dist.metadata
+
+        yield _PackageInfo(
+            name=dist.raw_name,
+            version=str(dist.version),
+            location=dist.location or "",
+            requires=requires,
+            required_by=required_by,
+            installer=dist.installer,
+            metadata_version=dist.metadata_version or "",
+            classifiers=metadata.get_all("Classifier", []),
+            summary=metadata.get("Summary", ""),
+            homepage=metadata.get("Home-page", ""),
+            project_urls=metadata.get_all("Project-URL", []),
+            author=metadata.get("Author", ""),
+            author_email=metadata.get("Author-email", ""),
+            license=metadata.get("License", ""),
+            entry_points=entry_points,
+            files=files,
+        )
 
 
-def print_results(distributions, list_files=False, verbose=False):
-    # type: (Iterator[Dict[str, str]], bool, bool) -> bool
+def print_results(
+    distributions: Iterable[_PackageInfo],
+    list_files: bool,
+    verbose: bool,
+) -> bool:
     """
     Print the information from installed distributions found.
     """
@@ -151,31 +150,34 @@ def print_results(distributions, list_files=False, verbose=False):
         if i > 0:
             write_output("---")
 
-        write_output("Name: %s", dist.get('name', ''))
-        write_output("Version: %s", dist.get('version', ''))
-        write_output("Summary: %s", dist.get('summary', ''))
-        write_output("Home-page: %s", dist.get('home-page', ''))
-        write_output("Author: %s", dist.get('author', ''))
-        write_output("Author-email: %s", dist.get('author-email', ''))
-        write_output("License: %s", dist.get('license', ''))
-        write_output("Location: %s", dist.get('location', ''))
-        write_output("Requires: %s", ', '.join(dist.get('requires', [])))
-        write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))
+        write_output("Name: %s", dist.name)
+        write_output("Version: %s", dist.version)
+        write_output("Summary: %s", dist.summary)
+        write_output("Home-page: %s", dist.homepage)
+        write_output("Author: %s", dist.author)
+        write_output("Author-email: %s", dist.author_email)
+        write_output("License: %s", dist.license)
+        write_output("Location: %s", dist.location)
+        write_output("Requires: %s", ", ".join(dist.requires))
+        write_output("Required-by: %s", ", ".join(dist.required_by))
 
         if verbose:
-            write_output("Metadata-Version: %s",
-                         dist.get('metadata-version', ''))
-            write_output("Installer: %s", dist.get('installer', ''))
+            write_output("Metadata-Version: %s", dist.metadata_version)
+            write_output("Installer: %s", dist.installer)
             write_output("Classifiers:")
-            for classifier in dist.get('classifiers', []):
+            for classifier in dist.classifiers:
                 write_output("  %s", classifier)
             write_output("Entry-points:")
-            for entry in dist.get('entry_points', []):
+            for entry in dist.entry_points:
                 write_output("  %s", entry.strip())
+            write_output("Project-URLs:")
+            for project_url in dist.project_urls:
+                write_output("  %s", project_url)
         if list_files:
             write_output("Files:")
-            for line in dist.get('files', []):
-                write_output("  %s", line.strip())
-            if "files" not in dist:
-                write_output("Cannot locate installed-files.txt")
+            if dist.files is None:
+                write_output("Cannot locate RECORD or installed-files.txt")
+            else:
+                for line in dist.files:
+                    write_output("  %s", line.strip())
     return results_printed
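Note: moving from ad-hoc dicts to the `_PackageInfo` NamedTuple means every field is guaranteed present at construction time, so `print_results()` can drop its `dist.get(..., '')` fallbacks and the `files is None` case becomes explicit. A minimal self-contained sketch of that pattern (trimmed field list, invented values):

from typing import List, NamedTuple, Optional

class PackageInfo(NamedTuple):
    # Trimmed-down stand-in for the commit's _PackageInfo.
    name: str
    version: str
    requires: List[str]
    files: Optional[List[str]]

info = PackageInfo(name="example", version="1.0", requires=[], files=None)
print(f"Name: {info.name}")  # attribute access instead of dist.get('name', '')
if info.files is None:
    print("Cannot locate RECORD or installed-files.txt")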
@@ -1,8 +1,10 @@
+import logging
 from optparse import Values
 from typing import List
 
 from pip._vendor.packaging.utils import canonicalize_name
 
+from pip._internal.cli import cmdoptions
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
 from pip._internal.cli.status_codes import SUCCESS
@@ -14,6 +16,8 @@ from pip._internal.req.constructors import (
 )
 from pip._internal.utils.misc import protect_pip_from_modification_on_windows
 
+logger = logging.getLogger(__name__)
+
 
 class UninstallCommand(Command, SessionCommandMixin):
     """
@@ -30,50 +34,59 @@ class UninstallCommand(Command, SessionCommandMixin):
     %prog [options] <package> ...
     %prog [options] -r <requirements file> ..."""
 
-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
         self.cmd_opts.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
             default=[],
-            metavar='file',
-            help='Uninstall all the packages listed in the given requirements '
-            'file. This option can be used multiple times.',
+            metavar="file",
+            help=(
+                "Uninstall all the packages listed in the given requirements "
+                "file. This option can be used multiple times."
+            ),
         )
         self.cmd_opts.add_option(
-            '-y', '--yes',
-            dest='yes',
-            action='store_true',
-            help="Don't ask for confirmation of uninstall deletions.")
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
         self.parser.insert_option_group(0, self.cmd_opts)
 
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         session = self.get_default_session(options)
 
         reqs_to_uninstall = {}
         for name in args:
             req = install_req_from_line(
-                name, isolated=options.isolated_mode,
+                name,
+                isolated=options.isolated_mode,
             )
             if req.name:
                 reqs_to_uninstall[canonicalize_name(req.name)] = req
+            else:
+                logger.warning(
+                    "Invalid requirement: %r ignored -"
+                    " the uninstall command expects named"
+                    " requirements.",
+                    name,
+                )
         for filename in options.requirements:
             for parsed_req in parse_requirements(
-                    filename,
-                    options=options,
-                    session=session):
+                filename, options=options, session=session
+            ):
                 req = install_req_from_parsed_requirement(
-                    parsed_req,
-                    isolated=options.isolated_mode
+                    parsed_req, isolated=options.isolated_mode
                 )
                 if req.name:
                     reqs_to_uninstall[canonicalize_name(req.name)] = req
         if not reqs_to_uninstall:
             raise InstallationError(
-                f'You must give at least one requirement to {self.name} (see '
+                f"You must give at least one requirement to {self.name} (see "
                 f'"pip help {self.name}")'
             )
@@ -83,10 +96,11 @@ class UninstallCommand(Command, SessionCommandMixin):
 
         for req in reqs_to_uninstall.values():
             uninstall_pathset = req.uninstall(
-                auto_confirm=options.yes, verbose=self.verbosity > 0,
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
             )
             if uninstall_pathset:
                 uninstall_pathset.commit()
-
-        warn_if_run_as_root()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
         return SUCCESS
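Note: the new `--root-user-action` flag comes from the shared `cmdoptions` module, where pip builds shared flags as `functools.partial` factories so each command gets a fresh `optparse.Option`. A hypothetical restatement of that pattern under the semantics this diff relies on (default `warn`, alternative `ignore`); the exact help text is assumed:

from functools import partial
from optparse import Option, OptionParser

root_user_action = partial(
    Option,
    "--root-user-action",
    dest="root_user_action",
    default="warn",
    choices=["warn", "ignore"],
    help="Action if pip is run as a root user (warn or ignore).",
)

parser = OptionParser()
parser.add_option(root_user_action())
opts, _ = parser.parse_args(["--root-user-action", "ignore"])
print(opts.root_user_action)  # -> ignore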
@@ -9,8 +9,8 @@ from pip._internal.cli import cmdoptions
 from pip._internal.cli.req_command import RequirementCommand, with_cleanup
 from pip._internal.cli.status_codes import SUCCESS
 from pip._internal.exceptions import CommandError
+from pip._internal.operations.build.build_tracker import get_build_tracker
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import get_requirement_tracker
 from pip._internal.utils.misc import ensure_dir, normalize_path
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -26,10 +26,8 @@ class WheelCommand(RequirementCommand):
     recompiling your software during every install. For more details, see the
     wheel docs: https://wheel.readthedocs.io/en/latest/
 
-    Requirements: setuptools>=0.8, and wheel.
-
-    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
-    package to build individual wheels.
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/
 
     """
@@ -40,16 +38,18 @@ class WheelCommand(RequirementCommand):
     %prog [options] [-e] <local project path> ...
     %prog [options] <archive url/path> ..."""
 
-    def add_options(self):
-        # type: () -> None
+    def add_options(self) -> None:
 
         self.cmd_opts.add_option(
-            '-w', '--wheel-dir',
-            dest='wheel_dir',
-            metavar='dir',
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
             default=os.curdir,
-            help=("Build wheels into <dir>, where the default is the "
-                  "current working directory."),
+            help=(
+                "Build wheels into <dir>, where the default is the "
+                "current working directory."
+            ),
         )
         self.cmd_opts.add_option(cmdoptions.no_binary())
         self.cmd_opts.add_option(cmdoptions.only_binary())
@@ -57,32 +57,35 @@ class WheelCommand(RequirementCommand):
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
         self.cmd_opts.add_option(cmdoptions.constraints())
         self.cmd_opts.add_option(cmdoptions.editable())
         self.cmd_opts.add_option(cmdoptions.requirements())
         self.cmd_opts.add_option(cmdoptions.src())
         self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
         self.cmd_opts.add_option(cmdoptions.no_deps())
-        self.cmd_opts.add_option(cmdoptions.build_dir())
         self.cmd_opts.add_option(cmdoptions.progress_bar())
 
         self.cmd_opts.add_option(
-            '--no-verify',
-            dest='no_verify',
-            action='store_true',
+            "--no-verify",
+            dest="no_verify",
+            action="store_true",
             default=False,
             help="Don't verify if built wheel is valid.",
         )
 
+        self.cmd_opts.add_option(cmdoptions.config_settings())
         self.cmd_opts.add_option(cmdoptions.build_options())
         self.cmd_opts.add_option(cmdoptions.global_options())
 
         self.cmd_opts.add_option(
-            '--pre',
-            action='store_true',
+            "--pre",
+            action="store_true",
             default=False,
-            help=("Include pre-release and development versions. By default, "
-                  "pip only finds stable versions."),
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
         )
 
         self.cmd_opts.add_option(cmdoptions.require_hashes())
@@ -96,8 +99,7 @@ class WheelCommand(RequirementCommand):
         self.parser.insert_option_group(0, self.cmd_opts)
 
     @with_cleanup
-    def run(self, options, args):
-        # type: (Values, List[str]) -> int
+    def run(self, options: Values, args: List[str]) -> int:
         cmdoptions.check_install_build_global(options)
 
         session = self.get_default_session(options)
@@ -108,7 +110,7 @@ class WheelCommand(RequirementCommand):
         options.wheel_dir = normalize_path(options.wheel_dir)
         ensure_dir(options.wheel_dir)
 
-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())
 
         directory = TempDirectory(
             delete=not options.no_clean,
@@ -121,11 +123,12 @@ class WheelCommand(RequirementCommand):
         preparer = self.make_requirement_preparer(
             temp_build_dir=directory,
             options=options,
-            req_tracker=req_tracker,
+            build_tracker=build_tracker,
             session=session,
             finder=finder,
             download_dir=options.wheel_dir,
             use_user_site=False,
+            verbosity=self.verbosity,
         )
 
         resolver = self.make_resolver(
@@ -139,11 +142,9 @@ class WheelCommand(RequirementCommand):
 
         self.trace_basic_info(finder)
 
-        requirement_set = resolver.resolve(
-            reqs, check_supported_wheels=True
-        )
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
 
-        reqs_to_build = []  # type: List[InstallRequirement]
+        reqs_to_build: List[InstallRequirement] = []
         for req in requirement_set.requirements.values():
             if req.is_wheel:
                 preparer.save_linked_requirement(req)
@@ -167,12 +168,11 @@ class WheelCommand(RequirementCommand):
             except OSError as e:
                 logger.warning(
                     "Building wheel for %s failed: %s",
-                    req.name, e,
+                    req.name,
+                    e,
                 )
                 build_failures.append(req)
         if len(build_failures) != 0:
-            raise CommandError(
-                "Failed to build one or more wheels"
-            )
+            raise CommandError("Failed to build one or more wheels")
 
         return SUCCESS
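Note: `self.enter_context(get_build_tracker())` keeps the renamed build tracker alive for the rest of the command via the command's exit stack; only the name changed (`req_tracker` to `build_tracker`), not the resource pattern. A self-contained sketch of that pattern with a stand-in tracker (the real `get_build_tracker()` lives in `pip._internal.operations.build.build_tracker`):

from contextlib import ExitStack, contextmanager

@contextmanager
def get_build_tracker():
    # Stand-in for pip's real build-tracker context manager.
    print("build tracker set up")
    try:
        yield "tracker"
    finally:
        print("build tracker torn down")

with ExitStack() as stack:
    tracker = stack.enter_context(get_build_tracker())
    print(f"building wheels with {tracker!r}")
# Teardown runs when the stack exits, mirroring the command's cleanup.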
@@ -13,7 +13,6 @@ Some terminology:
 
 import configparser
 import locale
-import logging
 import os
 import sys
 from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
@@ -24,41 +23,39 @@ from pip._internal.exceptions import (
 )
 from pip._internal.utils import appdirs
 from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
 from pip._internal.utils.misc import ensure_dir, enum
 
 RawConfigParser = configparser.RawConfigParser  # Shorthand
 Kind = NewType("Kind", str)
 
-CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
 ENV_NAMES_IGNORED = "version", "help"
 
 # The kinds of configurations there are.
 kinds = enum(
     USER="user",  # User Specific
     GLOBAL="global",  # System Wide
     SITE="site",  # [Virtual] Environment Specific
     ENV="env",  # from PIP_CONFIG_FILE
     ENV_VAR="env-var",  # from Environment Variables
 )
 OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
 VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
 
-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)
 
 
 # NOTE: Maybe use the optionx attribute to normalize keynames.
-def _normalize_name(name):
-    # type: (str) -> str
-    """Make a name consistent regardless of source (environment or file)
-    """
-    name = name.lower().replace('_', '-')
-    if name.startswith('--'):
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    if name.startswith("--"):
         name = name[2:]  # only prefer long opts
     return name
 
 
-def _disassemble_key(name):
-    # type: (str) -> List[str]
+def _disassemble_key(name: str) -> List[str]:
     if "." not in name:
         error_message = (
             "Key does not contain dot separated section and key. "
@@ -68,22 +65,18 @@ def _disassemble_key(name):
     return name.split(".", 1)
 
 
-def get_configuration_files():
-    # type: () -> Dict[Kind, List[str]]
+def get_configuration_files() -> Dict[Kind, List[str]]:
     global_config_files = [
-        os.path.join(path, CONFIG_BASENAME)
-        for path in appdirs.site_config_dirs('pip')
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
     ]
 
     site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
     legacy_config_file = os.path.join(
-        os.path.expanduser('~'),
-        'pip' if WINDOWS else '.pip',
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
         CONFIG_BASENAME,
     )
-    new_config_file = os.path.join(
-        appdirs.user_config_dir("pip"), CONFIG_BASENAME
-    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
     return {
         kinds.GLOBAL: global_config_files,
         kinds.SITE: [site_config_file],
@@ -105,8 +98,7 @@ class Configuration:
     and the data stored is also nice.
     """
 
-    def __init__(self, isolated, load_only=None):
-        # type: (bool, Optional[Kind]) -> None
+    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
         super().__init__()
 
         if load_only is not None and load_only not in VALID_LOAD_ONLY:
@@ -119,54 +111,50 @@ class Configuration:
         self.load_only = load_only
 
         # Because we keep track of where we got the data from
-        self._parsers = {
+        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
             variant: [] for variant in OVERRIDE_ORDER
-        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
-        self._config = {
+        }
+        self._config: Dict[Kind, Dict[str, Any]] = {
             variant: {} for variant in OVERRIDE_ORDER
-        }  # type: Dict[Kind, Dict[str, Any]]
-        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]
+        }
+        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
 
-    def load(self):
-        # type: () -> None
-        """Loads configuration from configuration files and environment
-        """
+    def load(self) -> None:
+        """Loads configuration from configuration files and environment"""
         self._load_config_files()
         if not self.isolated:
             self._load_environment_vars()
 
-    def get_file_to_edit(self):
-        # type: () -> Optional[str]
-        """Returns the file with highest priority in configuration
-        """
-        assert self.load_only is not None, \
-            "Need to be specified a file to be editing"
+    def get_file_to_edit(self) -> Optional[str]:
+        """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "Need to be specified a file to be editing"
 
         try:
             return self._get_parser_to_modify()[0]
         except IndexError:
             return None
 
-    def items(self):
-        # type: () -> Iterable[Tuple[str, Any]]
+    def items(self) -> Iterable[Tuple[str, Any]]:
         """Returns key-value pairs like dict.items() representing the loaded
         configuration
         """
         return self._dictionary.items()
 
-    def get_value(self, key):
-        # type: (str) -> Any
-        """Get a value from the configuration.
-        """
+    def get_value(self, key: str) -> Any:
+        """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
         try:
             return self._dictionary[key]
         except KeyError:
-            raise ConfigurationError(f"No such key - {key}")
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")
 
-    def set_value(self, key, value):
-        # type: (str, Any) -> None
-        """Modify a value in the configuration.
-        """
+    def set_value(self, key: str, value: Any) -> None:
+        """Modify a value in the configuration."""
+        key = _normalize_name(key)
         self._ensure_have_load_only()
 
         assert self.load_only
@@ -183,21 +171,23 @@ class Configuration:
         self._config[self.load_only][key] = value
         self._mark_as_modified(fname, parser)
 
-    def unset_value(self, key):
-        # type: (str) -> None
+    def unset_value(self, key: str) -> None:
         """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
         self._ensure_have_load_only()
 
         assert self.load_only
         if key not in self._config[self.load_only]:
-            raise ConfigurationError(f"No such key - {key}")
+            raise ConfigurationError(f"No such key - {orig_key}")
 
         fname, parser = self._get_parser_to_modify()
 
         if parser is not None:
             section, name = _disassemble_key(key)
-            if not (parser.has_section(section)
-                    and parser.remove_option(section, name)):
+            if not (
+                parser.has_section(section) and parser.remove_option(section, name)
+            ):
                 # The option was not removed.
                 raise ConfigurationError(
                     "Fatal Internal error [id=1]. Please report as a bug."
@@ -210,10 +200,8 @@ class Configuration:
 
         del self._config[self.load_only][key]
 
-    def save(self):
-        # type: () -> None
-        """Save the current in-memory state.
-        """
+    def save(self) -> None:
+        """Save the current in-memory state."""
        self._ensure_have_load_only()
 
         for fname, parser in self._modified_parsers:
@@ -229,17 +217,14 @@ class Configuration:
     # Private routines
     #
 
-    def _ensure_have_load_only(self):
-        # type: () -> None
+    def _ensure_have_load_only(self) -> None:
         if self.load_only is None:
             raise ConfigurationError("Needed a specific file to be modifying.")
         logger.debug("Will be working with %s variant only", self.load_only)
 
     @property
-    def _dictionary(self):
-        # type: () -> Dict[str, Any]
-        """A dictionary representing the loaded configuration.
-        """
+    def _dictionary(self) -> Dict[str, Any]:
+        """A dictionary representing the loaded configuration."""
         # NOTE: Dictionaries are not populated if not loaded. So, conditionals
         # are not needed here.
         retval = {}
@@ -249,10 +234,8 @@ class Configuration:
 
         return retval
 
-    def _load_config_files(self):
-        # type: () -> None
-        """Loads configuration from configuration files
-        """
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
         config_files = dict(self.iter_config_files())
         if config_files[kinds.ENV][0:1] == [os.devnull]:
             logger.debug(
@@ -266,9 +249,7 @@ class Configuration:
             # If there's specific variant set in `load_only`, load only
             # that variant, not the others.
             if self.load_only is not None and variant != self.load_only:
-                logger.debug(
-                    "Skipping file '%s' (variant: %s)", fname, variant
-                )
+                logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                 continue
 
             parser = self._load_file(variant, fname)
@@ -276,9 +257,8 @@ class Configuration:
             # Keeping track of the parsers used
             self._parsers[variant].append((fname, parser))
 
-    def _load_file(self, variant, fname):
-        # type: (Kind, str) -> RawConfigParser
-        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
         parser = self._construct_parser(fname)
 
         for section in parser.sections():
@@ -287,22 +267,20 @@ class Configuration:
 
         return parser
 
-    def _construct_parser(self, fname):
-        # type: (str) -> RawConfigParser
+    def _construct_parser(self, fname: str) -> RawConfigParser:
         parser = configparser.RawConfigParser()
         # If there is no such file, don't bother reading it but create the
         # parser anyway, to hold the data.
         # Doing this is useful when modifying and saving files, where we don't
         # need to construct a parser.
         if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
             try:
-                parser.read(fname)
+                parser.read(fname, encoding=locale_encoding)
             except UnicodeDecodeError:
                 # See https://github.com/pypa/pip/issues/4963
                 raise ConfigurationFileCouldNotBeLoaded(
-                    reason="contains invalid {} characters".format(
-                        locale.getpreferredencoding(False)
-                    ),
+                    reason=f"contains invalid {locale_encoding} characters",
                     fname=fname,
                 )
             except configparser.Error as error:
@@ -310,16 +288,15 @@ class Configuration:
                 raise ConfigurationFileCouldNotBeLoaded(error=error)
         return parser
 
-    def _load_environment_vars(self):
-        # type: () -> None
-        """Loads configuration from environment variables
-        """
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
         self._config[kinds.ENV_VAR].update(
             self._normalized_keys(":env:", self.get_environ_vars())
         )
 
-    def _normalized_keys(self, section, items):
-        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
         """Normalizes items to construct a dictionary with normalized keys.
 
         This routine is where the names become keys and are made the same
@@ -331,8 +308,7 @@ class Configuration:
             normalized[key] = val
         return normalized
 
-    def get_environ_vars(self):
-        # type: () -> Iterable[Tuple[str, str]]
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
         """Returns a generator with all environmental vars with prefix PIP_"""
         for key, val in os.environ.items():
             if key.startswith("PIP_"):
@@ -341,8 +317,7 @@ class Configuration:
                 yield name, val
 
     # XXX: This is patched in the tests.
-    def iter_config_files(self):
-        # type: () -> Iterable[Tuple[Kind, List[str]]]
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
         """Yields variant and configuration files associated with it.
 
         This should be treated like items of a dictionary.
@@ -350,7 +325,7 @@ class Configuration:
         # SMELL: Move the conditions out of this function
 
         # environment variables have the lowest priority
-        config_file = os.environ.get('PIP_CONFIG_FILE', None)
+        config_file = os.environ.get("PIP_CONFIG_FILE", None)
         if config_file is not None:
             yield kinds.ENV, [config_file]
         else:
@@ -372,13 +347,11 @@ class Configuration:
         # finally virtualenv configuration first trumping others
         yield kinds.SITE, config_files[kinds.SITE]
 
-    def get_values_in_config(self, variant):
-        # type: (Kind) -> Dict[str, Any]
+    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
         """Get values present in a config file"""
         return self._config[variant]
 
-    def _get_parser_to_modify(self):
-        # type: () -> Tuple[str, RawConfigParser]
+    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
         # Determine which parser to modify
         assert self.load_only
         parsers = self._parsers[self.load_only]
@@ -392,12 +365,10 @@ class Configuration:
         return parsers[-1]
 
     # XXX: This is patched in the tests.
-    def _mark_as_modified(self, fname, parser):
-        # type: (str, RawConfigParser) -> None
+    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
         file_parser_tuple = (fname, parser)
         if file_parser_tuple not in self._modified_parsers:
             self._modified_parsers.append(file_parser_tuple)
 
-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         return f"{self.__class__.__name__}({self._dictionary!r})"
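Note: `get_value()`, `set_value()`, and `unset_value()` now normalize the key first, so lookups are insensitive to case and to `_` vs `-` spelling, while error messages echo the caller's original key via `orig_key`. A standalone restatement of the normalization for quick verification:

def normalize_name(name: str) -> str:
    # Copy of _normalize_name() above, runnable on its own.
    name = name.lower().replace("_", "-")
    if name.startswith("--"):
        name = name[2:]  # only prefer long opts
    return name

assert normalize_name("GLOBAL.Index_URL") == "global.index-url"
assert normalize_name("--timeout") == "timeout"
# Configuration.get_value("GLOBAL.Index_URL") therefore resolves the same
# entry as get_value("global.index-url"), while ConfigurationError still
# reports the caller's original spelling.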
@@ -4,8 +4,9 @@ from pip._internal.distributions.wheel import WheelDistribution
 from pip._internal.req.req_install import InstallRequirement
 
 
-def make_distribution_for_install_requirement(install_req):
-    # type: (InstallRequirement) -> AbstractDistribution
+def make_distribution_for_install_requirement(
+    install_req: InstallRequirement,
+) -> AbstractDistribution:
     """Returns a Distribution for the given InstallRequirement"""
     # Editable requirements will always be source distributions. They use the
     # legacy logic until we create a modern standard for them.
@@ -1,9 +1,7 @@
 import abc
-from typing import Optional
-
-from pip._vendor.pkg_resources import Distribution
 
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
 from pip._internal.req import InstallRequirement
 
 
@@ -23,17 +21,19 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
     above metadata.
     """
 
-    def __init__(self, req):
-        # type: (InstallRequirement) -> None
+    def __init__(self, req: InstallRequirement) -> None:
         super().__init__()
         self.req = req
 
     @abc.abstractmethod
-    def get_pkg_resources_distribution(self):
-        # type: () -> Optional[Distribution]
+    def get_metadata_distribution(self) -> BaseDistribution:
         raise NotImplementedError()
 
     @abc.abstractmethod
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        # type: (PackageFinder, bool) -> None
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
         raise NotImplementedError()
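After this hunk, every `AbstractDistribution` subclass must return the new `BaseDistribution` abstraction instead of a `pkg_resources.Distribution`, and `prepare_distribution_metadata` grows a third `check_build_deps` flag. A minimal sketch of what a conforming subclass now looks like; the class name and trivial bodies are invented for illustration:

```python
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution


class AlreadyPreparedDistribution(AbstractDistribution):
    """Illustrative subclass: metadata is assumed to need no preparation."""

    def get_metadata_distribution(self) -> BaseDistribution:
        # Must now return the new BaseDistribution abstraction, not a
        # pkg_resources.Distribution.
        raise NotImplementedError("illustration only")

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,  # the new third flag introduced by this hunk
    ) -> None:
        pass
```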
@@ -1,9 +1,6 @@
-from typing import Optional
-
-from pip._vendor.pkg_resources import Distribution
-
 from pip._internal.distributions.base import AbstractDistribution
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
 
 
 class InstalledDistribution(AbstractDistribution):
@@ -13,10 +10,14 @@ class InstalledDistribution(AbstractDistribution):
     been computed.
     """
 
-    def get_pkg_resources_distribution(self):
-        # type: () -> Optional[Distribution]
+    def get_metadata_distribution(self) -> BaseDistribution:
+        assert self.req.satisfied_by is not None, "not actually installed"
         return self.req.satisfied_by
 
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        # type: (PackageFinder, bool) -> None
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
         pass
@@ -1,12 +1,11 @@
 import logging
-from typing import Set, Tuple
+from typing import Iterable, Set, Tuple
 
-from pip._vendor.pkg_resources import Distribution
-
 from pip._internal.build_env import BuildEnvironment
 from pip._internal.distributions.base import AbstractDistribution
 from pip._internal.exceptions import InstallationError
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
 from pip._internal.utils.subprocess import runner_with_spinner_message
 
 logger = logging.getLogger(__name__)
@@ -19,40 +18,49 @@ class SourceDistribution(AbstractDistribution):
     generated, either using PEP 517 or using the legacy `setup.py egg_info`.
     """
 
-    def get_pkg_resources_distribution(self):
-        # type: () -> Distribution
+    def get_metadata_distribution(self) -> BaseDistribution:
         return self.req.get_dist()
 
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        # type: (PackageFinder, bool) -> None
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
         # Load pyproject.toml, to determine whether PEP 517 is to be used
         self.req.load_pyproject_toml()
 
         # Set up the build isolation, if this requirement should be isolated
         should_isolate = self.req.use_pep517 and build_isolation
         if should_isolate:
-            self._setup_isolation(finder)
+            # Setup an isolated environment and install the build backend static
+            # requirements in it.
+            self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660 or
+            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+            # to setup the build backend to verify it supports build_editable, nor can
+            # it be done later, because we want to avoid installing build requirements
+            # needlessly. Doing it here also works around setuptools generating
+            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+            # without setup.py nor setup.cfg.
+            self.req.isolated_editable_sanity_check()
+            # Install the dynamic build requirements.
+            self._install_build_reqs(finder)
+        # Check if the current environment provides build dependencies
+        should_check_deps = self.req.use_pep517 and check_build_deps
+        if should_check_deps:
+            pyproject_requires = self.req.pyproject_requires
+            assert pyproject_requires is not None
+            conflicting, missing = self.req.build_env.check_requirements(
+                pyproject_requires
+            )
+            if conflicting:
+                self._raise_conflicts("the backend dependencies", conflicting)
+            if missing:
+                self._raise_missing_reqs(missing)
         self.req.prepare_metadata()
 
-    def _setup_isolation(self, finder):
-        # type: (PackageFinder) -> None
-        def _raise_conflicts(conflicting_with, conflicting_reqs):
-            # type: (str, Set[Tuple[str, str]]) -> None
-            format_string = (
-                "Some build dependencies for {requirement} "
-                "conflict with {conflicting_with}: {description}."
-            )
-            error_message = format_string.format(
-                requirement=self.req,
-                conflicting_with=conflicting_with,
-                description=", ".join(
-                    f"{installed} is incompatible with {wanted}"
-                    for installed, wanted in sorted(conflicting)
-                ),
-            )
-            raise InstallationError(error_message)
-
+    def _prepare_build_backend(self, finder: PackageFinder) -> None:
         # Isolate in a BuildEnvironment and install the build-time
         # requirements.
         pyproject_requires = self.req.pyproject_requires
@@ -60,13 +68,13 @@ class SourceDistribution(AbstractDistribution):
 
         self.req.build_env = BuildEnvironment()
         self.req.build_env.install_requirements(
-            finder, pyproject_requires, "overlay", "Installing build dependencies"
+            finder, pyproject_requires, "overlay", kind="build dependencies"
         )
         conflicting, missing = self.req.build_env.check_requirements(
             self.req.requirements_to_check
         )
         if conflicting:
-            _raise_conflicts("PEP 517/518 supported requirements", conflicting)
+            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
         if missing:
             logger.warning(
                 "Missing build requirements in pyproject.toml for %s.",
@@ -77,19 +85,66 @@ class SourceDistribution(AbstractDistribution):
                 "pip cannot fall back to setuptools without %s.",
                 " and ".join(map(repr, sorted(missing))),
             )
-        # Install any extra build dependencies that the backend requests.
-        # This must be done in a second pass, as the pyproject.toml
-        # dependencies must be installed before we can call the backend.
+
+    def _get_build_requires_wheel(self) -> Iterable[str]:
         with self.req.build_env:
             runner = runner_with_spinner_message("Getting requirements to build wheel")
             backend = self.req.pep517_backend
             assert backend is not None
             with backend.subprocess_runner(runner):
-                reqs = backend.get_requires_for_build_wheel()
+                return backend.get_requires_for_build_wheel()
 
-        conflicting, missing = self.req.build_env.check_requirements(reqs)
+    def _get_build_requires_editable(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message(
+                "Getting requirements to build editable"
+            )
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_editable()
+
+    def _install_build_reqs(self, finder: PackageFinder) -> None:
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
+        if (
+            self.req.editable
+            and self.req.permit_editable_wheels
+            and self.req.supports_pyproject_editable()
+        ):
+            build_reqs = self._get_build_requires_editable()
+        else:
+            build_reqs = self._get_build_requires_wheel()
+        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
         if conflicting:
-            _raise_conflicts("the backend dependencies", conflicting)
+            self._raise_conflicts("the backend dependencies", conflicting)
         self.req.build_env.install_requirements(
-            finder, missing, "normal", "Installing backend dependencies"
+            finder, missing, "normal", kind="backend dependencies"
         )
+
+    def _raise_conflicts(
+        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+    ) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} "
+            "conflict with {conflicting_with}: {description}."
+        )
+        error_message = format_string.format(
+            requirement=self.req,
+            conflicting_with=conflicting_with,
+            description=", ".join(
+                f"{installed} is incompatible with {wanted}"
+                for installed, wanted in sorted(conflicting_reqs)
+            ),
+        )
+        raise InstallationError(error_message)
+
+    def _raise_missing_reqs(self, missing: Set[str]) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} are missing: {missing}."
+        )
+        error_message = format_string.format(
+            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+        )
+        raise InstallationError(error_message)
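The core of the new `_install_build_reqs` path is "check first, install only what is missing". A self-contained sketch of that check, using `importlib.metadata` and the third-party `packaging` distribution rather than pip's `BuildEnvironment`; the function name and return shape mirror the diff, the implementation itself is an assumption:

```python
from importlib.metadata import PackageNotFoundError, version
from typing import Iterable, Set, Tuple

from packaging.requirements import Requirement


def check_requirements(
    reqs: Iterable[str],
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
    """Partition requirement strings into (conflicting, missing)."""
    conflicting: Set[Tuple[str, str]] = set()
    missing: Set[str] = set()
    for req_string in reqs:
        req = Requirement(req_string)
        try:
            installed = version(req.name)
        except PackageNotFoundError:
            missing.add(req_string)
            continue
        if not req.specifier.contains(installed, prereleases=True):
            # Record what is installed vs. what was asked for.
            conflicting.add((f"{req.name} {installed}", req_string))
    return conflicting, missing


print(check_requirements(["setuptools>=40.8.0", "no-such-backend>=1"]))
```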
@@ -1,10 +1,12 @@
-from zipfile import ZipFile
+from pip._vendor.packaging.utils import canonicalize_name
 
-from pip._vendor.pkg_resources import Distribution
-
 from pip._internal.distributions.base import AbstractDistribution
 from pip._internal.index.package_finder import PackageFinder
-from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
 
 
 class WheelDistribution(AbstractDistribution):
@@ -13,22 +15,20 @@ class WheelDistribution(AbstractDistribution):
     This does not need any preparation as wheels can be directly unpacked.
     """
 
-    def get_pkg_resources_distribution(self):
-        # type: () -> Distribution
+    def get_metadata_distribution(self) -> BaseDistribution:
         """Loads the metadata from the wheel file into memory and returns a
         Distribution that uses it, not relying on the wheel file or
         requirement.
         """
-        # Set as part of preparation during download.
-        assert self.req.local_file_path
-        # Wheels are never unnamed.
-        assert self.req.name
+        assert self.req.local_file_path, "Set as part of preparation during download"
+        assert self.req.name, "Wheels are never unnamed"
+        wheel = FilesystemWheel(self.req.local_file_path)
+        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
 
-        with ZipFile(self.req.local_file_path, allowZip64=True) as z:
-            return pkg_resources_distribution_for_wheel(
-                z, self.req.name, self.req.local_file_path
-            )
-
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        # type: (PackageFinder, bool) -> None
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
         pass
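After this hunk, wheel metadata is read through the `pip._internal.metadata` abstraction instead of `pkg_resources`. Underneath, a wheel is still just a zip archive whose `*.dist-info/METADATA` member is an RFC 822 style message. A hedged sketch of that lower-level read, with an invented `wheel_metadata` helper and a caller-supplied dist-info directory name:

```python
from email.message import Message
from email.parser import Parser
from zipfile import ZipFile


def wheel_metadata(path: str, dist_info_dir: str) -> Message:
    """Parse METADATA out of a wheel without unpacking it to disk."""
    with ZipFile(path, allowZip64=True) as z:
        with z.open(f"{dist_info_dir}/METADATA") as f:
            return Parser().parsestr(f.read().decode("utf-8"))


# Example (file names are assumptions):
# meta = wheel_metadata("pip-21.3-py3-none-any.whl", "pip-21.3.dist-info")
# print(meta["Name"], meta["Version"])
```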
@@ -1,22 +1,174 @@
-"""Exceptions used throughout package"""
+"""Exceptions used throughout package.
+
+This module MUST NOT try to import from anything within `pip._internal` to
+operate. This is expected to be importable from any/all files within the
+subpackage and, thus, should not depend on them.
+"""
 
 import configparser
+import re
 from itertools import chain, groupby, repeat
-from typing import TYPE_CHECKING, Dict, List, Optional
+from typing import TYPE_CHECKING, Dict, List, Optional, Union
 
-from pip._vendor.pkg_resources import Distribution
 from pip._vendor.requests.models import Request, Response
+from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
 
 if TYPE_CHECKING:
     from hashlib import _Hash
+    from typing import Literal
+
+    from pip._internal.metadata import BaseDistribution
     from pip._internal.req.req_install import InstallRequirement
 
 
+#
+# Scaffolding
+#
+def _is_kebab_case(s: str) -> bool:
+    return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
+
+
+def _prefix_with_indent(
+    s: Union[Text, str],
+    console: Console,
+    *,
+    prefix: str,
+    indent: str,
+) -> Text:
+    if isinstance(s, Text):
+        text = s
+    else:
+        text = console.render_str(s)
+
+    return console.render_str(prefix, overflow="ignore") + console.render_str(
+        f"\n{indent}", overflow="ignore"
+    ).join(text.split(allow_blank=True))
+
+
 class PipError(Exception):
-    """Base pip exception"""
+    """The base pip error."""
+
+
+class DiagnosticPipError(PipError):
+    """An error, that presents diagnostic information to the user.
+
+    This contains a bunch of logic, to enable pretty presentation of our error
+    messages. Each error gets a unique reference. Each error can also include
+    additional context, a hint and/or a note -- which are presented with the
+    main error message in a consistent style.
+
+    This is adapted from the error output styling in `sphinx-theme-builder`.
+    """
+
+    reference: str
+
+    def __init__(
+        self,
+        *,
+        kind: 'Literal["error", "warning"]' = "error",
+        reference: Optional[str] = None,
+        message: Union[str, Text],
+        context: Optional[Union[str, Text]],
+        hint_stmt: Optional[Union[str, Text]],
+        note_stmt: Optional[Union[str, Text]] = None,
+        link: Optional[str] = None,
+    ) -> None:
+        # Ensure a proper reference is provided.
+        if reference is None:
+            assert hasattr(self, "reference"), "error reference not provided!"
+            reference = self.reference
+        assert _is_kebab_case(reference), "error reference must be kebab-case!"
+
+        self.kind = kind
+        self.reference = reference
+
+        self.message = message
+        self.context = context
+
+        self.note_stmt = note_stmt
+        self.hint_stmt = hint_stmt
+
+        self.link = link
+
+        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}("
+            f"reference={self.reference!r}, "
+            f"message={self.message!r}, "
+            f"context={self.context!r}, "
+            f"note_stmt={self.note_stmt!r}, "
+            f"hint_stmt={self.hint_stmt!r}"
+            ")>"
+        )
+
+    def __rich_console__(
+        self,
+        console: Console,
+        options: ConsoleOptions,
+    ) -> RenderResult:
+        colour = "red" if self.kind == "error" else "yellow"
+
+        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+        yield ""
+
+        if not options.ascii_only:
+            # Present the main message, with relevant context indented.
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix="[red]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
+
+
+#
+# Actual Errors
+#
 class ConfigurationError(PipError):
     """General exception in configuration"""
 
@@ -29,17 +181,54 @@ class UninstallationError(PipError):
     """General exception during uninstallation"""
 
 
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
 class NoneMetadataError(PipError):
-    """
-    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
-    pip._vendor.pkg_resources.Distribution object and
-    `dist.has_metadata('METADATA')` returns True but
-    `dist.get_metadata('METADATA')` returns None (and similarly for
-    "PKG-INFO").
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
     """
 
-    def __init__(self, dist, metadata_name):
-        # type: (Distribution, str) -> None
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
         """
         :param dist: A Distribution object.
         :param metadata_name: The name of the metadata being accessed
@@ -48,28 +237,24 @@ class NoneMetadataError(PipError):
         self.dist = dist
         self.metadata_name = metadata_name
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         # Use `dist` in the error message because its stringification
         # includes more information, like the version and location.
-        return (
-            'None {} metadata found for distribution: {}'.format(
-                self.metadata_name, self.dist,
-            )
+        return "None {} metadata found for distribution: {}".format(
+            self.metadata_name,
+            self.dist,
         )
 
 
 class UserInstallationInvalid(InstallationError):
     """A --user install is requested on an environment without user site."""
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return "User base directory is not specified"
 
 
 class InvalidSchemeCombination(InstallationError):
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         before = ", ".join(str(a) for a in self.args[:-1])
         return f"Cannot set {before} and {self.args[-1]} together"
 
@@ -102,8 +287,9 @@ class PreviousBuildDirError(PipError):
 class NetworkConnectionError(PipError):
     """HTTP connection error"""
 
-    def __init__(self, error_msg, response=None, request=None):
-        # type: (str, Response, Request) -> None
+    def __init__(
+        self, error_msg: str, response: Response = None, request: Request = None
+    ) -> None:
         """
         Initialize NetworkConnectionError with `request` and `response`
         objects.
@@ -111,13 +297,15 @@ class NetworkConnectionError(PipError):
         self.response = response
         self.request = request
         self.error_msg = error_msg
-        if (self.response is not None and not self.request and
-                hasattr(response, 'request')):
+        if (
+            self.response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
             self.request = self.response.request
         super().__init__(error_msg, response, request)
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return str(self.error_msg)
 
 
@@ -129,6 +317,17 @@ class UnsupportedWheel(InstallationError):
     """Unsupported wheel."""
 
 
+class InvalidWheel(InstallationError):
+    """Invalid (e.g. corrupt) wheel."""
+
+    def __init__(self, location: str, name: str):
+        self.location = location
+        self.name = name
+
+    def __str__(self) -> str:
+        return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
 class MetadataInconsistent(InstallationError):
     """Built metadata contains inconsistent information.
 
@@ -136,15 +335,16 @@ class MetadataInconsistent(InstallationError):
     that do not match the information previously obtained from sdist filename
     or user-supplied ``#egg=`` value.
     """
-    def __init__(self, ireq, field, f_val, m_val):
-        # type: (InstallRequirement, str, str, str) -> None
+
+    def __init__(
+        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
+    ) -> None:
         self.ireq = ireq
         self.field = field
         self.f_val = f_val
         self.m_val = m_val
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         template = (
             "Requested {} has inconsistent {}: "
             "filename has {!r}, but metadata has {!r}"
@@ -152,51 +352,102 @@ class MetadataInconsistent(InstallationError):
         return template.format(self.ireq, self.field, self.f_val, self.m_val)
 
 
-class InstallationSubprocessError(InstallationError):
-    """A subprocess call failed during installation."""
-    def __init__(self, returncode, description):
-        # type: (int, str) -> None
-        self.returncode = returncode
-        self.description = description
+class LegacyInstallFailure(DiagnosticPipError):
+    """Error occurred while executing `setup.py install`"""
 
-    def __str__(self):
-        # type: () -> str
-        return (
-            "Command errored out with exit status {}: {} "
-            "Check the logs for full command output."
-        ).format(self.returncode, self.description)
+    reference = "legacy-install-failure"
+
+    def __init__(self, package_details: str) -> None:
+        super().__init__(
+            message="Encountered error while trying to install package.",
+            context=package_details,
+            hint_stmt="See above for output from the failure.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+        )
+
+
+class InstallationSubprocessError(DiagnosticPipError, InstallationError):
+    """A subprocess call failed."""
+
+    reference = "subprocess-exited-with-error"
+
+    def __init__(
+        self,
+        *,
+        command_description: str,
+        exit_code: int,
+        output_lines: Optional[List[str]],
+    ) -> None:
+        if output_lines is None:
+            output_prompt = Text("See above for output.")
+        else:
+            output_prompt = (
+                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
+                + Text("".join(output_lines))
+                + Text.from_markup(R"[red]\[end of output][/]")
+            )
+
+        super().__init__(
+            message=(
+                f"[green]{escape(command_description)}[/] did not run successfully.\n"
+                f"exit code: {exit_code}"
+            ),
+            context=output_prompt,
+            hint_stmt=None,
+            note_stmt=(
+                "This error originates from a subprocess, and is likely not a "
+                "problem with pip."
+            ),
+        )
+
+        self.command_description = command_description
+        self.exit_code = exit_code
+
+    def __str__(self) -> str:
+        return f"{self.command_description} exited with {self.exit_code}"
+
+
+class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
+    reference = "metadata-generation-failed"
+
+    def __init__(
+        self,
+        *,
+        package_details: str,
+    ) -> None:
+        super(InstallationSubprocessError, self).__init__(
+            message="Encountered error while generating package metadata.",
+            context=escape(package_details),
+            hint_stmt="See above for details.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
        )
+
+    def __str__(self) -> str:
+        return "metadata generation failed"
 
 
 class HashErrors(InstallationError):
     """Multiple HashError instances rolled into one for reporting"""
 
-    def __init__(self):
-        # type: () -> None
-        self.errors = []  # type: List[HashError]
+    def __init__(self) -> None:
+        self.errors: List["HashError"] = []
 
-    def append(self, error):
-        # type: (HashError) -> None
+    def append(self, error: "HashError") -> None:
         self.errors.append(error)
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         lines = []
         self.errors.sort(key=lambda e: e.order)
         for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
             lines.append(cls.head)
             lines.extend(e.body() for e in errors_of_cls)
         if lines:
-            return '\n'.join(lines)
-        return ''
+            return "\n".join(lines)
+        return ""
 
-    def __nonzero__(self):
-        # type: () -> bool
+    def __bool__(self) -> bool:
         return bool(self.errors)
 
-    def __bool__(self):
-        # type: () -> bool
-        return self.__nonzero__()
-
 
 class HashError(InstallationError):
     """
@@ -214,12 +465,12 @@ class HashError(InstallationError):
     typically available earlier.
 
     """
-    req = None  # type: Optional[InstallRequirement]
-    head = ''
-    order = -1  # type: int
 
-    def body(self):
-        # type: () -> str
+    req: Optional["InstallRequirement"] = None
+    head = ""
+    order: int = -1
+
+    def body(self) -> str:
         """Return a summary of me for display under the heading.
 
         This default implementation simply prints a description of the
@@ -229,21 +480,19 @@ class HashError(InstallationError):
         its link already populated by the resolver's _populate_link().
 
         """
-        return f'    {self._requirement_name()}'
+        return f"    {self._requirement_name()}"
 
-    def __str__(self):
-        # type: () -> str
-        return f'{self.head}\n{self.body()}'
+    def __str__(self) -> str:
+        return f"{self.head}\n{self.body()}"
 
-    def _requirement_name(self):
-        # type: () -> str
+    def _requirement_name(self) -> str:
         """Return a description of the requirement that triggered me.
 
         This default implementation returns long description of the req, with
        line numbers
 
         """
-        return str(self.req) if self.req else 'unknown package'
+        return str(self.req) if self.req else "unknown package"
 
 
 class VcsHashUnsupported(HashError):
@@ -251,8 +500,10 @@ class VcsHashUnsupported(HashError):
     we don't have a method for hashing those."""
 
     order = 0
-    head = ("Can't verify hashes for these requirements because we don't "
-            "have a way to hash version control repositories:")
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
 
 
 class DirectoryUrlHashUnsupported(HashError):
@@ -260,32 +511,34 @@ class DirectoryUrlHashUnsupported(HashError):
     we don't have a method for hashing those."""
 
     order = 1
-    head = ("Can't verify hashes for these file:// requirements because they "
-            "point to directories:")
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
 
 
 class HashMissing(HashError):
     """A hash was needed for a requirement but is absent."""
 
     order = 2
-    head = ('Hashes are required in --require-hashes mode, but they are '
-            'missing from some requirements. Here is a list of those '
-            'requirements along with the hashes their downloaded archives '
-            'actually had. Add lines like these to your requirements files to '
-            'prevent tampering. (If you did not enable --require-hashes '
-            'manually, note that it turns on automatically when any package '
-            'has a hash.)')
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
 
-    def __init__(self, gotten_hash):
-        # type: (str) -> None
+    def __init__(self, gotten_hash: str) -> None:
         """
         :param gotten_hash: The hash of the (possibly malicious) archive we
             just downloaded
         """
         self.gotten_hash = gotten_hash
 
-    def body(self):
-        # type: () -> str
+    def body(self) -> str:
         # Dodge circular import.
         from pip._internal.utils.hashes import FAVORITE_HASH
 
@@ -294,13 +547,16 @@ class HashMissing(HashError):
             # In the case of URL-based requirements, display the original URL
             # seen in the requirements file rather than the package name,
             # so the output can be directly copied into the requirements file.
-            package = (self.req.original_link if self.req.original_link
-                       # In case someone feeds something downright stupid
-                       # to InstallRequirement's constructor.
-                       else getattr(self.req, 'req', None))
-        return '    {} --hash={}:{}'.format(package or 'unknown package',
-                                            FAVORITE_HASH,
-                                            self.gotten_hash)
+            package = (
+                self.req.original_link
+                if self.req.original_link
+                # In case someone feeds something downright stupid
+                # to InstallRequirement's constructor.
+                else getattr(self.req, "req", None)
+            )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
 
 
 class HashUnpinned(HashError):
@@ -308,8 +564,10 @@ class HashUnpinned(HashError):
     version."""
 
     order = 3
-    head = ('In --require-hashes mode, all requirements must have their '
-            'versions pinned with ==. These do not:')
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
 
 
 class HashMismatch(HashError):
@@ -321,14 +579,16 @@ class HashMismatch(HashError):
     improve its error message.
 
     """
-    order = 4
-    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
-            'FILE. If you have updated the package versions, please update '
-            'the hashes. Otherwise, examine the package contents carefully; '
-            'someone may have tampered with them.')
 
-    def __init__(self, allowed, gots):
-        # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+    )
+
+    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
         """
         :param allowed: A dict of algorithm names pointing to lists of allowed
             hex digests
@@ -338,13 +598,10 @@ class HashMismatch(HashError):
         self.allowed = allowed
         self.gots = gots
 
-    def body(self):
-        # type: () -> str
-        return '    {}:\n{}'.format(self._requirement_name(),
-                                    self._hash_comparison())
+    def body(self) -> str:
+        return "    {}:\n{}".format(self._requirement_name(), self._hash_comparison())
 
-    def _hash_comparison(self):
-        # type: () -> str
+    def _hash_comparison(self) -> str:
         """
         Return a comparison of actual and expected hash values.
 
@@ -355,20 +612,22 @@ class HashMismatch(HashError):
             Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
 
         """
-        def hash_then_or(hash_name):
-            # type: (str) -> chain[str]
+
+        def hash_then_or(hash_name: str) -> "chain[str]":
             # For now, all the decent hashes have 6-char names, so we can get
             # away with hard-coding space literals.
-            return chain([hash_name], repeat(' or'))
+            return chain([hash_name], repeat(" or"))
 
-        lines = []  # type: List[str]
+        lines: List[str] = []
         for hash_name, expecteds in self.allowed.items():
             prefix = hash_then_or(hash_name)
-            lines.extend(('        Expected {} {}'.format(next(prefix), e))
-                         for e in expecteds)
-            lines.append('             Got        {}\n'.format(
-                self.gots[hash_name].hexdigest()))
-        return '\n'.join(lines)
+            lines.extend(
+                ("        Expected {} {}".format(next(prefix), e)) for e in expecteds
+            )
+            lines.append(
+                "             Got        {}\n".format(self.gots[hash_name].hexdigest())
+            )
+        return "\n".join(lines)
 
 
 class UnsupportedPythonVersion(InstallationError):
@@ -377,18 +636,20 @@ class UnsupportedPythonVersion(InstallationError):
 
 
 class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
-    """When there are errors while loading a configuration file
-    """
+    """When there are errors while loading a configuration file"""
 
-    def __init__(self, reason="could not be loaded", fname=None, error=None):
-        # type: (str, Optional[str], Optional[configparser.Error]) -> None
+    def __init__(
+        self,
+        reason: str = "could not be loaded",
+        fname: Optional[str] = None,
+        error: Optional[configparser.Error] = None,
+    ) -> None:
         super().__init__(error)
         self.reason = reason
         self.fname = fname
         self.error = error
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         if self.fname is not None:
             message_part = f" in {self.fname}."
         else:
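The centrepiece of the hunk above is `DiagnosticPipError`, which renders itself through rich's console protocol (`__rich_console__`). A hedged usage sketch, assuming a pip new enough to ship this class and the standalone `rich` package standing in for pip's vendored copy:

```python
# Assumes: pip >= 22 on the path (DiagnosticPipError is a private API) and
# `rich` installed separately; pip itself renders via pip._vendor.rich.
from pip._internal.exceptions import DiagnosticPipError
from rich.console import Console

error = DiagnosticPipError(
    reference="example-error",  # must be kebab-case, per the assert
    message="Something went wrong while processing the package.",
    context="the backend returned a non-zero exit code",
    hint_stmt="Re-run with -v for more detail.",
    note_stmt="This is only an illustration.",
)
Console(stderr=True).print(error)
```

With a context present, the message renders behind a `×` gutter and the context under a `╰─>` arrow, followed by the note and hint lines in a consistent style.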
@ -5,7 +5,6 @@ The main purpose of this module is to expose LinkCollector.collect_sources().
|
||||||
import cgi
|
import cgi
|
||||||
import collections
|
import collections
|
||||||
import functools
|
import functools
|
||||||
import html
|
|
||||||
import itertools
|
import itertools
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
@ -13,15 +12,19 @@ import re
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
|
from html.parser import HTMLParser
|
||||||
from optparse import Values
|
from optparse import Values
|
||||||
from typing import (
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
Callable,
|
Callable,
|
||||||
|
Dict,
|
||||||
Iterable,
|
Iterable,
|
||||||
List,
|
List,
|
||||||
MutableMapping,
|
MutableMapping,
|
||||||
NamedTuple,
|
NamedTuple,
|
||||||
Optional,
|
Optional,
|
||||||
Sequence,
|
Sequence,
|
||||||
|
Tuple,
|
||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -40,34 +43,36 @@ from pip._internal.vcs import vcs
|
||||||
|
|
||||||
from .sources import CandidatesFromPage, LinkSource, build_source
|
from .sources import CandidatesFromPage, LinkSource, build_source
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from typing import Protocol
|
||||||
|
else:
|
||||||
|
Protocol = object
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
HTMLElement = xml.etree.ElementTree.Element
|
HTMLElement = xml.etree.ElementTree.Element
|
||||||
ResponseHeaders = MutableMapping[str, str]
|
ResponseHeaders = MutableMapping[str, str]
|
||||||
|
|
||||||
|
|
||||||
def _match_vcs_scheme(url):
|
def _match_vcs_scheme(url: str) -> Optional[str]:
|
||||||
# type: (str) -> Optional[str]
|
|
||||||
"""Look for VCS schemes in the URL.
|
"""Look for VCS schemes in the URL.
|
||||||
|
|
||||||
Returns the matched VCS scheme, or None if there's no match.
|
Returns the matched VCS scheme, or None if there's no match.
|
||||||
"""
|
"""
|
||||||
for scheme in vcs.schemes:
|
for scheme in vcs.schemes:
|
||||||
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
|
if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
|
||||||
return scheme
|
return scheme
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
class _NotHTML(Exception):
|
class _NotHTML(Exception):
|
||||||
def __init__(self, content_type, request_desc):
|
def __init__(self, content_type: str, request_desc: str) -> None:
|
||||||
# type: (str, str) -> None
|
|
||||||
super().__init__(content_type, request_desc)
|
super().__init__(content_type, request_desc)
|
||||||
self.content_type = content_type
|
self.content_type = content_type
|
||||||
self.request_desc = request_desc
|
self.request_desc = request_desc
|
||||||
|
|
||||||
|
|
||||||
def _ensure_html_header(response):
|
def _ensure_html_header(response: Response) -> None:
|
||||||
# type: (Response) -> None
|
|
||||||
"""Check the Content-Type header to ensure the response contains HTML.
|
"""Check the Content-Type header to ensure the response contains HTML.
|
||||||
|
|
||||||
Raises `_NotHTML` if the content type is not text/html.
|
Raises `_NotHTML` if the content type is not text/html.
|
||||||
|
@ -81,15 +86,14 @@ class _NotHTTP(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def _ensure_html_response(url, session):
|
def _ensure_html_response(url: str, session: PipSession) -> None:
|
||||||
# type: (str, PipSession) -> None
|
|
||||||
"""Send a HEAD request to the URL, and ensure the response contains HTML.
|
"""Send a HEAD request to the URL, and ensure the response contains HTML.
|
||||||
|
|
||||||
Raises `_NotHTTP` if the URL is not available for a HEAD request, or
|
Raises `_NotHTTP` if the URL is not available for a HEAD request, or
|
||||||
`_NotHTML` if the content type is not text/html.
|
`_NotHTML` if the content type is not text/html.
|
||||||
"""
|
"""
|
||||||
scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
|
scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
|
||||||
if scheme not in {'http', 'https'}:
|
if scheme not in {"http", "https"}:
|
||||||
raise _NotHTTP()
|
raise _NotHTTP()
|
||||||
|
|
||||||
resp = session.head(url, allow_redirects=True)
|
resp = session.head(url, allow_redirects=True)
|
||||||
|
@ -98,8 +102,7 @@ def _ensure_html_response(url, session):
|
||||||
_ensure_html_header(resp)
|
_ensure_html_header(resp)
|
||||||
|
|
||||||
|
|
||||||
def _get_html_response(url, session):
|
def _get_html_response(url: str, session: PipSession) -> Response:
|
||||||
# type: (str, PipSession) -> Response
|
|
||||||
"""Access an HTML page with GET, and return the response.
|
"""Access an HTML page with GET, and return the response.
|
||||||
|
|
||||||
This consists of three parts:
|
This consists of three parts:
|
||||||
|
@ -115,7 +118,7 @@ def _get_html_response(url, session):
|
||||||
if is_archive_file(Link(url).filename):
|
if is_archive_file(Link(url).filename):
|
||||||
_ensure_html_response(url, session=session)
|
_ensure_html_response(url, session=session)
|
||||||
|
|
||||||
logger.debug('Getting page %s', redact_auth_from_url(url))
|
logger.debug("Getting page %s", redact_auth_from_url(url))
|
||||||
|
|
||||||
resp = session.get(
|
resp = session.get(
|
||||||
url,
|
url,
|
||||||
|
@ -149,19 +152,16 @@ def _get_html_response(url, session):
|
||||||
return resp
|
return resp
|
||||||
|
|
||||||
|
|
||||||
def _get_encoding_from_headers(headers):
|
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
|
||||||
# type: (ResponseHeaders) -> Optional[str]
|
"""Determine if we have any encoding information in our headers."""
|
||||||
"""Determine if we have any encoding information in our headers.
|
|
||||||
"""
|
|
||||||
if headers and "Content-Type" in headers:
|
if headers and "Content-Type" in headers:
|
||||||
content_type, params = cgi.parse_header(headers["Content-Type"])
|
content_type, params = cgi.parse_header(headers["Content-Type"])
|
||||||
if "charset" in params:
|
if "charset" in params:
|
||||||
return params['charset']
|
return params["charset"]
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _determine_base_url(document, page_url):
|
def _determine_base_url(document: HTMLElement, page_url: str) -> str:
|
||||||
# type: (HTMLElement, str) -> str
|
|
||||||
"""Determine the HTML document's base URL.
|
"""Determine the HTML document's base URL.
|
||||||
|
|
||||||
This looks for a ``<base>`` tag in the HTML document. If present, its href
|
This looks for a ``<base>`` tag in the HTML document. If present, its href
|
||||||
|
@ -172,6 +172,8 @@ def _determine_base_url(document, page_url):
|
||||||
:param document: An HTML document representation. The current
|
:param document: An HTML document representation. The current
|
||||||
implementation expects the result of ``html5lib.parse()``.
|
implementation expects the result of ``html5lib.parse()``.
|
||||||
:param page_url: The URL of the HTML document.
|
:param page_url: The URL of the HTML document.
|
||||||
|
|
||||||
|
TODO: Remove when `html5lib` is dropped.
|
||||||
"""
|
"""
|
||||||
for base in document.findall(".//base"):
|
for base in document.findall(".//base"):
|
||||||
href = base.get("href")
|
href = base.get("href")
|
||||||
|
@ -180,8 +182,7 @@ def _determine_base_url(document, page_url):
|
||||||
return page_url
|
return page_url
|
||||||
|
|
||||||
|
|
||||||
def _clean_url_path_part(part):
|
def _clean_url_path_part(part: str) -> str:
|
||||||
# type: (str) -> str
|
|
||||||
"""
|
"""
|
||||||
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
||||||
"""
|
"""
|
||||||
|
@ -189,8 +190,7 @@ def _clean_url_path_part(part):
|
||||||
return urllib.parse.quote(urllib.parse.unquote(part))
|
return urllib.parse.quote(urllib.parse.unquote(part))
|
||||||
|
|
||||||
|
|
||||||
def _clean_file_url_path(part):
|
def _clean_file_url_path(part: str) -> str:
|
||||||
# type: (str) -> str
|
|
||||||
"""
|
"""
|
||||||
Clean the first part of a URL path that corresponds to a local
|
Clean the first part of a URL path that corresponds to a local
|
||||||
filesystem path (i.e. the first part after splitting on "@" characters).
|
filesystem path (i.e. the first part after splitting on "@" characters).
|
||||||
|
@ -204,11 +204,10 @@ def _clean_file_url_path(part):
|
||||||
|
|
||||||
|
|
||||||
# percent-encoded: /
|
# percent-encoded: /
|
||||||
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)
|
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|
||||||
|
|
||||||
|
|
||||||
def _clean_url_path(path, is_local_path):
|
def _clean_url_path(path: str, is_local_path: bool) -> str:
|
||||||
# type: (str, bool) -> str
|
|
||||||
"""
|
"""
|
||||||
Clean the path portion of a URL.
|
Clean the path portion of a URL.
|
||||||
"""
|
"""
|
||||||
|
@ -222,16 +221,15 @@ def _clean_url_path(path, is_local_path):
|
||||||
parts = _reserved_chars_re.split(path)
|
parts = _reserved_chars_re.split(path)
|
||||||
|
|
||||||
cleaned_parts = []
|
cleaned_parts = []
|
||||||
for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
|
for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
|
||||||
cleaned_parts.append(clean_func(to_clean))
|
cleaned_parts.append(clean_func(to_clean))
|
||||||
# Normalize %xx escapes (e.g. %2f -> %2F)
|
# Normalize %xx escapes (e.g. %2f -> %2F)
|
||||||
cleaned_parts.append(reserved.upper())
|
cleaned_parts.append(reserved.upper())
|
||||||
|
|
||||||
return ''.join(cleaned_parts)
|
return "".join(cleaned_parts)
|
||||||
|
|
||||||
|
|
||||||
def _clean_link(url):
|
def _clean_link(url: str) -> str:
|
||||||
# type: (str) -> str
|
|
||||||
"""
|
"""
|
||||||
Make sure a link is fully quoted.
|
Make sure a link is fully quoted.
|
||||||
For example, if ' ' occurs in the URL, it will be replaced with "%20",
|
For example, if ' ' occurs in the URL, it will be replaced with "%20",
|
||||||
|
@ -247,25 +245,20 @@ def _clean_link(url):
|
||||||
|
|
||||||
|
|
||||||
def _create_link_from_element(
|
def _create_link_from_element(
|
||||||
anchor, # type: HTMLElement
|
element_attribs: Dict[str, Optional[str]],
|
||||||
page_url, # type: str
|
page_url: str,
|
||||||
base_url, # type: str
|
base_url: str,
|
||||||
):
|
) -> Optional[Link]:
|
||||||
# type: (...) -> Optional[Link]
|
|
||||||
"""
|
"""
|
||||||
Convert an anchor element in a simple repository page to a Link.
|
Convert an anchor element's attributes in a simple repository page to a Link.
|
||||||
"""
|
"""
|
||||||
href = anchor.get("href")
|
href = element_attribs.get("href")
|
||||||
if not href:
|
if not href:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
url = _clean_link(urllib.parse.urljoin(base_url, href))
|
url = _clean_link(urllib.parse.urljoin(base_url, href))
|
||||||
pyrequire = anchor.get('data-requires-python')
|
pyrequire = element_attribs.get("data-requires-python")
|
||||||
pyrequire = html.unescape(pyrequire) if pyrequire else None
|
yanked_reason = element_attribs.get("data-yanked")
|
||||||
|
|
||||||
yanked_reason = anchor.get('data-yanked')
|
|
||||||
if yanked_reason:
|
|
||||||
yanked_reason = html.unescape(yanked_reason)
|
|
||||||
|
|
||||||
link = Link(
|
link = Link(
|
||||||
url,
|
url,
|
||||||
|
@@ -278,25 +271,25 @@ def _create_link_from_element(


 class CacheablePageContent:
-    def __init__(self, page):
-        # type: (HTMLPage) -> None
+    def __init__(self, page: "HTMLPage") -> None:
         assert page.cache_link_parsing
         self.page = page

-    def __eq__(self, other):
-        # type: (object) -> bool
-        return (isinstance(other, type(self)) and
-                self.page.url == other.page.url)
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self.page.url == other.page.url

-    def __hash__(self):
-        # type: () -> int
+    def __hash__(self) -> int:
         return hash(self.page.url)


-def with_cached_html_pages(
-    fn,  # type: Callable[[HTMLPage], Iterable[Link]]
-):
-    # type: (...) -> Callable[[HTMLPage], List[Link]]
+class ParseLinks(Protocol):
+    def __call__(
+        self, page: "HTMLPage", use_deprecated_html5lib: bool
+    ) -> Iterable[Link]:
+        ...
+
+
+def with_cached_html_pages(fn: ParseLinks) -> ParseLinks:
     """
     Given a function that parses an Iterable[Link] from an HTMLPage, cache the
     function's result (keyed by CacheablePageContent), unless the HTMLPage
@@ -304,25 +297,25 @@ def with_cached_html_pages(
     """

     @functools.lru_cache(maxsize=None)
-    def wrapper(cacheable_page):
-        # type: (CacheablePageContent) -> List[Link]
-        return list(fn(cacheable_page.page))
+    def wrapper(
+        cacheable_page: CacheablePageContent, use_deprecated_html5lib: bool
+    ) -> List[Link]:
+        return list(fn(cacheable_page.page, use_deprecated_html5lib))

     @functools.wraps(fn)
-    def wrapper_wrapper(page):
-        # type: (HTMLPage) -> List[Link]
+    def wrapper_wrapper(page: "HTMLPage", use_deprecated_html5lib: bool) -> List[Link]:
         if page.cache_link_parsing:
-            return wrapper(CacheablePageContent(page))
-        return list(fn(page))
+            return wrapper(CacheablePageContent(page), use_deprecated_html5lib)
+        return list(fn(page, use_deprecated_html5lib))

     return wrapper_wrapper


-@with_cached_html_pages
-def parse_links(page):
-    # type: (HTMLPage) -> Iterable[Link]
+def _parse_links_html5lib(page: "HTMLPage") -> Iterable[Link]:
     """
     Parse an HTML document, and yield its anchor elements as Link objects.
+
+    TODO: Remove when `html5lib` is dropped.
     """
     document = html5lib.parse(
         page.content,
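The decorator above gets memoization from functools.lru_cache by funneling each page through a hashable wrapper keyed on the page URL, and it materializes the parser's generator with list() so the cached result can be iterated more than once. A stripped-down sketch of the same pattern, with generic names rather than pip's:

    import functools
    from typing import Callable, List

    class CacheKey:
        """Hashable stand-in for CacheablePageContent."""

        def __init__(self, url: str) -> None:
            self.url = url

        def __eq__(self, other: object) -> bool:
            return isinstance(other, type(self)) and self.url == other.url

        def __hash__(self) -> int:
            return hash(self.url)

    def cached_by_url(fn: Callable[[str], List[str]]) -> Callable[[str], List[str]]:
        @functools.lru_cache(maxsize=None)
        def wrapper(key: CacheKey) -> List[str]:
            return list(fn(key.url))

        @functools.wraps(fn)
        def wrapper_wrapper(url: str) -> List[str]:
            return wrapper(CacheKey(url))

        return wrapper_wrapper

    @cached_by_url
    def parse(url: str) -> List[str]:
        print("parsing", url)  # printed once per distinct URL
        return [url]

    parse("https://example.com/simple/")
    parse("https://example.com/simple/")  # second call is served from the cache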
@@ -333,6 +326,33 @@ def parse_links(page):
     url = page.url
     base_url = _determine_base_url(document, url)
     for anchor in document.findall(".//a"):
+        link = _create_link_from_element(
+            anchor.attrib,
+            page_url=url,
+            base_url=base_url,
+        )
+        if link is None:
+            continue
+        yield link
+
+
+@with_cached_html_pages
+def parse_links(page: "HTMLPage", use_deprecated_html5lib: bool) -> Iterable[Link]:
+    """
+    Parse an HTML document, and yield its anchor elements as Link objects.
+    """
+
+    if use_deprecated_html5lib:
+        yield from _parse_links_html5lib(page)
+        return
+
+    parser = HTMLLinkParser(page.url)
+    encoding = page.encoding or "utf-8"
+    parser.feed(page.content.decode(encoding))
+
+    url = page.url
+    base_url = parser.base_url or url
+    for anchor in parser.anchors:
         link = _create_link_from_element(
             anchor,
             page_url=url,
@@ -348,12 +368,11 @@ class HTMLPage:

     def __init__(
         self,
-        content,  # type: bytes
-        encoding,  # type: Optional[str]
-        url,  # type: str
-        cache_link_parsing=True,  # type: bool
-    ):
-        # type: (...) -> None
+        content: bytes,
+        encoding: Optional[str],
+        url: str,
+        cache_link_parsing: bool = True,
+    ) -> None:
         """
         :param encoding: the encoding to decode the given content.
         :param url: the URL from which the HTML was downloaded.
@@ -366,70 +385,103 @@ class HTMLPage:
         self.url = url
         self.cache_link_parsing = cache_link_parsing

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return redact_auth_from_url(self.url)


+class HTMLLinkParser(HTMLParser):
+    """
+    HTMLParser that keeps the first base HREF and a list of all anchor
+    elements' attributes.
+    """
+
+    def __init__(self, url: str) -> None:
+        super().__init__(convert_charrefs=True)
+
+        self.url: str = url
+        self.base_url: Optional[str] = None
+        self.anchors: List[Dict[str, Optional[str]]] = []
+
+    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
+        if tag == "base" and self.base_url is None:
+            href = self.get_href(attrs)
+            if href is not None:
+                self.base_url = href
+        elif tag == "a":
+            self.anchors.append(dict(attrs))
+
+    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
+        for name, value in attrs:
+            if name == "href":
+                return value
+        return None
+
+
 def _handle_get_page_fail(
-    link,  # type: Link
-    reason,  # type: Union[str, Exception]
-    meth=None  # type: Optional[Callable[..., None]]
-):
-    # type: (...) -> None
+    link: Link,
+    reason: Union[str, Exception],
+    meth: Optional[Callable[..., None]] = None,
+) -> None:
     if meth is None:
         meth = logger.debug
     meth("Could not fetch URL %s: %s - skipping", link, reason)


-def _make_html_page(response, cache_link_parsing=True):
-    # type: (Response, bool) -> HTMLPage
+def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage:
     encoding = _get_encoding_from_headers(response.headers)
     return HTMLPage(
         response.content,
         encoding=encoding,
         url=response.url,
-        cache_link_parsing=cache_link_parsing)
+        cache_link_parsing=cache_link_parsing,
+    )


-def _get_html_page(link, session=None):
-    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
+def _get_html_page(
+    link: Link, session: Optional[PipSession] = None
+) -> Optional["HTMLPage"]:
     if session is None:
         raise TypeError(
             "_get_html_page() missing 1 required keyword argument: 'session'"
         )

-    url = link.url.split('#', 1)[0]
+    url = link.url.split("#", 1)[0]

     # Check for VCS schemes that do not support lookup as web pages.
     vcs_scheme = _match_vcs_scheme(url)
     if vcs_scheme:
-        logger.warning('Cannot look at %s URL %s because it does not support '
-                       'lookup as web pages.', vcs_scheme, link)
+        logger.warning(
+            "Cannot look at %s URL %s because it does not support lookup as web pages.",
+            vcs_scheme,
+            link,
+        )
         return None

     # Tack index.html onto file:// URLs that point to directories
     scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
-    if (scheme == 'file' and os.path.isdir(urllib.request.url2pathname(path))):
+    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
         # add trailing slash if not present so urljoin doesn't trim
         # final segment
-        if not url.endswith('/'):
-            url += '/'
-        url = urllib.parse.urljoin(url, 'index.html')
-        logger.debug(' file: URL is directory, getting %s', url)
+        if not url.endswith("/"):
+            url += "/"
+        url = urllib.parse.urljoin(url, "index.html")
+        logger.debug(" file: URL is directory, getting %s", url)

     try:
         resp = _get_html_response(url, session=session)
     except _NotHTTP:
         logger.warning(
-            'Skipping page %s because it looks like an archive, and cannot '
-            'be checked by a HTTP HEAD request.', link,
+            "Skipping page %s because it looks like an archive, and cannot "
+            "be checked by a HTTP HEAD request.",
+            link,
         )
     except _NotHTML as exc:
         logger.warning(
-            'Skipping page %s because the %s request got Content-Type: %s.'
-            'The only supported Content-Type is text/html',
-            link, exc.request_desc, exc.content_type,
+            "Skipping page %s because the %s request got Content-Type: %s."
+            "The only supported Content-Type is text/html",
+            link,
+            exc.request_desc,
+            exc.content_type,
         )
     except NetworkConnectionError as exc:
         _handle_get_page_fail(link, exc)
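HTMLLinkParser collects exactly what the link scan needs: the first base HREF and each anchor's attribute dict. Note that html.parser unescapes attribute values itself, which is why the html.unescape() calls could be dropped from _create_link_from_element. A self-contained demonstration with made-up HTML (the class body below mirrors the one added above, slightly compressed):

    from html.parser import HTMLParser
    from typing import Dict, List, Optional, Tuple

    class HTMLLinkParser(HTMLParser):
        def __init__(self, url: str) -> None:
            super().__init__(convert_charrefs=True)
            self.url: str = url
            self.base_url: Optional[str] = None
            self.anchors: List[Dict[str, Optional[str]]] = []

        def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
            if tag == "base" and self.base_url is None:
                href = dict(attrs).get("href")
                if href is not None:
                    self.base_url = href
            elif tag == "a":
                self.anchors.append(dict(attrs))

    parser = HTMLLinkParser("https://example.com/simple/demo/")
    parser.feed(
        '<base href="https://files.example.com/">'
        '<a href="demo-1.0.tar.gz" data-requires-python="&gt;=3.6">demo</a>'
    )
    print(parser.base_url)                            # https://files.example.com/
    print(parser.anchors[0]["data-requires-python"])  # >=3.6 (already unescaped)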
@@ -444,8 +496,7 @@ def _get_html_page(link, session=None):
     except requests.Timeout:
         _handle_get_page_fail(link, "timed out")
     else:
-        return _make_html_page(resp,
-                               cache_link_parsing=link.cache_link_parsing)
+        return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing)

     return None


@@ -465,16 +516,19 @@ class LinkCollector:

     def __init__(
         self,
-        session,  # type: PipSession
-        search_scope,  # type: SearchScope
-    ):
-        # type: (...) -> None
+        session: PipSession,
+        search_scope: SearchScope,
+    ) -> None:
         self.search_scope = search_scope
         self.session = session

     @classmethod
-    def create(cls, session, options, suppress_no_index=False):
-        # type: (PipSession, Values, bool) -> LinkCollector
+    def create(
+        cls,
+        session: PipSession,
+        options: Values,
+        suppress_no_index: bool = False,
+    ) -> "LinkCollector":
         """
         :param session: The Session to use to make requests.
         :param suppress_no_index: Whether to ignore the --no-index option
@@ -483,8 +537,8 @@ class LinkCollector:
         index_urls = [options.index_url] + options.extra_index_urls
         if options.no_index and not suppress_no_index:
             logger.debug(
-                'Ignoring indexes: %s',
-                ','.join(redact_auth_from_url(url) for url in index_urls),
+                "Ignoring indexes: %s",
+                ",".join(redact_auth_from_url(url) for url in index_urls),
             )
             index_urls = []

@@ -492,20 +546,20 @@ class LinkCollector:
         find_links = options.find_links or []

         search_scope = SearchScope.create(
-            find_links=find_links, index_urls=index_urls,
+            find_links=find_links,
+            index_urls=index_urls,
         )
         link_collector = LinkCollector(
-            session=session, search_scope=search_scope,
+            session=session,
+            search_scope=search_scope,
         )
         return link_collector

     @property
-    def find_links(self):
-        # type: () -> List[str]
+    def find_links(self) -> List[str]:
         return self.search_scope.find_links

-    def fetch_page(self, location):
-        # type: (Link) -> Optional[HTMLPage]
+    def fetch_page(self, location: Link) -> Optional[HTMLPage]:
         """
         Fetch an HTML page containing package links.
         """

@@ -3,6 +3,7 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

+import enum
 import functools
 import itertools
 import logging
@@ -30,31 +31,28 @@ from pip._internal.models.selection_prefs import SelectionPreferences
 from pip._internal.models.target_python import TargetPython
 from pip._internal.models.wheel import Wheel
 from pip._internal.req import InstallRequirement
+from pip._internal.utils._log import getLogger
 from pip._internal.utils.filetypes import WHEEL_EXTENSION
 from pip._internal.utils.hashes import Hashes
 from pip._internal.utils.logging import indent_log
 from pip._internal.utils.misc import build_netloc
 from pip._internal.utils.packaging import check_requires_python
 from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
-from pip._internal.utils.urls import url_to_path

-__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder']
+__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]


-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)

 BuildTag = Union[Tuple[()], Tuple[int, str]]
-CandidateSortingKey = (
-    Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
-)
+CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]


 def _check_link_requires_python(
-    link,  # type: Link
-    version_info,  # type: Tuple[int, int, int]
-    ignore_requires_python=False,  # type: bool
-):
-    # type: (...) -> bool
+    link: Link,
+    version_info: Tuple[int, int, int],
+    ignore_requires_python: bool = False,
+) -> bool:
     """
     Return whether the given Python version is compatible with a link's
     "Requires-Python" value.
@@ -66,39 +64,54 @@ def _check_link_requires_python(
     """
     try:
         is_compatible = check_requires_python(
-            link.requires_python, version_info=version_info,
+            link.requires_python,
+            version_info=version_info,
         )
     except specifiers.InvalidSpecifier:
         logger.debug(
             "Ignoring invalid Requires-Python (%r) for link: %s",
-            link.requires_python, link,
+            link.requires_python,
+            link,
         )
     else:
         if not is_compatible:
-            version = '.'.join(map(str, version_info))
+            version = ".".join(map(str, version_info))
             if not ignore_requires_python:
-                logger.debug(
-                    'Link requires a different Python (%s not in: %r): %s',
-                    version, link.requires_python, link,
+                logger.verbose(
+                    "Link requires a different Python (%s not in: %r): %s",
+                    version,
+                    link.requires_python,
+                    link,
                 )
                 return False

             logger.debug(
-                'Ignoring failed Requires-Python check (%s not in: %r) '
-                'for link: %s',
-                version, link.requires_python, link,
+                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
+                version,
+                link.requires_python,
+                link,
             )

     return True


+class LinkType(enum.Enum):
+    candidate = enum.auto()
+    different_project = enum.auto()
+    yanked = enum.auto()
+    format_unsupported = enum.auto()
+    format_invalid = enum.auto()
+    platform_mismatch = enum.auto()
+    requires_python_mismatch = enum.auto()
+
+
 class LinkEvaluator:

     """
     Responsible for evaluating links for a particular project.
     """

-    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
+    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

     # Don't include an allow_yanked default value to make sure each call
     # site considers whether yanked releases are allowed. This also causes
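With LinkType in place, evaluate_link's result carries a machine-readable category instead of a bare boolean, so callers can react per reason. A hedged sketch of consuming it (link_evaluator and link assumed to be in scope; the handlers are hypothetical):

    result, detail = link_evaluator.evaluate_link(link)
    if result == LinkType.candidate:
        version = detail  # detail is the candidate's version string
    elif result == LinkType.requires_python_mismatch:
        record_requires_python_skip(detail)  # hypothetical handler
    else:
        record_skip(result, detail)          # hypothetical handler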
@@ -106,14 +119,13 @@ class LinkEvaluator:
     # people when reading the code.
     def __init__(
         self,
-        project_name,  # type: str
-        canonical_name,  # type: str
-        formats,  # type: FrozenSet[str]
-        target_python,  # type: TargetPython
-        allow_yanked,  # type: bool
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
+        project_name: str,
+        canonical_name: str,
+        formats: FrozenSet[str],
+        target_python: TargetPython,
+        allow_yanked: bool,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
         """
         :param project_name: The user supplied package name.
         :param canonical_name: The canonical package name.
@@ -142,20 +154,20 @@ class LinkEvaluator:

         self.project_name = project_name

-    def evaluate_link(self, link):
-        # type: (Link) -> Tuple[bool, Optional[str]]
+    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
         """
         Determine whether a link is a candidate for installation.

-        :return: A tuple (is_candidate, result), where `result` is (1) a
-            version string if `is_candidate` is True, and (2) if
-            `is_candidate` is False, an optional string to log the reason
-            the link fails to qualify.
+        :return: A tuple (result, detail), where *result* is an enum
+            representing whether the evaluation found a candidate, or the reason
+            why one is not found. If a candidate is found, *detail* will be the
+            candidate's version string; if one is not found, it contains the
+            reason the link fails to qualify.
         """
         version = None
         if link.is_yanked and not self._allow_yanked:
-            reason = link.yanked_reason or '<none given>'
-            return (False, f'yanked for reason: {reason}')
+            reason = link.yanked_reason or "<none given>"
+            return (LinkType.yanked, f"yanked for reason: {reason}")

         if link.egg_fragment:
             egg_info = link.egg_fragment
@@ -163,80 +175,85 @@ class LinkEvaluator:
         else:
             egg_info, ext = link.splitext()
             if not ext:
-                return (False, 'not a file')
+                return (LinkType.format_unsupported, "not a file")
             if ext not in SUPPORTED_EXTENSIONS:
-                return (False, f'unsupported archive format: {ext}')
+                return (
+                    LinkType.format_unsupported,
+                    f"unsupported archive format: {ext}",
+                )
         if "binary" not in self._formats and ext == WHEEL_EXTENSION:
-            reason = 'No binaries permitted for {}'.format(
-                self.project_name)
-            return (False, reason)
-        if "macosx10" in link.path and ext == '.zip':
-            return (False, 'macosx10 one')
+            reason = f"No binaries permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)
+        if "macosx10" in link.path and ext == ".zip":
+            return (LinkType.format_unsupported, "macosx10 one")
         if ext == WHEEL_EXTENSION:
             try:
                 wheel = Wheel(link.filename)
             except InvalidWheelFilename:
-                return (False, 'invalid wheel filename')
+                return (
+                    LinkType.format_invalid,
+                    "invalid wheel filename",
+                )
             if canonicalize_name(wheel.name) != self._canonical_name:
-                reason = 'wrong project name (not {})'.format(
-                    self.project_name)
-                return (False, reason)
+                reason = f"wrong project name (not {self.project_name})"
+                return (LinkType.different_project, reason)

             supported_tags = self._target_python.get_tags()
             if not wheel.supported(supported_tags):
                 # Include the wheel's tags in the reason string to
                 # simplify troubleshooting compatibility issues.
-                file_tags = wheel.get_formatted_file_tags()
+                file_tags = ", ".join(wheel.get_formatted_file_tags())
                 reason = (
-                    "none of the wheel's tags ({}) are compatible "
-                    "(run pip debug --verbose to show compatible tags)".format(
-                        ', '.join(file_tags)
-                    )
+                    f"none of the wheel's tags ({file_tags}) are compatible "
+                    f"(run pip debug --verbose to show compatible tags)"
                 )
-                return (False, reason)
+                return (LinkType.platform_mismatch, reason)

             version = wheel.version

         # This should be up by the self.ok_binary check, but see issue 2700.
         if "source" not in self._formats and ext != WHEEL_EXTENSION:
-            reason = f'No sources permitted for {self.project_name}'
-            return (False, reason)
+            reason = f"No sources permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)

         if not version:
             version = _extract_version_from_fragment(
-                egg_info, self._canonical_name,
+                egg_info,
+                self._canonical_name,
             )
         if not version:
-            reason = f'Missing project version for {self.project_name}'
-            return (False, reason)
+            reason = f"Missing project version for {self.project_name}"
+            return (LinkType.format_invalid, reason)

         match = self._py_version_re.search(version)
         if match:
-            version = version[:match.start()]
+            version = version[: match.start()]
             py_version = match.group(1)
             if py_version != self._target_python.py_version:
-                return (False, 'Python version is incorrect')
+                return (
+                    LinkType.platform_mismatch,
+                    "Python version is incorrect",
+                )

         supports_python = _check_link_requires_python(
-            link, version_info=self._target_python.py_version_info,
+            link,
+            version_info=self._target_python.py_version_info,
             ignore_requires_python=self._ignore_requires_python,
         )
         if not supports_python:
-            # Return None for the reason text to suppress calling
-            # _log_skipped_link().
-            return (False, None)
+            reason = f"{version} Requires-Python {link.requires_python}"
+            return (LinkType.requires_python_mismatch, reason)

-        logger.debug('Found link %s, version: %s', link, version)
+        logger.debug("Found link %s, version: %s", link, version)

-        return (True, version)
+        return (LinkType.candidate, version)


 def filter_unallowed_hashes(
-    candidates,  # type: List[InstallationCandidate]
-    hashes,  # type: Hashes
-    project_name,  # type: str
-):
-    # type: (...) -> List[InstallationCandidate]
+    candidates: List[InstallationCandidate],
+    hashes: Hashes,
+    project_name: str,
+) -> List[InstallationCandidate]:
     """
     Filter out candidates whose hashes aren't allowed, and return a new
     list of candidates.
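One detail of evaluate_link worth illustrating: a trailing -pyX[.Y] marker is split off the extracted version and checked against the target interpreter. The regex in isolation, with an invented fragment:

    import re

    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    version = "1.0-py3.8"  # hypothetical version fragment
    match = _py_version_re.search(version)
    if match:
        py_version = match.group(1)         # "3.8"
        version = version[: match.start()]  # "1.0"
        print(version, py_version)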
@@ -254,8 +271,8 @@ def filter_unallowed_hashes(
     """
     if not hashes:
         logger.debug(
-            'Given no hashes to check %s links for project %r: '
-            'discarding no candidates',
+            "Given no hashes to check %s links for project %r: "
+            "discarding no candidates",
             len(candidates),
             project_name,
         )
@@ -285,22 +302,22 @@ def filter_unallowed_hashes(
         filtered = list(candidates)

     if len(filtered) == len(candidates):
-        discard_message = 'discarding no candidates'
+        discard_message = "discarding no candidates"
     else:
-        discard_message = 'discarding {} non-matches:\n  {}'.format(
+        discard_message = "discarding {} non-matches:\n  {}".format(
             len(non_matches),
-            '\n  '.join(str(candidate.link) for candidate in non_matches)
+            "\n  ".join(str(candidate.link) for candidate in non_matches),
         )

     logger.debug(
-        'Checked %s links for project %r against %s hashes '
-        '(%s matches, %s no digest): %s',
+        "Checked %s links for project %r against %s hashes "
+        "(%s matches, %s no digest): %s",
         len(candidates),
         project_name,
         hashes.digest_count,
         match_count,
         len(matches_or_no_digest) - match_count,
-        discard_message
+        discard_message,
     )

     return filtered
@@ -315,10 +332,9 @@ class CandidatePreferences:

     def __init__(
         self,
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-    ):
-        # type: (...) -> None
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+    ) -> None:
         """
         :param allow_all_prereleases: Whether to allow all pre-releases.
         """
@@ -335,11 +351,10 @@ class BestCandidateResult:

     def __init__(
         self,
-        candidates,  # type: List[InstallationCandidate]
-        applicable_candidates,  # type: List[InstallationCandidate]
-        best_candidate,  # type: Optional[InstallationCandidate]
-    ):
-        # type: (...) -> None
+        candidates: List[InstallationCandidate],
+        applicable_candidates: List[InstallationCandidate],
+        best_candidate: Optional[InstallationCandidate],
+    ) -> None:
         """
         :param candidates: A sequence of all available candidates found.
         :param applicable_candidates: The applicable candidates.
@@ -358,16 +373,12 @@ class BestCandidateResult:

         self.best_candidate = best_candidate

-    def iter_all(self):
-        # type: () -> Iterable[InstallationCandidate]
-        """Iterate through all candidates.
-        """
+    def iter_all(self) -> Iterable[InstallationCandidate]:
+        """Iterate through all candidates."""
         return iter(self._candidates)

-    def iter_applicable(self):
-        # type: () -> Iterable[InstallationCandidate]
-        """Iterate through the applicable candidates.
-        """
+    def iter_applicable(self) -> Iterable[InstallationCandidate]:
+        """Iterate through the applicable candidates."""
         return iter(self._applicable_candidates)


@@ -381,14 +392,13 @@ class CandidateEvaluator:
     @classmethod
     def create(
         cls,
-        project_name,  # type: str
-        target_python=None,  # type: Optional[TargetPython]
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        specifier=None,  # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,  # type: Optional[Hashes]
-    ):
-        # type: (...) -> CandidateEvaluator
+        project_name: str,
+        target_python: Optional[TargetPython] = None,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> "CandidateEvaluator":
         """Create a CandidateEvaluator object.

         :param target_python: The target Python interpreter to use when
@@ -417,14 +427,13 @@ class CandidateEvaluator:

     def __init__(
         self,
-        project_name,  # type: str
-        supported_tags,  # type: List[Tag]
-        specifier,  # type: specifiers.BaseSpecifier
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        hashes=None,  # type: Optional[Hashes]
-    ):
-        # type: (...) -> None
+        project_name: str,
+        supported_tags: List[Tag],
+        specifier: specifiers.BaseSpecifier,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        hashes: Optional[Hashes] = None,
+    ) -> None:
         """
         :param supported_tags: The PEP 425 tags supported by the target
             Python in order of preference (most preferred first).
@@ -444,9 +453,8 @@ class CandidateEvaluator:

     def get_applicable_candidates(
         self,
-        candidates,  # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> List[InstallationCandidate]
+        candidates: List[InstallationCandidate],
+    ) -> List[InstallationCandidate]:
         """
         Return the applicable candidates from a list of candidates.
         """
@@ -454,7 +462,8 @@ class CandidateEvaluator:
         allow_prereleases = self._allow_all_prereleases or None
         specifier = self._specifier
         versions = {
-            str(v) for v in specifier.filter(
+            str(v)
+            for v in specifier.filter(
                 # We turn the version object into a str here because otherwise
                 # when we're debundled but setuptools isn't, Python will see
                 # packaging.version.Version and
@@ -468,9 +477,7 @@ class CandidateEvaluator:
         }

         # Again, converting version to str to deal with debundling.
-        applicable_candidates = [
-            c for c in candidates if str(c.version) in versions
-        ]
+        applicable_candidates = [c for c in candidates if str(c.version) in versions]

         filtered_applicable_candidates = filter_unallowed_hashes(
             candidates=applicable_candidates,
@@ -480,8 +487,7 @@ class CandidateEvaluator:

         return sorted(filtered_applicable_candidates, key=self._sort_key)

-    def _sort_key(self, candidate):
-        # type: (InstallationCandidate) -> CandidateSortingKey
+    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
         """
         Function to pass as the `key` argument to a call to sorted() to sort
         InstallationCandidates by preference.
@@ -513,16 +519,18 @@ class CandidateEvaluator:
         """
         valid_tags = self._supported_tags
         support_num = len(valid_tags)
-        build_tag = ()  # type: BuildTag
+        build_tag: BuildTag = ()
         binary_preference = 0
         link = candidate.link
         if link.is_wheel:
             # can raise InvalidWheelFilename
             wheel = Wheel(link.filename)
             try:
-                pri = -(wheel.find_most_preferred_tag(
-                    valid_tags, self._wheel_tag_preferences
-                ))
+                pri = -(
+                    wheel.find_most_preferred_tag(
+                        valid_tags, self._wheel_tag_preferences
+                    )
+                )
             except ValueError:
                 raise UnsupportedWheel(
                     "{} is not a supported wheel for this platform. It "
@@ -531,7 +539,7 @@ class CandidateEvaluator:
             if self._prefer_binary:
                 binary_preference = 1
             if wheel.build_tag is not None:
-                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                 build_tag_groups = match.groups()
                 build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
         else:  # sdist
@@ -539,15 +547,18 @@ class CandidateEvaluator:
         has_allowed_hash = int(link.is_hash_allowed(self._hashes))
         yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
         return (
-            has_allowed_hash, yank_value, binary_preference, candidate.version,
-            pri, build_tag,
+            has_allowed_hash,
+            yank_value,
+            binary_preference,
+            candidate.version,
+            pri,
+            build_tag,
         )

     def sort_best_candidate(
         self,
-        candidates,  # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> Optional[InstallationCandidate]
+        candidates: List[InstallationCandidate],
+    ) -> Optional[InstallationCandidate]:
         """
         Return the best candidate per the instance's sort order, or None if
         no candidate is acceptable.
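The tuple returned by _sort_key leans on Python's lexicographic tuple comparison: allowed hash first, then non-yanked over yanked, then (when preferred) binaries, then version, wheel-tag priority, and build tag. A toy illustration of the mechanism with hand-written key tuples (plain tuples stand in for Version objects and real sort keys; the best candidate is the maximum under this ordering):

    candidates = [
        # (has_allowed_hash, yank_value, binary_preference, version, pri, build_tag)
        (1, 0, 0, (1, 0), -2, ()),
        (1, -1, 0, (2, 0), -1, ()),  # yanked: loses despite the newer version
        (1, 0, 0, (1, 0), -1, ()),   # higher pri = more preferred wheel tag
    ]
    print(max(candidates))  # (1, 0, 0, (1, 0), -1, ())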
@@ -559,9 +570,8 @@ class CandidateEvaluator:

     def compute_best_candidate(
         self,
-        candidates,  # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> BestCandidateResult
+        candidates: List[InstallationCandidate],
+    ) -> BestCandidateResult:
         """
         Compute and return a `BestCandidateResult` instance.
         """
@@ -585,14 +595,14 @@ class PackageFinder:

     def __init__(
         self,
-        link_collector,  # type: LinkCollector
-        target_python,  # type: TargetPython
-        allow_yanked,  # type: bool
-        format_control=None,  # type: Optional[FormatControl]
-        candidate_prefs=None,  # type: CandidatePreferences
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
+        link_collector: LinkCollector,
+        target_python: TargetPython,
+        allow_yanked: bool,
+        use_deprecated_html5lib: bool,
+        format_control: Optional[FormatControl] = None,
+        candidate_prefs: Optional[CandidatePreferences] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
         """
         This constructor is primarily meant to be used by the create() class
         method and from tests.
@@ -613,11 +623,12 @@ class PackageFinder:
         self._ignore_requires_python = ignore_requires_python
         self._link_collector = link_collector
         self._target_python = target_python
+        self._use_deprecated_html5lib = use_deprecated_html5lib

         self.format_control = format_control

         # These are boring links that have already been logged somehow.
-        self._logged_links = set()  # type: Set[Link]
+        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

         # Don't include an allow_yanked default value to make sure each call
         # site considers whether yanked releases are allowed. This also causes
@@ -626,11 +637,12 @@ class PackageFinder:
     @classmethod
     def create(
         cls,
-        link_collector,  # type: LinkCollector
-        selection_prefs,  # type: SelectionPreferences
-        target_python=None,  # type: Optional[TargetPython]
-    ):
-        # type: (...) -> PackageFinder
+        link_collector: LinkCollector,
+        selection_prefs: SelectionPreferences,
+        target_python: Optional[TargetPython] = None,
+        *,
+        use_deprecated_html5lib: bool,
+    ) -> "PackageFinder":
         """Create a PackageFinder.

         :param selection_prefs: The candidate selection preferences, as a
@@ -654,59 +666,57 @@ class PackageFinder:
             allow_yanked=selection_prefs.allow_yanked,
             format_control=selection_prefs.format_control,
             ignore_requires_python=selection_prefs.ignore_requires_python,
+            use_deprecated_html5lib=use_deprecated_html5lib,
         )

     @property
-    def target_python(self):
-        # type: () -> TargetPython
+    def target_python(self) -> TargetPython:
         return self._target_python

     @property
-    def search_scope(self):
-        # type: () -> SearchScope
+    def search_scope(self) -> SearchScope:
         return self._link_collector.search_scope

     @search_scope.setter
-    def search_scope(self, search_scope):
-        # type: (SearchScope) -> None
+    def search_scope(self, search_scope: SearchScope) -> None:
         self._link_collector.search_scope = search_scope

     @property
-    def find_links(self):
-        # type: () -> List[str]
+    def find_links(self) -> List[str]:
         return self._link_collector.find_links

     @property
-    def index_urls(self):
-        # type: () -> List[str]
+    def index_urls(self) -> List[str]:
         return self.search_scope.index_urls

     @property
-    def trusted_hosts(self):
-        # type: () -> Iterable[str]
+    def trusted_hosts(self) -> Iterable[str]:
         for host_port in self._link_collector.session.pip_trusted_origins:
             yield build_netloc(*host_port)

     @property
-    def allow_all_prereleases(self):
-        # type: () -> bool
+    def allow_all_prereleases(self) -> bool:
         return self._candidate_prefs.allow_all_prereleases

-    def set_allow_all_prereleases(self):
-        # type: () -> None
+    def set_allow_all_prereleases(self) -> None:
         self._candidate_prefs.allow_all_prereleases = True

     @property
-    def prefer_binary(self):
-        # type: () -> bool
+    def prefer_binary(self) -> bool:
         return self._candidate_prefs.prefer_binary

-    def set_prefer_binary(self):
-        # type: () -> None
+    def set_prefer_binary(self) -> None:
         self._candidate_prefs.prefer_binary = True

-    def make_link_evaluator(self, project_name):
-        # type: (str) -> LinkEvaluator
+    def requires_python_skipped_reasons(self) -> List[str]:
+        reasons = {
+            detail
+            for _, result, detail in self._logged_links
+            if result == LinkType.requires_python_mismatch
+        }
+        return sorted(reasons)
+
+    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
         canonical_name = canonicalize_name(project_name)
         formats = self.format_control.get_allowed_formats(canonical_name)

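Because _logged_links now stores (link, result, detail) triples, per-category reporting reduces to a set comprehension, as requires_python_skipped_reasons shows above. The same filtering in miniature, with toy stand-ins for Link and the logged entries:

    import enum

    class LinkType(enum.Enum):
        candidate = enum.auto()
        requires_python_mismatch = enum.auto()

    logged = {
        ("https://x/pkg-2.0.tar.gz", LinkType.requires_python_mismatch,
         "2.0 Requires-Python >=3.8"),
        ("https://x/pkg-1.0.tar.gz", LinkType.candidate, "1.0"),
    }
    reasons = sorted(
        detail
        for _, result, detail in logged
        if result == LinkType.requires_python_mismatch
    )
    print(reasons)  # ['2.0 Requires-Python >=3.8']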
@@ -719,14 +729,13 @@ class PackageFinder:
             ignore_requires_python=self._ignore_requires_python,
         )

-    def _sort_links(self, links):
-        # type: (Iterable[Link]) -> List[Link]
+    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
         """
         Returns elements of links in order, non-egg links first, egg links
         second, while eliminating duplicates
         """
         eggs, no_eggs = [], []
-        seen = set()  # type: Set[Link]
+        seen: Set[Link] = set()
         for link in links:
             if link not in seen:
                 seen.add(link)
@@ -736,34 +745,35 @@ class PackageFinder:
                 no_eggs.append(link)
         return no_eggs + eggs

-    def _log_skipped_link(self, link, reason):
-        # type: (Link, str) -> None
-        if link not in self._logged_links:
+    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
+        entry = (link, result, detail)
+        if entry not in self._logged_links:
             # Put the link at the end so the reason is more visible and because
             # the link string is usually very long.
-            logger.debug('Skipping link: %s: %s', reason, link)
-            self._logged_links.add(link)
+            logger.debug("Skipping link: %s: %s", detail, link)
+            self._logged_links.add(entry)

-    def get_install_candidate(self, link_evaluator, link):
-        # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate]
+    def get_install_candidate(
+        self, link_evaluator: LinkEvaluator, link: Link
+    ) -> Optional[InstallationCandidate]:
         """
         If the link is a candidate for install, convert it to an
         InstallationCandidate and return it. Otherwise, return None.
         """
-        is_candidate, result = link_evaluator.evaluate_link(link)
-        if not is_candidate:
-            if result:
-                self._log_skipped_link(link, reason=result)
+        result, detail = link_evaluator.evaluate_link(link)
+        if result != LinkType.candidate:
+            self._log_skipped_link(link, result, detail)
             return None

         return InstallationCandidate(
             name=link_evaluator.project_name,
             link=link,
-            version=result,
+            version=detail,
         )

-    def evaluate_links(self, link_evaluator, links):
-        # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate]
+    def evaluate_links(
+        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
+    ) -> List[InstallationCandidate]:
         """
         Convert links that are candidates to InstallationCandidate objects.
         """
@@ -775,16 +785,18 @@ class PackageFinder:

         return candidates

-    def process_project_url(self, project_url, link_evaluator):
-        # type: (Link, LinkEvaluator) -> List[InstallationCandidate]
+    def process_project_url(
+        self, project_url: Link, link_evaluator: LinkEvaluator
+    ) -> List[InstallationCandidate]:
         logger.debug(
-            'Fetching project page and analyzing links: %s', project_url,
+            "Fetching project page and analyzing links: %s",
+            project_url,
         )
         html_page = self._link_collector.fetch_page(project_url)
         if html_page is None:
             return []

-        page_links = list(parse_links(html_page))
+        page_links = list(parse_links(html_page, self._use_deprecated_html5lib))

         with indent_log():
             package_links = self.evaluate_links(
@@ -795,8 +807,7 @@ class PackageFinder:
         return package_links

     @functools.lru_cache(maxsize=None)
-    def find_all_candidates(self, project_name):
-        # type: (str) -> List[InstallationCandidate]
+    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
         """Find all available InstallationCandidate for project_name

         This checks index_urls and find_links.
@@ -835,7 +846,14 @@ class PackageFinder:
         )

         if logger.isEnabledFor(logging.DEBUG) and file_candidates:
-            paths = [url_to_path(c.link.url) for c in file_candidates]
+            paths = []
+            for candidate in file_candidates:
+                assert candidate.link.url  # we need to have a URL
+                try:
+                    paths.append(candidate.link.file_path)
+                except Exception:
+                    paths.append(candidate.link.url)  # it's not a local file
+
             logger.debug("Local files found: %s", ", ".join(paths))

         # This is an intentional priority ordering
@@ -843,13 +861,11 @@ class PackageFinder:

     def make_candidate_evaluator(
         self,
-        project_name,  # type: str
-        specifier=None,  # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,  # type: Optional[Hashes]
-    ):
-        # type: (...) -> CandidateEvaluator
-        """Create a CandidateEvaluator object to use.
-        """
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> CandidateEvaluator:
+        """Create a CandidateEvaluator object to use."""
         candidate_prefs = self._candidate_prefs
         return CandidateEvaluator.create(
             project_name=project_name,
@@ -863,11 +879,10 @@ class PackageFinder:
     @functools.lru_cache(maxsize=None)
     def find_best_candidate(
         self,
-        project_name,  # type: str
-        specifier=None,  # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,  # type: Optional[Hashes]
-    ):
-        # type: (...) -> BestCandidateResult
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> BestCandidateResult:
         """Find matches for the given project and specifier.

         :param specifier: An optional object implementing `filter`
@@ -884,8 +899,9 @@ class PackageFinder:
         )
         return candidate_evaluator.compute_best_candidate(candidates)

-    def find_requirement(self, req, upgrade):
-        # type: (InstallRequirement, bool) -> Optional[InstallationCandidate]
+    def find_requirement(
+        self, req: InstallRequirement, upgrade: bool
+    ) -> Optional[InstallationCandidate]:
         """Try to find a Link matching req

         Expects req, an InstallRequirement and upgrade, a boolean
@@ -894,55 +910,60 @@ class PackageFinder:
         """
         hashes = req.hashes(trust_internet=False)
         best_candidate_result = self.find_best_candidate(
-            req.name, specifier=req.specifier, hashes=hashes,
+            req.name,
+            specifier=req.specifier,
+            hashes=hashes,
         )
         best_candidate = best_candidate_result.best_candidate

-        installed_version = None  # type: Optional[_BaseVersion]
+        installed_version: Optional[_BaseVersion] = None
         if req.satisfied_by is not None:
-            installed_version = parse_version(req.satisfied_by.version)
+            installed_version = req.satisfied_by.version

-        def _format_versions(cand_iter):
-            # type: (Iterable[InstallationCandidate]) -> str
+        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
             # This repeated parse_version and str() conversion is needed to
             # handle different vendoring sources from pip and pkg_resources.
             # If we stop using the pkg_resources provided specifier and start
             # using our own, we can drop the cast to str().
-            return ", ".join(sorted(
-                {str(c.version) for c in cand_iter},
-                key=parse_version,
-            )) or "none"
+            return (
+                ", ".join(
+                    sorted(
+                        {str(c.version) for c in cand_iter},
+                        key=parse_version,
+                    )
+                )
+                or "none"
+            )

         if installed_version is None and best_candidate is None:
             logger.critical(
-                'Could not find a version that satisfies the requirement %s '
-                '(from versions: %s)',
+                "Could not find a version that satisfies the requirement %s "
+                "(from versions: %s)",
                 req,
                 _format_versions(best_candidate_result.iter_all()),
             )

             raise DistributionNotFound(
-                'No matching distribution found for {}'.format(
-                    req)
+                "No matching distribution found for {}".format(req)
             )

         best_installed = False
         if installed_version and (
-                best_candidate is None or
-                best_candidate.version <= installed_version):
+            best_candidate is None or best_candidate.version <= installed_version
+        ):
             best_installed = True

         if not upgrade and installed_version is not None:
             if best_installed:
                 logger.debug(
-                    'Existing installed version (%s) is most up-to-date and '
-                    'satisfies requirement',
+                    "Existing installed version (%s) is most up-to-date and "
+                    "satisfies requirement",
                     installed_version,
                 )
             else:
                 logger.debug(
-                    'Existing installed version (%s) satisfies requirement '
-                    '(most up-to-date version is %s)',
+                    "Existing installed version (%s) satisfies requirement "
+                    "(most up-to-date version is %s)",
                     installed_version,
                     best_candidate.version,
                 )
@ -951,23 +972,21 @@ class PackageFinder:
|
||||||
if best_installed:
|
if best_installed:
|
||||||
# We have an existing version, and its the best version
|
# We have an existing version, and its the best version
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Installed version (%s) is most up-to-date (past versions: '
|
"Installed version (%s) is most up-to-date (past versions: %s)",
|
||||||
'%s)',
|
|
||||||
installed_version,
|
installed_version,
|
||||||
_format_versions(best_candidate_result.iter_applicable()),
|
_format_versions(best_candidate_result.iter_applicable()),
|
||||||
)
|
)
|
||||||
raise BestVersionAlreadyInstalled
|
raise BestVersionAlreadyInstalled
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Using version %s (newest of versions: %s)',
|
"Using version %s (newest of versions: %s)",
|
||||||
best_candidate.version,
|
best_candidate.version,
|
||||||
_format_versions(best_candidate_result.iter_applicable()),
|
_format_versions(best_candidate_result.iter_applicable()),
|
||||||
)
|
)
|
||||||
return best_candidate
|
return best_candidate
|
||||||
|
|
||||||
|
|
||||||
def _find_name_version_sep(fragment, canonical_name):
|
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
|
||||||
# type: (str, str) -> int
|
|
||||||
"""Find the separator's index based on the package's canonical name.
|
"""Find the separator's index based on the package's canonical name.
|
||||||
|
|
||||||
:param fragment: A <package>+<version> filename "fragment" (stem) or
|
:param fragment: A <package>+<version> filename "fragment" (stem) or
|
||||||
|
@ -993,8 +1012,7 @@ def _find_name_version_sep(fragment, canonical_name):
|
||||||
raise ValueError(f"{fragment} does not match {canonical_name}")
|
raise ValueError(f"{fragment} does not match {canonical_name}")
|
||||||
|
|
||||||
|
|
||||||
def _extract_version_from_fragment(fragment, canonical_name):
|
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
|
||||||
# type: (str, str) -> Optional[str]
|
|
||||||
"""Parse the version string from a <package>+<version> filename
|
"""Parse the version string from a <package>+<version> filename
|
||||||
"fragment" (stem) or egg fragment.
|
"fragment" (stem) or egg fragment.
|
||||||
|
|
||||||
|
|
|
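The reflowed `_format_versions` expression above sorts unique version strings numerically (so "10.0" sorts after "9.0") before joining them for log output. A minimal sketch of the same pattern, using the plain third-party `packaging` distribution rather than pip's vendored copy:

# Standalone sketch of the _format_versions pattern; assumes the third-party
# "packaging" library (pip itself uses its vendored pip._vendor.packaging).
from packaging.version import parse as parse_version

versions = {"9.0", "10.0", "9.0.1"}
formatted = ", ".join(sorted(versions, key=parse_version)) or "none"
print(formatted)  # 9.0, 9.0.1, 10.0 -- numeric order, not lexicographic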
@@ -1,16 +1,22 @@
+import functools
 import logging
+import os
 import pathlib
 import sys
 import sysconfig
-from typing import List, Optional
+from typing import Any, Dict, Generator, List, Optional, Tuple

 from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.virtualenv import running_under_virtualenv

 from . import _distutils, _sysconfig
 from .base import (
     USER_CACHE_DIR,
     get_major_minor_version,
     get_src_prefix,
+    is_osx_framework,
     site_packages,
     user_site,
 )
@@ -33,28 +39,171 @@ __all__ = [
 logger = logging.getLogger(__name__)


-def _default_base(*, user: bool) -> str:
-    if user:
-        base = sysconfig.get_config_var("userbase")
-    else:
-        base = sysconfig.get_config_var("base")
-    assert base is not None
-    return base
+_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")

+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)

-def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
-    if old == new:
-        return False
-    issue_url = "https://github.com/pypa/pip/issues/9617"
+
+def _should_use_sysconfig() -> bool:
+    """This function determines the value of _USE_SYSCONFIG.
+
+    By default, pip uses sysconfig on Python 3.10+.
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+    """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    try:
+        unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+    except KeyError:
+        return False
+    return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
+    platlib = scheme["platlib"]
+    if "/$platlibdir/" in platlib:
+        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+    if "/lib64/" not in platlib:
+        return False
+    unpatched = platlib.replace("/lib64/", "/lib/")
+    return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_lib() -> bool:
+    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+    This is the only way I can see to tell a Red Hat-patched Python.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return all(
+        k in INSTALL_SCHEMES
+        and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+        for k in ("unix_prefix", "unix_home")
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_debian_scheme() -> bool:
+    """Debian adds two additional schemes."""
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_scheme() -> bool:
+    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+    Red Hat's ``00251-change-user-install-location.patch`` changes the install
+    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+    (fortunately?) done quite unconditionally, so we create a default command
+    object without any configuration to detect this.
+    """
+    from distutils.command.install import install
+    from distutils.dist import Distribution
+
+    cmd: Any = install(Distribution())
+    cmd.finalize_options()
+    return (
+        cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+        and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_slackware_scheme() -> bool:
+    """Slackware patches sysconfig but fails to patch distutils and site.
+
+    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+    path, but does not do the same to the site module.
+    """
+    if user_site is None:  # User-site not available.
+        return False
+    try:
+        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+    except KeyError:  # User-site not available.
+        return False
+    return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_msys2_mingw_scheme() -> bool:
+    """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
+
+    However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
+    likely going to be included in their 3.10 release, so we ignore the warning.
+    See msys2/MINGW-packages#9319.
+
+    MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
+    and is missing the final ``"site-packages"``.
+    """
+    paths = sysconfig.get_paths("nt", expand=False)
+    return all(
+        "Lib" not in p and "lib" in p and not p.endswith("site-packages")
+        for p in (paths[key] for key in ("platlib", "purelib"))
+    )
+
+
+def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
+    ldversion = sysconfig.get_config_var("LDVERSION")
+    abiflags = getattr(sys, "abiflags", None)
+
+    # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
+    if not ldversion or not abiflags or not ldversion.endswith(abiflags):
+        yield from parts
+        return
+
+    # Strip sys.abiflags from LDVERSION-based path components.
+    for part in parts:
+        if part.endswith(ldversion):
+            part = part[: (0 - len(abiflags))]
+        yield part
+
+
+@functools.lru_cache(maxsize=None)
+def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
+    issue_url = "https://github.com/pypa/pip/issues/10151"
     message = (
         "Value for %s does not match. Please report this to <%s>"
         "\ndistutils: %s"
         "\nsysconfig: %s"
     )
-    logger.debug(message, key, issue_url, old, new)
+    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+    if old == new:
+        return False
+    _warn_mismatched(old, new, key=key)
     return True


+@functools.lru_cache(maxsize=None)
 def _log_context(
     *,
     user: bool = False,
@@ -62,29 +211,25 @@ def _log_context(
     root: Optional[str] = None,
     prefix: Optional[str] = None,
 ) -> None:
-    message = (
-        "Additional context:" "\nuser = %r" "\nhome = %r" "\nroot = %r" "\nprefix = %r"
-    )
-    logger.debug(message, user, home, root, prefix)
+    parts = [
+        "Additional context:",
+        "user = %r",
+        "home = %r",
+        "root = %r",
+        "prefix = %r",
+    ]
+
+    logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)


 def get_scheme(
-    dist_name,  # type: str
-    user=False,  # type: bool
-    home=None,  # type: Optional[str]
-    root=None,  # type: Optional[str]
-    isolated=False,  # type: bool
-    prefix=None,  # type: Optional[str]
-):
-    # type: (...) -> Scheme
-    old = _distutils.get_scheme(
-        dist_name,
-        user=user,
-        home=home,
-        root=root,
-        isolated=isolated,
-        prefix=prefix,
-    )
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
     new = _sysconfig.get_scheme(
         dist_name,
         user=user,
@@ -93,14 +238,26 @@ def get_scheme(
         isolated=isolated,
         prefix=prefix,
     )
+    if _USE_SYSCONFIG:
+        return new

-    base = prefix or home or _default_base(user=user)
-    warned = []
+    old = _distutils.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+
+    warning_contexts = []
     for k in SCHEME_KEYS:
-        # Extra join because distutils can return relative paths.
-        old_v = pathlib.Path(base, getattr(old, k))
+        old_v = pathlib.Path(getattr(old, k))
         new_v = pathlib.Path(getattr(new, k))

+        if old_v == new_v:
+            continue
+
         # distutils incorrectly put PyPy packages under ``site-packages/python``
         # in the ``posix_home`` scheme, but PyPy devs said they expect the
         # directory name to be ``pypy`` instead. So we treat this as a bug fix
@@ -110,59 +267,240 @@ def get_scheme(
             and home is not None
             and k in ("platlib", "purelib")
             and old_v.parent == new_v.parent
-            and old_v.name == "python"
-            and new_v.name == "pypy"
+            and old_v.name.startswith("python")
+            and new_v.name.startswith("pypy")
         )
         if skip_pypy_special_case:
             continue

-        warned.append(_warn_if_mismatch(old_v, new_v, key=f"scheme.{k}"))
+        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
+        # the ``include`` value, but distutils's ``headers`` does. We'll let
+        # CPython decide whether this is a bug or feature. See bpo-43948.
+        skip_osx_framework_user_special_case = (
+            user
+            and is_osx_framework()
+            and k == "headers"
+            and old_v.parent.parent == new_v.parent
+            and old_v.parent.name.startswith("python")
+        )
+        if skip_osx_framework_user_special_case:
+            continue

-    if any(warned):
-        _log_context(user=user, home=home, root=root, prefix=prefix)
+        # On Red Hat and derived Linux distributions, distutils is patched to
+        # use "lib64" instead of "lib" for platlib.
+        if k == "platlib" and _looks_like_red_hat_lib():
+            continue
+
+        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+        # sys.platlibdir, but distutils's unix_user incorrectly continues
+        # using the same $usersite for both platlib and purelib. This creates a
+        # mismatch when sys.platlibdir is not "lib".
+        skip_bpo_44860 = (
+            user
+            and k == "platlib"
+            and not WINDOWS
+            and sys.version_info >= (3, 9)
+            and _PLATLIBDIR != "lib"
+            and _looks_like_bpo_44860()
+        )
+        if skip_bpo_44860:
+            continue
+
+        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
+        # but not usersite to match the location.
+        skip_slackware_user_scheme = (
+            user
+            and k in ("platlib", "purelib")
+            and not WINDOWS
+            and _looks_like_slackware_scheme()
+        )
+        if skip_slackware_user_scheme:
+            continue
+
+        # Both Debian and Red Hat patch Python to place the system site under
+        # /usr/local instead of /usr. Debian also places lib in dist-packages
+        # instead of site-packages, but the /usr/local check should cover it.
+        skip_linux_system_special_case = (
+            not (user or home or prefix or running_under_virtualenv())
+            and old_v.parts[1:3] == ("usr", "local")
+            and len(new_v.parts) > 1
+            and new_v.parts[1] == "usr"
+            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
+            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
+        )
+        if skip_linux_system_special_case:
+            continue
+
+        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
+        # the "pythonX.Y" part of the path, but distutils does.
+        skip_sysconfig_abiflag_bug = (
+            sys.version_info < (3, 8)
+            and not WINDOWS
+            and k in ("headers", "platlib", "purelib")
+            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
+        )
+        if skip_sysconfig_abiflag_bug:
+            continue
+
+        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
+        # part of the path. This is incorrect and will be fixed in MSYS.
+        skip_msys2_mingw_bug = (
+            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
+        )
+        if skip_msys2_mingw_bug:
+            continue
+
+        # CPython's POSIX install script invokes pip (via ensurepip) against the
+        # interpreter located in the source tree, not the install site. This
+        # triggers special logic in sysconfig that's not present in distutils.
+        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
+        skip_cpython_build = (
+            sysconfig.is_python_build(check_home=True)
+            and not WINDOWS
+            and k in ("headers", "include", "platinclude")
+        )
+        if skip_cpython_build:
+            continue
+
+        warning_contexts.append((old_v, new_v, f"scheme.{k}"))
+
+    if not warning_contexts:
+        return old
+
+    # Check if this path mismatch is caused by distutils config files. Those
+    # files will no longer work once we switch to sysconfig, so this raises a
+    # deprecation message for them.
+    default_old = _distutils.distutils_scheme(
+        dist_name,
+        user,
+        home,
+        root,
+        isolated,
+        prefix,
+        ignore_config_files=True,
+    )
+    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
+        deprecated(
+            reason=(
+                "Configuring installation scheme with distutils config files "
+                "is deprecated and will no longer work in the near future. If you "
+                "are using a Homebrew or Linuxbrew Python, please see discussion "
+                "at https://github.com/Homebrew/homebrew-core/issues/76621"
+            ),
+            replacement=None,
+            gone_in=None,
+        )
+        return old
+
+    # Post warnings about this mismatch so user can report them back.
+    for old_v, new_v, key in warning_contexts:
+        _warn_mismatched(old_v, new_v, key=key)
+    _log_context(user=user, home=home, root=root, prefix=prefix)

     return old


-def get_bin_prefix():
-    # type: () -> str
-    old = _distutils.get_bin_prefix()
+def get_bin_prefix() -> str:
     new = _sysconfig.get_bin_prefix()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_bin_prefix()
     if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
         _log_context()
     return old


-def get_bin_user():
-    # type: () -> str
+def get_bin_user() -> str:
     return _sysconfig.get_scheme("", user=True).scripts


-def get_purelib():
-    # type: () -> str
+def _looks_like_deb_system_dist_packages(value: str) -> bool:
+    """Check if the value is Debian's APT-controlled dist-packages.
+
+    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
+    default package path controlled by APT, but does not patch ``sysconfig`` to
+    do the same. This is similar to the bug worked around in ``get_scheme()``,
+    but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
+    we can't do anything about this Debian bug, and this detection allows us to
+    skip the warning when needed.
+    """
+    if not _looks_like_debian_scheme():
+        return False
+    if value == "/usr/lib/python3/dist-packages":
+        return True
+    return False
+
+
+def get_purelib() -> str:
     """Return the default pure-Python lib location."""
-    old = _distutils.get_purelib()
     new = _sysconfig.get_purelib()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_purelib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
     if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
         _log_context()
     return old


-def get_platlib():
-    # type: () -> str
+def get_platlib() -> str:
     """Return the default platform-shared lib location."""
-    old = _distutils.get_platlib()
     new = _sysconfig.get_platlib()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_platlib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
     if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
         _log_context()
     return old


-def get_prefixed_libs(prefix):
-    # type: (str) -> List[str]
+def _deduplicated(v1: str, v2: str) -> List[str]:
+    """Deduplicate values from a list."""
+    if v1 == v2:
+        return [v1]
+    return [v1, v2]
+
+
+def _looks_like_apple_library(path: str) -> bool:
+    """Apple patches sysconfig to *always* look under */Library/Python*."""
+    if sys.platform[:6] != "darwin":
+        return False
+    return path == f"/Library/Python/{get_major_minor_version()}/site-packages"
+
+
+def get_prefixed_libs(prefix: str) -> List[str]:
     """Return the lib locations under ``prefix``."""
-    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
     new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
+    if _USE_SYSCONFIG:
+        return _deduplicated(new_pure, new_plat)
+
+    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
+    old_lib_paths = _deduplicated(old_pure, old_plat)
+
+    # Apple's Python (shipped with Xcode and Command Line Tools) hard-codes
+    # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will
+    # cause serious build isolation bugs when Apple starts shipping 3.10 because
+    # pip will install build backends to the wrong location. This tells users
+    # who is at fault so Apple may notice it and fix the issue in time.
+    if all(_looks_like_apple_library(p) for p in old_lib_paths):
+        deprecated(
+            reason=(
+                "Python distributed by Apple's Command Line Tools incorrectly "
+                "patches sysconfig to always point to '/Library/Python'. This "
+                "will cause build isolation to operate incorrectly on Python "
+                "3.10 or later. Please help report this to Apple so they can "
+                "fix this. https://developer.apple.com/bug-reporting/"
+            ),
+            replacement=None,
+            gone_in=None,
+        )
+        return old_lib_paths

     warned = [
         _warn_if_mismatch(
@@ -179,6 +517,4 @@ def get_prefixed_libs(prefix):
     if any(warned):
         _log_context(prefix=prefix)

-    if old_pure == old_plat:
-        return [old_pure]
-    return [old_pure, old_plat]
+    return old_lib_paths
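The `_should_use_sysconfig()` hook above reads an optional distributor override from the `sysconfig` module itself and falls back to a version-based default. A small sketch of that lookup in isolation (illustrative only; real code should not patch `sysconfig` like this):

# Sketch of the distributor-override lookup used by _should_use_sysconfig().
import sys
import sysconfig

default = sys.version_info >= (3, 10)
print(bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", default)))  # the default

# A distributor opting out would patch the module before pip imports it:
sysconfig._PIP_USE_SYSCONFIG = False
print(bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", default)))  # now False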
@@ -3,6 +3,7 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

+import logging
 import os
 import sys
 from distutils.cmd import Command as DistutilsCommand
@@ -17,23 +18,40 @@ from pip._internal.utils.virtualenv import running_under_virtualenv

 from .base import get_major_minor_version

+logger = logging.getLogger(__name__)

-def _distutils_scheme(
-    dist_name, user=False, home=None, root=None, isolated=False, prefix=None
-):
-    # type:(str, bool, str, str, bool, str) -> Dict[str, str]
+
+def distutils_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: str = None,
+    root: str = None,
+    isolated: bool = False,
+    prefix: str = None,
+    *,
+    ignore_config_files: bool = False,
+) -> Dict[str, str]:
     """
     Return a distutils install scheme
     """
     from distutils.dist import Distribution

-    dist_args = {"name": dist_name}  # type: Dict[str, Union[str, List[str]]]
+    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
     if isolated:
         dist_args["script_args"] = ["--no-user-cfg"]

     d = Distribution(dist_args)
-    d.parse_config_files()
-    obj = None  # type: Optional[DistutilsCommand]
+    if not ignore_config_files:
+        try:
+            d.parse_config_files()
+        except UnicodeDecodeError:
+            # Typeshed does not include find_config_files() for some reason.
+            paths = d.find_config_files()  # type: ignore
+            logger.warning(
+                "Ignore distutils configs in %s due to encoding errors.",
+                ", ".join(os.path.basename(p) for p in paths),
+            )
+    obj: Optional[DistutilsCommand] = None
     obj = d.get_command_obj("install", create=True)
     assert obj is not None
     i = cast(distutils_install_command, obj)
@@ -63,8 +81,14 @@ def _distutils_scheme(
     scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

     if running_under_virtualenv():
+        if home:
+            prefix = home
+        elif user:
+            prefix = i.install_userbase
+        else:
+            prefix = i.prefix
         scheme["headers"] = os.path.join(
-            i.prefix,
+            prefix,
             "include",
             "site",
             f"python{get_major_minor_version()}",
@@ -73,23 +97,19 @@ def _distutils_scheme(

     if root is not None:
         path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
-        scheme["headers"] = os.path.join(
-            root,
-            path_no_drive[1:],
-        )
+        scheme["headers"] = os.path.join(root, path_no_drive[1:])

     return scheme


 def get_scheme(
-    dist_name,  # type: str
-    user=False,  # type: bool
-    home=None,  # type: Optional[str]
-    root=None,  # type: Optional[str]
-    isolated=False,  # type: bool
-    prefix=None,  # type: Optional[str]
-):
-    # type: (...) -> Scheme
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
     """
     Get the "scheme" corresponding to the input parameters. The distutils
     documentation provides the context for the available schemes:
@@ -107,7 +127,7 @@ def get_scheme(
     :param prefix: indicates to use the "prefix" scheme and provides the
         base directory for the same
     """
-    scheme = _distutils_scheme(dist_name, user, home, root, isolated, prefix)
+    scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
     return Scheme(
         platlib=scheme["platlib"],
         purelib=scheme["purelib"],
@@ -117,33 +137,32 @@ def get_scheme(
     )


-def get_bin_prefix():
-    # type: () -> str
+def get_bin_prefix() -> str:
+    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
+    # so we need to call normpath to eliminate them.
+    prefix = os.path.normpath(sys.prefix)
     if WINDOWS:
-        bin_py = os.path.join(sys.prefix, "Scripts")
+        bin_py = os.path.join(prefix, "Scripts")
         # buildout uses 'bin' on Windows too?
         if not os.path.exists(bin_py):
-            bin_py = os.path.join(sys.prefix, "bin")
+            bin_py = os.path.join(prefix, "bin")
         return bin_py
     # Forcing to use /usr/local/bin for standard macOS framework installs
     # Also log to ~/Library/Logs/ for use with the Console.app log viewer
-    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
         return "/usr/local/bin"
-    return os.path.join(sys.prefix, "bin")
+    return os.path.join(prefix, "bin")


-def get_purelib():
-    # type: () -> str
+def get_purelib() -> str:
     return get_python_lib(plat_specific=False)


-def get_platlib():
-    # type: () -> str
+def get_platlib() -> str:
     return get_python_lib(plat_specific=True)


-def get_prefixed_libs(prefix):
-    # type: (str) -> Tuple[str, str]
+def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
     return (
         get_python_lib(plat_specific=False, prefix=prefix),
         get_python_lib(plat_specific=True, prefix=prefix),
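`distutils_scheme()` above drives the legacy machinery by materializing a distutils `install` command and reading its computed attributes. A hedged sketch of that flow (distutils is deprecated since Python 3.10 and removed in 3.12, so this only runs on interpreters that still ship it):

# Sketch of the distutils flow distutils_scheme() wraps.
from distutils.dist import Distribution

d = Distribution({"name": "example"})
cmd = d.get_command_obj("install", create=True)
cmd.ensure_finalized()  # fills in install_lib, install_scripts, etc.
print(cmd.install_lib)      # where purelib/platlib modules would land
print(cmd.install_scripts)  # where console scripts would land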
@@ -9,14 +9,14 @@ from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationI
 from pip._internal.models.scheme import SCHEME_KEYS, Scheme
 from pip._internal.utils.virtualenv import running_under_virtualenv

-from .base import get_major_minor_version
+from .base import get_major_minor_version, is_osx_framework

 logger = logging.getLogger(__name__)


 # Notes on _infer_* functions.
-# Unfortunately ``_get_default_scheme()`` is private, so there's no way to
-# ask things like "what is the '_prefix' scheme on this platform". These
+# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
+# way to ask things like "what is the '_prefix' scheme on this platform". These
 # functions try to answer that with some heuristics while accounting for ad-hoc
 # platforms not covered by CPython's default sysconfig implementation. If the
 # ad-hoc implementation does not fully implement sysconfig, we'll fall back to
@@ -24,13 +24,42 @@ logger = logging.getLogger(__name__)

 _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())

+_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)

-def _infer_prefix():
-    # type: () -> str
+
+def _should_use_osx_framework_prefix() -> bool:
+    """Check for Apple's ``osx_framework_library`` scheme.
+
+    Python distributed by Apple's Command Line Tools has this special scheme
+    that's used when:
+
+    * This is a framework build.
+    * We are installing into the system prefix.
+
+    This does not account for ``pip install --prefix`` (also means we're not
+    installing to the system prefix), which should use ``posix_prefix``, but
+    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+    wouldn't be able to magically switch between ``osx_framework_library`` and
+    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+    means its behavior is consistent whether we use the stdlib implementation
+    or our own, and we deal with this special case in ``get_scheme()`` instead.
+    """
+    return (
+        "osx_framework_library" in _AVAILABLE_SCHEMES
+        and not running_under_virtualenv()
+        and is_osx_framework()
+    )
+
+
+def _infer_prefix() -> str:
     """Try to find a prefix scheme for the current platform.

     This tries:

+    * A special ``osx_framework_library`` for Python distributed by Apple's
+      Command Line Tools, when not running in a virtual environment.
     * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
     * Implementation without OS, used by PyPy on POSIX (``pypy``).
     * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
@@ -38,6 +67,10 @@ def _infer_prefix():

     If none of the above works, fall back to ``posix_prefix``.
     """
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("prefix")
+    if _should_use_osx_framework_prefix():
+        return "osx_framework_library"
     implementation_suffixed = f"{sys.implementation.name}_{os.name}"
     if implementation_suffixed in _AVAILABLE_SCHEMES:
         return implementation_suffixed
@@ -51,10 +84,14 @@ def _infer_prefix():
     return "posix_prefix"


-def _infer_user():
-    # type: () -> str
+def _infer_user() -> str:
     """Try to find a user scheme for the current platform."""
-    suffixed = f"{os.name}_user"
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("user")
+    if is_osx_framework() and not running_under_virtualenv():
+        suffixed = "osx_framework_user"
+    else:
+        suffixed = f"{os.name}_user"
     if suffixed in _AVAILABLE_SCHEMES:
         return suffixed
     if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
@@ -62,9 +99,10 @@ def _infer_user():
     return "posix_user"


-def _infer_home():
-    # type: () -> str
+def _infer_home() -> str:
     """Try to find a home for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("home")
     suffixed = f"{os.name}_home"
     if suffixed in _AVAILABLE_SCHEMES:
         return suffixed
@@ -85,14 +123,13 @@ if sysconfig.get_config_var("userbase") is not None:


 def get_scheme(
-    dist_name,  # type: str
-    user=False,  # type: bool
-    home=None,  # type: typing.Optional[str]
-    root=None,  # type: typing.Optional[str]
-    isolated=False,  # type: bool
-    prefix=None,  # type: typing.Optional[str]
-):
-    # type: (...) -> Scheme
+    dist_name: str,
+    user: bool = False,
+    home: typing.Optional[str] = None,
+    root: typing.Optional[str] = None,
+    isolated: bool = False,
+    prefix: typing.Optional[str] = None,
+) -> Scheme:
     """
     Get the "scheme" corresponding to the input parameters.

@@ -118,6 +155,12 @@ def get_scheme(
     else:
         scheme_name = _infer_prefix()

+    # Special case: When installing into a custom prefix, use posix_prefix
+    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
+    # docstring for details.
+    if prefix is not None and scheme_name == "osx_framework_library":
+        scheme_name = "posix_prefix"
+
     if home is not None:
         variables = {k: home for k in _HOME_KEYS}
     elif prefix is not None:
@@ -156,25 +199,21 @@ def get_scheme(
     return scheme


-def get_bin_prefix():
-    # type: () -> str
+def get_bin_prefix() -> str:
     # Forcing to use /usr/local/bin for standard macOS framework installs.
     if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
         return "/usr/local/bin"
     return sysconfig.get_paths()["scripts"]


-def get_purelib():
-    # type: () -> str
+def get_purelib() -> str:
     return sysconfig.get_paths()["purelib"]


-def get_platlib():
-    # type: () -> str
+def get_platlib() -> str:
     return sysconfig.get_paths()["platlib"]


-def get_prefixed_libs(prefix):
-    # type: (str) -> typing.Tuple[str, str]
+def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
     paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
     return (paths["purelib"], paths["platlib"])
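The `_PREFERRED_SCHEME_API` shortcut above defers to `sysconfig.get_preferred_scheme()` when the interpreter provides it (Python 3.10+), and only falls back to name heuristics otherwise. A sketch of that probe on its own:

# Sketch of the scheme probe behind _infer_prefix().
import os
import sys
import sysconfig

preferred = getattr(sysconfig, "get_preferred_scheme", None)
if preferred is not None:  # Python 3.10+
    print(preferred("prefix"))
else:  # heuristic fallback over the advertised scheme names
    available = set(sysconfig.get_scheme_names())
    candidate = f"{sys.implementation.name}_{os.name}"
    print(candidate if candidate in available else "posix_prefix")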
@@ -1,3 +1,4 @@
+import functools
 import os
 import site
 import sys
@@ -11,11 +12,10 @@ from pip._internal.utils.virtualenv import running_under_virtualenv
 USER_CACHE_DIR = appdirs.user_cache_dir("pip")

 # FIXME doesn't account for venv linked to global site-packages
-site_packages = sysconfig.get_path("purelib")  # type: typing.Optional[str]
+site_packages: typing.Optional[str] = sysconfig.get_path("purelib")


-def get_major_minor_version():
-    # type: () -> str
+def get_major_minor_version() -> str:
     """
     Return the major-minor version of the current Python as a string, e.g.
     "3.7" or "3.10".
@@ -23,8 +23,7 @@ def get_major_minor_version():
     return "{}.{}".format(*sys.version_info)


-def get_src_prefix():
-    # type: () -> str
+def get_src_prefix() -> str:
     if running_under_virtualenv():
         src_prefix = os.path.join(sys.prefix, "src")
     else:
@@ -43,6 +42,11 @@ def get_src_prefix():
 try:
     # Use getusersitepackages if this is present, as it ensures that the
     # value is initialised properly.
-    user_site = site.getusersitepackages()  # type: typing.Optional[str]
+    user_site: typing.Optional[str] = site.getusersitepackages()
 except AttributeError:
     user_site = site.USER_SITE
+
+
+@functools.lru_cache(maxsize=None)
+def is_osx_framework() -> bool:
+    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
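`is_osx_framework()` above is a zero-argument predicate memoized with `functools.lru_cache(maxsize=None)`, so the config-var lookup runs once per process. The same pattern in isolation (`framework_build` is a hypothetical stand-in name):

# Memoized zero-argument predicate, mirroring the helper added above.
import functools
import sysconfig

@functools.lru_cache(maxsize=None)
def framework_build() -> bool:
    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))

framework_build()
framework_build()
print(framework_build.cache_info().misses)  # 1 -- computed only once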
@@ -1,8 +1,7 @@
 from typing import List, Optional


-def main(args=None):
-    # type: (Optional[List[str]]) -> int
+def main(args: Optional[List[str]] = None) -> int:
     """This is preserved for old console scripts that may still be referencing
     it.
@@ -1,36 +1,100 @@
-from typing import List, Optional
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast

-from .base import BaseDistribution, BaseEnvironment
+from pip._internal.utils.misc import strtobool
+
+from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+__all__ = [
+    "BaseDistribution",
+    "BaseEnvironment",
+    "FilesystemWheel",
+    "MemoryWheel",
+    "Wheel",
+    "get_default_environment",
+    "get_environment",
+    "get_wheel_distribution",
+    "select_backend",
+]


-def get_default_environment():
-    # type: () -> BaseEnvironment
+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+
+
+class Backend(Protocol):
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
+def get_default_environment() -> BaseEnvironment:
     """Get the default representation for the current environment.

     This returns an Environment instance from the chosen backend. The default
     Environment instance should be built from ``sys.path`` and may use caching
     to share instance state across calls.
     """
-    from .pkg_resources import Environment
-
-    return Environment.default()
+    return select_backend().Environment.default()


-def get_environment(paths):
-    # type: (Optional[List[str]]) -> BaseEnvironment
+def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
     """Get a representation of the environment specified by ``paths``.

     This returns an Environment instance from the chosen backend based on the
     given import paths. The backend must build a fresh instance representing
     the state of installed distributions when this function is called.
     """
-    from .pkg_resources import Environment
-
-    return Environment.from_paths(paths)
+    return select_backend().Environment.from_paths(paths)


-def get_wheel_distribution(wheel_path, canonical_name):
-    # type: (str, str) -> BaseDistribution
+def get_directory_distribution(directory: str) -> BaseDistribution:
+    """Get the distribution metadata representation in the specified directory.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given on-disk ``.dist-info`` directory.
+    """
+    return select_backend().Distribution.from_directory(directory)
+
+
+def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
     """Get the representation of the specified wheel's distribution metadata.

     This returns a Distribution instance from the chosen backend based on
@@ -38,6 +102,4 @@ def get_wheel_distribution(wheel_path, canonical_name):

     :param canonical_name: Normalized project name of the given wheel.
     """
-    from .pkg_resources import Distribution
-
-    return Distribution.from_wheel(wheel_path, canonical_name)
+    return select_backend().Distribution.from_wheel(wheel, canonical_name)
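`select_backend()` above treats a whole module as an interface: a `typing.Protocol` declares the attributes a backend module must expose, and `cast()` tells the type checker that a plain module object satisfies it. The pattern in miniature (`Greeter` and `demo_backend` are hypothetical names):

# Module-as-backend sketch; no pip code involved.
import types
from typing import Protocol, cast

class Greeter(Protocol):
    greeting: str

mod = types.ModuleType("demo_backend")
mod.greeting = "hello"

backend = cast(Greeter, mod)  # no runtime check; guidance for type checkers
print(backend.greeting)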
@ -1,85 +1,477 @@
|
||||||
|
import csv
|
||||||
|
import email.message
|
||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import pathlib
|
||||||
import re
|
import re
|
||||||
from typing import Container, Iterator, List, Optional, Union
|
import zipfile
|
||||||
|
from typing import (
|
||||||
|
IO,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Collection,
|
||||||
|
Container,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
Tuple,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
|
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
||||||
|
from pip._vendor.packaging.utils import NormalizedName
|
||||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||||
|
|
||||||
from pip._internal.utils.misc import stdlib_pkgs # TODO: Move definition here.
|
from pip._internal.exceptions import NoneMetadataError
|
||||||
|
from pip._internal.locations import site_packages, user_site
|
||||||
|
from pip._internal.models.direct_url import (
|
||||||
|
DIRECT_URL_METADATA_NAME,
|
||||||
|
DirectUrl,
|
||||||
|
DirectUrlValidationError,
|
||||||
|
)
|
||||||
|
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
|
||||||
|
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
|
||||||
|
from pip._internal.utils.misc import is_local, normalize_path
|
||||||
|
from pip._internal.utils.urls import url_to_path
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from typing import Protocol
|
||||||
|
else:
|
||||||
|
Protocol = object
|
||||||
|
|
||||||
DistributionVersion = Union[LegacyVersion, Version]
|
DistributionVersion = Union[LegacyVersion, Version]
|
||||||
|
|
||||||
|
InfoPath = Union[str, pathlib.PurePath]
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class BaseDistribution:
|
class BaseEntryPoint(Protocol):
|
||||||
@property
|
@property
|
||||||
def location(self):
|
def name(self) -> str:
|
||||||
# type: () -> Optional[str]
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def value(self) -> str:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def group(self) -> str:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_installed_files_path(
|
||||||
|
entry: Tuple[str, ...],
|
||||||
|
info: Tuple[str, ...],
|
||||||
|
) -> str:
|
||||||
|
"""Convert a legacy installed-files.txt path into modern RECORD path.
|
||||||
|
|
||||||
|
The legacy format stores paths relative to the info directory, while the
|
||||||
|
modern format stores paths relative to the package root, e.g. the
|
||||||
|
site-packages directory.
|
||||||
|
|
||||||
|
:param entry: Path parts of the installed-files.txt entry.
|
||||||
|
:param info: Path parts of the egg-info directory relative to package root.
|
||||||
|
:returns: The converted entry.
|
||||||
|
|
||||||
|
For best compatibility with symlinks, this does not use ``abspath()`` or
|
||||||
|
``Path.resolve()``, but tries to work with path parts:
|
||||||
|
|
||||||
|
1. While ``entry`` starts with ``..``, remove the equal amounts of parts
|
||||||
|
from ``info``; if ``info`` is empty, start appending ``..`` instead.
|
||||||
|
2. Join the two directly.
|
||||||
|
"""
|
||||||
|
while entry and entry[0] == "..":
|
||||||
|
if not info or info[-1] == "..":
|
||||||
|
info += ("..",)
|
||||||
|
else:
|
||||||
|
info = info[:-1]
|
||||||
|
entry = entry[1:]
|
||||||
|
return str(pathlib.Path(*info, *entry))
|
||||||
|
|
||||||
|
|
||||||
|
+class BaseDistribution(Protocol):
+    @classmethod
+    def from_directory(cls, directory: str) -> "BaseDistribution":
+        """Load the distribution from a metadata directory.
+
+        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
+        """Load the distribution from a given wheel.
+
+        :param wheel: A concrete wheel definition.
+        :param name: File name of the wheel.
+
+        :raises InvalidWheel: Whenever loading of the wheel causes a
+            :py:exc:`zipfile.BadZipFile` exception to be thrown.
+        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
+            internally.
+        """
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        return f"{self.raw_name} {self.version} ({self.location})"
+
+    def __str__(self) -> str:
+        return f"{self.raw_name} {self.version}"
+
+    @property
+    def location(self) -> Optional[str]:
         """Where the distribution is loaded from.
 
         A string value is not necessarily a filesystem path, since distributions
         can be loaded from other sources, e.g. arbitrary zip archives. ``None``
         means the distribution is created in-memory.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and files in the distribution.
         """
         raise NotImplementedError()
 
     @property
-    def metadata_version(self):
-        # type: () -> Optional[str]
-        """Value of "Metadata-Version:" in the distribution, if available."""
+    def editable_project_location(self) -> Optional[str]:
+        """The project location for editable distributions.
+
+        This is the directory where pyproject.toml or setup.py is located.
+        None if the distribution is not installed in editable mode.
+        """
+        # TODO: this property is relatively costly to compute, memoize it ?
+        direct_url = self.direct_url
+        if direct_url:
+            if direct_url.is_local_editable():
+                return url_to_path(direct_url.url)
+        else:
+            # Search for an .egg-link file by walking sys.path, as it was
+            # done before by dist_is_editable().
+            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
+            if egg_link_path:
+                # TODO: get project location from second line of egg_link file
+                #       (https://github.com/pypa/pip/issues/10243)
+                return self.location
+        return None
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        """The distribution's "installed" location.
+
+        This should generally be a ``site-packages`` directory. This is
+        usually ``dist.location``, except for legacy develop-installed packages,
+        where ``dist.location`` is the source code location, and this is where
+        the ``.egg-link`` file is.
+
+        The returned location is normalized (in particular, with symlinks removed).
+        """
         raise NotImplementedError()
 
     @property
-    def canonical_name(self):
-        # type: () -> str
+    def info_location(self) -> Optional[str]:
+        """Location of the .[egg|dist]-info directory or file.
+
+        Similarly to ``location``, a string value is not necessarily a
+        filesystem path. ``None`` means the distribution is created in-memory.
+
+        For a modern .dist-info installation on disk, this should be something
+        like ``{location}/{raw_name}-{version}.dist-info``.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and other files in the distribution.
+        """
         raise NotImplementedError()
 
     @property
-    def version(self):
-        # type: () -> DistributionVersion
+    def installed_by_distutils(self) -> bool:
+        """Whether this distribution is installed with legacy distutils format.
+
+        A distribution installed with "raw" distutils not patched by setuptools
+        uses one single file at ``info_location`` to store metadata. We need to
+        treat this specially on uninstallation.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        return pathlib.Path(info_location).is_file()
+
+    @property
+    def installed_as_egg(self) -> bool:
+        """Whether this distribution is installed as an egg.
+
+        This usually indicates the distribution was installed by (older versions
+        of) easy_install.
+        """
+        location = self.location
+        if not location:
+            return False
+        return location.endswith(".egg")
+
+    @property
+    def installed_with_setuptools_egg_info(self) -> bool:
+        """Whether this distribution is installed with the ``.egg-info`` format.
+
+        This usually indicates the distribution was installed with setuptools
+        with an old pip version or with ``single-version-externally-managed``.
+
+        Note that this ensures the metadata store is a directory. distutils can
+        also install an ``.egg-info``, but as a file, not a directory. This
+        property is *False* for that case. Also see ``installed_by_distutils``.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".egg-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def installed_with_dist_info(self) -> bool:
+        """Whether this distribution is installed with the "modern format".
+
+        This indicates a "modern" installation, e.g. storing metadata in the
+        ``.dist-info`` directory. This applies to installations made by
+        setuptools (but through pip, not directly), or anything using the
+        standardized build backend interface (PEP 517).
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".dist-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def canonical_name(self) -> NormalizedName:
         raise NotImplementedError()
 
     @property
-    def installer(self):
-        # type: () -> str
+    def version(self) -> DistributionVersion:
         raise NotImplementedError()
 
     @property
-    def editable(self):
-        # type: () -> bool
+    def setuptools_filename(self) -> str:
+        """Convert a project name to its setuptools-compatible filename.
+
+        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
+        """
+        return self.raw_name.replace("-", "_")
+
+    @property
+    def direct_url(self) -> Optional[DirectUrl]:
+        """Obtain a DirectUrl from this distribution.
+
+        Returns None if the distribution has no `direct_url.json` metadata,
+        or if `direct_url.json` is invalid.
+        """
+        try:
+            content = self.read_text(DIRECT_URL_METADATA_NAME)
+        except FileNotFoundError:
+            return None
+        try:
+            return DirectUrl.from_json(content)
+        except (
+            UnicodeDecodeError,
+            json.JSONDecodeError,
+            DirectUrlValidationError,
+        ) as e:
+            logger.warning(
+                "Error parsing %s for %s: %s",
+                DIRECT_URL_METADATA_NAME,
+                self.canonical_name,
+                e,
+            )
+            return None
+
+    @property
+    def installer(self) -> str:
+        try:
+            installer_text = self.read_text("INSTALLER")
+        except (OSError, ValueError, NoneMetadataError):
+            return ""  # Fail silently if the installer file cannot be read.
+        for line in installer_text.splitlines():
+            cleaned_line = line.strip()
+            if cleaned_line:
+                return cleaned_line
+        return ""
+
+    @property
+    def editable(self) -> bool:
+        return bool(self.editable_project_location)
+
+    @property
+    def local(self) -> bool:
+        """If distribution is installed in the current virtual environment.
+
+        Always True if we're not in a virtualenv.
+        """
+        if self.installed_location is None:
+            return False
+        return is_local(self.installed_location)
+
+    @property
+    def in_usersite(self) -> bool:
+        if self.installed_location is None or user_site is None:
+            return False
+        return self.installed_location.startswith(normalize_path(user_site))
+
+    @property
+    def in_site_packages(self) -> bool:
+        if self.installed_location is None or site_packages is None:
+            return False
+        return self.installed_location.startswith(normalize_path(site_packages))
+
+    def is_file(self, path: InfoPath) -> bool:
+        """Check whether an entry in the info directory is a file."""
+        raise NotImplementedError()
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        """Find distutils 'scripts' entries metadata.
+
+        If 'scripts' is supplied in ``setup.py``, distutils records those in the
+        installed distribution's ``scripts`` directory, a file for each script.
+        """
+        raise NotImplementedError()
+
+    def read_text(self, path: InfoPath) -> str:
+        """Read a file in the info directory.
+
+        :raise FileNotFoundError: If ``path`` does not exist in the directory.
+        :raise NoneMetadataError: If ``path`` exists in the info directory, but
+            cannot be read.
+        """
+        raise NotImplementedError()
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
         raise NotImplementedError()
 
     @property
-    def local(self):
-        # type: () -> bool
+    def metadata(self) -> email.message.Message:
+        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
+
+        This should return an empty message if the metadata file is unavailable.
+
+        :raises NoneMetadataError: If the metadata file is available, but does
+            not contain valid metadata.
+        """
         raise NotImplementedError()
 
     @property
-    def in_usersite(self):
-        # type: () -> bool
+    def metadata_version(self) -> Optional[str]:
+        """Value of "Metadata-Version:" in distribution metadata, if available."""
+        return self.metadata.get("Metadata-Version")
+
+    @property
+    def raw_name(self) -> str:
+        """Value of "Name:" in distribution metadata."""
+        # The metadata should NEVER be missing the Name: key, but if it somehow
+        # does, fall back to the known canonical name.
+        return self.metadata.get("Name", self.canonical_name)
+
+    @property
+    def requires_python(self) -> SpecifierSet:
+        """Value of "Requires-Python:" in distribution metadata.
+
+        If the key does not exist or contains an invalid value, an empty
+        SpecifierSet should be returned.
+        """
+        value = self.metadata.get("Requires-Python")
+        if value is None:
+            return SpecifierSet()
+        try:
+            # Convert to str to satisfy the type checker; this can be a Header object.
+            spec = SpecifierSet(str(value))
+        except InvalidSpecifier as e:
+            message = "Package %r has an invalid Requires-Python: %s"
+            logger.warning(message, self.raw_name, e)
+            return SpecifierSet()
+        return spec
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        """Dependencies of this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Requires-Dist:" entries in distribution metadata.
+        """
         raise NotImplementedError()
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        """Extras provided by this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Provides-Extra:" entries in distribution metadata.
+        """
+        raise NotImplementedError()
+
+    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("RECORD")
+        except FileNotFoundError:
+            return None
+        # This extra Path-str cast normalizes entries.
+        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
+
+    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("installed-files.txt")
+        except FileNotFoundError:
+            return None
+        paths = (p for p in text.splitlines(keepends=False) if p)
+        root = self.location
+        info = self.info_location
+        if root is None or info is None:
+            return paths
+        try:
+            info_rel = pathlib.Path(info).relative_to(root)
+        except ValueError:  # info is not relative to root.
+            return paths
+        if not info_rel.parts:  # info *is* root.
+            return paths
+        return (
+            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
+            for p in paths
+        )
+
+    def iter_declared_entries(self) -> Optional[Iterator[str]]:
+        """Iterate through file entries declared in this distribution.
+
+        For modern .dist-info distributions, this is the files listed in the
+        ``RECORD`` metadata file. For legacy setuptools distributions, this
+        comes from ``installed-files.txt``, with entries normalized to be
+        compatible with the format used by ``RECORD``.
+
+        :return: An iterator for listed entries, or None if the distribution
+            contains neither ``RECORD`` nor ``installed-files.txt``.
+        """
+        return (
+            self._iter_declared_entries_from_record()
+            or self._iter_declared_entries_from_legacy()
+        )
 
 
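Note: ``metadata_version``, ``raw_name`` and ``requires_python`` above all read keys off the ``email.message.Message`` returned by ``metadata``. A standalone sketch of that lookup-and-parse flow, not part of this commit; it uses the PyPI ``packaging`` project in place of pip's vendored copy, and the package name is invented:

    import email.parser
    from packaging.specifiers import InvalidSpecifier, SpecifierSet

    raw = "Metadata-Version: 2.1\nName: example-pkg\nRequires-Python: >=3.7\n"
    msg = email.parser.Parser().parsestr(raw)

    print(msg.get("Metadata-Version"))  # 2.1
    print(msg.get("Name"))              # example-pkg

    try:
        spec = SpecifierSet(str(msg.get("Requires-Python")))
    except InvalidSpecifier:
        spec = SpecifierSet()  # an invalid value degrades to "no restriction"
    print("3.6" in spec, "3.9" in spec)  # False True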
 class BaseEnvironment:
     """An environment containing distributions to introspect."""
 
     @classmethod
-    def default(cls):
-        # type: () -> BaseEnvironment
+    def default(cls) -> "BaseEnvironment":
         raise NotImplementedError()
 
     @classmethod
-    def from_paths(cls, paths):
-        # type: (Optional[List[str]]) -> BaseEnvironment
+    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
         raise NotImplementedError()
 
-    def get_distribution(self, name):
-        # type: (str) -> Optional[BaseDistribution]
-        """Given a requirement name, return the installed distributions."""
+    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
+        """Given a requirement name, return the installed distributions.
+
+        The name may not be normalized. The implementation must canonicalize
+        it for lookup.
+        """
         raise NotImplementedError()
 
-    def _iter_distributions(self):
-        # type: () -> Iterator[BaseDistribution]
+    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
         """Iterate through installed distributions.
 
         This function should be implemented by subclass, but never called
@@ -88,9 +480,8 @@ class BaseEnvironment:
         """
         raise NotImplementedError()
 
-    def iter_distributions(self):
-        # type: () -> Iterator[BaseDistribution]
-        """Iterate through installed distributions."""
+    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
+        """Iterate through all installed distributions without any filtering."""
         for dist in self._iter_distributions():
             # Make sure the distribution actually comes from a valid Python
             # packaging distribution. Pip's AdjacentTempDirectory leaves folders
@@ -112,15 +503,19 @@ class BaseEnvironment:
 
     def iter_installed_distributions(
         self,
-        local_only=True,  # type: bool
-        skip=stdlib_pkgs,  # type: Container[str]
-        include_editables=True,  # type: bool
-        editables_only=False,  # type: bool
-        user_only=False,  # type: bool
-    ):
-        # type: (...) -> Iterator[BaseDistribution]
+        local_only: bool = True,
+        skip: Container[str] = stdlib_pkgs,
+        include_editables: bool = True,
+        editables_only: bool = False,
+        user_only: bool = False,
+    ) -> Iterator[BaseDistribution]:
         """Return a list of installed distributions.
 
+        This is based on ``iter_all_distributions()`` with additional filtering
+        options. Note that ``iter_installed_distributions()`` without arguments
+        is *not* equal to ``iter_all_distributions()``, since some of the
+        configurations exclude packages by default.
+
         :param local_only: If True (default), only return installations
             local to the current virtualenv, if in a virtualenv.
         :param skip: An iterable of canonicalized project names to ignore;
@@ -130,7 +525,7 @@ class BaseEnvironment:
         :param user_only: If True, only report installations in the user
             site directory.
         """
-        it = self.iter_distributions()
+        it = self.iter_all_distributions()
         if local_only:
             it = (d for d in it if d.local)
         if not include_editables:
@@ -140,3 +535,27 @@ class BaseEnvironment:
         if user_only:
             it = (d for d in it if d.in_usersite)
         return (d for d in it if d.canonical_name not in skip)
+
+
+class Wheel(Protocol):
+    location: str
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        raise NotImplementedError()
+
+
+class FilesystemWheel(Wheel):
+    def __init__(self, location: str) -> None:
+        self.location = location
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.location, allowZip64=True)
+
+
+class MemoryWheel(Wheel):
+    def __init__(self, location: str, stream: IO[bytes]) -> None:
+        self.location = location
+        self.stream = stream
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.stream, allowZip64=True)
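Note: ``iter_installed_distributions()`` earlier in this file narrows one iterator by stacking generator expressions, so nothing is materialized until the caller consumes it. The same pattern in isolation; the ``FakeDist`` records are hypothetical stand-ins for ``BaseDistribution``:

    from dataclasses import dataclass

    @dataclass
    class FakeDist:
        canonical_name: str
        local: bool = True
        editable: bool = False

    dists = [FakeDist("pip"), FakeDist("requests"), FakeDist("dev-tool", editable=True)]
    skip = {"pip"}

    it = iter(dists)
    it = (d for d in it if d.local)         # local_only=True
    it = (d for d in it if not d.editable)  # include_editables=False
    print([d.canonical_name for d in it if d.canonical_name not in skip])
    # ['requests']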
@@ -1,104 +1,237 @@
+import email.message
+import email.parser
+import logging
+import os
 import zipfile
-from typing import Iterator, List, Optional
+from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
 
 from pip._vendor import pkg_resources
-from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
 from pip._vendor.packaging.version import parse as parse_version
 
-from pip._internal.utils import misc  # TODO: Move definition here.
-from pip._internal.utils.packaging import get_installer
-from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
+from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
+from pip._internal.utils.egg_link import egg_link_path_from_location
+from pip._internal.utils.misc import display_path, normalize_path
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
 
-from .base import BaseDistribution, BaseEnvironment, DistributionVersion
+from .base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    BaseEnvironment,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class EntryPoint(NamedTuple):
+    name: str
+    value: str
+    group: str
+
+
+class WheelMetadata:
+    """IMetadataProvider that reads metadata files from a dictionary.
+
+    This also maps metadata decoding exceptions to our internal exception type.
+    """
+
+    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
+        self._metadata = metadata
+        self._wheel_name = wheel_name
+
+    def has_metadata(self, name: str) -> bool:
+        return name in self._metadata
+
+    def get_metadata(self, name: str) -> str:
+        try:
+            return self._metadata[name].decode()
+        except UnicodeDecodeError as e:
+            # Augment the default error with the origin of the file.
+            raise UnsupportedWheel(
+                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
+            )
+
+    def get_metadata_lines(self, name: str) -> Iterable[str]:
+        return pkg_resources.yield_lines(self.get_metadata(name))
+
+    def metadata_isdir(self, name: str) -> bool:
+        return False
+
+    def metadata_listdir(self, name: str) -> List[str]:
+        return []
+
+    def run_script(self, script_name: str, namespace: str) -> None:
+        pass
+
+
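Note: the point of ``WheelMetadata.get_metadata`` above is that a bad byte in one metadata file surfaces as pip's own exception type, naming the wheel and the file, instead of a bare ``UnicodeDecodeError``. The pattern standalone, not part of this commit; ``RuntimeError`` stands in for ``UnsupportedWheel`` and the wheel name is invented:

    metadata = {"METADATA": b"Name: demo\n", "BROKEN": b"\xff\xfe"}

    def get_metadata(name: str) -> str:
        try:
            return metadata[name].decode()
        except UnicodeDecodeError as e:
            raise RuntimeError(f"Error decoding metadata for demo.whl: {e} in {name} file")

    print(get_metadata("METADATA"))  # Name: demo
    get_metadata("BROKEN")           # raises RuntimeError naming the wheel and file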
 class Distribution(BaseDistribution):
-    def __init__(self, dist):
-        # type: (pkg_resources.Distribution) -> None
+    def __init__(self, dist: pkg_resources.Distribution) -> None:
         self._dist = dist
 
     @classmethod
-    def from_wheel(cls, path, name):
-        # type: (str, str) -> Distribution
-        with zipfile.ZipFile(path, allowZip64=True) as zf:
-            dist = pkg_resources_distribution_for_wheel(zf, name, path)
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        dist_dir = directory.rstrip(os.sep)
+
+        # Build a PathMetadata object, from path to metadata. :wink:
+        base_dir, dist_dir_name = os.path.split(dist_dir)
+        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
+
+        # Determine the correct Distribution object type.
+        if dist_dir.endswith(".egg-info"):
+            dist_cls = pkg_resources.Distribution
+            dist_name = os.path.splitext(dist_dir_name)[0]
+        else:
+            assert dist_dir.endswith(".dist-info")
+            dist_cls = pkg_resources.DistInfoDistribution
+            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
+
+        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
+        return cls(dist)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                info_dir, _ = parse_wheel(zf, name)
+                metadata_text = {
+                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
+                    for path in zf.namelist()
+                    if path.startswith(f"{info_dir}/")
+                }
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        dist = pkg_resources.DistInfoDistribution(
+            location=wheel.location,
+            metadata=WheelMetadata(metadata_text, wheel.location),
+            project_name=name,
+        )
         return cls(dist)
 
     @property
-    def location(self):
-        # type: () -> Optional[str]
+    def location(self) -> Optional[str]:
         return self._dist.location
 
     @property
-    def metadata_version(self):
-        # type: () -> Optional[str]
-        for line in self._dist.get_metadata_lines(self._dist.PKG_INFO):
-            if line.lower().startswith("metadata-version:"):
-                return line.split(":", 1)[-1].strip()
-        return None
+    def installed_location(self) -> Optional[str]:
+        egg_link = egg_link_path_from_location(self.raw_name)
+        if egg_link:
+            location = egg_link
+        elif self.location:
+            location = self.location
+        else:
+            return None
+        return normalize_path(location)
 
     @property
-    def canonical_name(self):
-        # type: () -> str
+    def info_location(self) -> Optional[str]:
+        return self._dist.egg_info
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        # A distutils-installed distribution is provided by FileMetadata. This
+        # provider has a "path" attribute not present anywhere else. Not the
+        # best introspection logic, but pip has been doing this for a long time.
+        try:
+            return bool(self._dist._provider.path)
+        except AttributeError:
+            return False
+
+    @property
+    def canonical_name(self) -> NormalizedName:
         return canonicalize_name(self._dist.project_name)
 
     @property
-    def version(self):
-        # type: () -> DistributionVersion
+    def version(self) -> DistributionVersion:
         return parse_version(self._dist.version)
 
-    @property
-    def installer(self):
-        # type: () -> str
-        return get_installer(self._dist)
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.has_metadata(str(path))
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        yield from self._dist.metadata_listdir("scripts")
+
+    def read_text(self, path: InfoPath) -> str:
+        name = str(path)
+        if not self._dist.has_metadata(name):
+            raise FileNotFoundError(name)
+        content = self._dist.get_metadata(name)
+        if content is None:
+            raise NoneMetadataError(self, name)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        for group, entries in self._dist.get_entry_map().items():
+            for name, entry_point in entries.items():
+                name, _, value = str(entry_point).partition("=")
+                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
 
     @property
-    def editable(self):
-        # type: () -> bool
-        return misc.dist_is_editable(self._dist)
+    def metadata(self) -> email.message.Message:
+        """
+        :raises NoneMetadataError: if the distribution reports `has_metadata()`
+            True but `get_metadata()` returns None.
+        """
+        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
+            metadata_name = "METADATA"
+        else:
+            metadata_name = "PKG-INFO"
+        try:
+            metadata = self.read_text(metadata_name)
+        except FileNotFoundError:
+            if self.location:
+                displaying_path = display_path(self.location)
+            else:
+                displaying_path = repr(self.location)
+            logger.warning("No metadata found in %s", displaying_path)
+            metadata = ""
+        feed_parser = email.parser.FeedParser()
+        feed_parser.feed(metadata)
+        return feed_parser.close()
 
-    @property
-    def local(self):
-        # type: () -> bool
-        return misc.dist_is_local(self._dist)
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
+            extras = frozenset(extras).intersection(self._dist.extras)
+        return self._dist.requires(extras)
 
-    @property
-    def in_usersite(self):
-        # type: () -> bool
-        return misc.dist_in_usersite(self._dist)
+    def iter_provided_extras(self) -> Iterable[str]:
+        return self._dist.extras
 
 
 class Environment(BaseEnvironment):
-    def __init__(self, ws):
-        # type: (pkg_resources.WorkingSet) -> None
+    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
         self._ws = ws
 
     @classmethod
-    def default(cls):
-        # type: () -> BaseEnvironment
+    def default(cls) -> BaseEnvironment:
         return cls(pkg_resources.working_set)
 
     @classmethod
-    def from_paths(cls, paths):
-        # type: (Optional[List[str]]) -> BaseEnvironment
+    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
         return cls(pkg_resources.WorkingSet(paths))
 
-    def _search_distribution(self, name):
-        # type: (str) -> Optional[BaseDistribution]
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        for dist in self._ws:
+            yield Distribution(dist)
+
+    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
         """Find a distribution matching the ``name`` in the environment.
 
         This searches from *all* distributions available in the environment, to
         match the behavior of ``pkg_resources.get_distribution()``.
         """
         canonical_name = canonicalize_name(name)
-        for dist in self.iter_distributions():
+        for dist in self.iter_all_distributions():
             if dist.canonical_name == canonical_name:
                 return dist
         return None
 
-    def get_distribution(self, name):
-        # type: (str) -> Optional[BaseDistribution]
+    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
 
         # Search the distribution by looking through the working set.
         dist = self._search_distribution(name)
         if dist:
@@ -119,8 +252,3 @@ class Environment(BaseEnvironment):
         except pkg_resources.DistributionNotFound:
             return None
         return self._search_distribution(name)
-
-    def _iter_distributions(self):
-        # type: () -> Iterator[BaseDistribution]
-        for dist in self._ws:
-            yield Distribution(dist)
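Note: ``iter_entry_points`` above round-trips a ``pkg_resources`` entry point through its string form and splits on the first "=". How that partition behaves, standalone; the entry-point values are made up:

    from typing import NamedTuple

    class EntryPoint(NamedTuple):  # same shape as the NamedTuple in this hunk
        name: str
        value: str
        group: str

    raw = "demo-cli = demo.cli:main [extra]"
    name, _, value = raw.partition("=")
    print(EntryPoint(name=name.strip(), value=value.strip(), group="console_scripts"))
    # EntryPoint(name='demo-cli', value='demo.cli:main [extra]', group='console_scripts')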
@@ -5,30 +5,30 @@ from pip._internal.utils.models import KeyBasedCompareMixin
 
 
 class InstallationCandidate(KeyBasedCompareMixin):
-    """Represents a potential "candidate" for installation.
-    """
+    """Represents a potential "candidate" for installation."""
 
     __slots__ = ["name", "version", "link"]
 
-    def __init__(self, name, version, link):
-        # type: (str, str, Link) -> None
+    def __init__(self, name: str, version: str, link: Link) -> None:
         self.name = name
         self.version = parse_version(version)
         self.link = link
 
         super().__init__(
             key=(self.name, self.version, self.link),
-            defining_class=InstallationCandidate
+            defining_class=InstallationCandidate,
         )
 
-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
-            self.name, self.version, self.link,
+            self.name,
+            self.version,
+            self.link,
         )
 
-    def __str__(self):
-        # type: () -> str
-        return '{!r} candidate (version {} at {})'.format(
-            self.name, self.version, self.link,
+    def __str__(self) -> str:
+        return "{!r} candidate (version {} at {})".format(
+            self.name,
+            self.version,
+            self.link,
         )
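Note: ``InstallationCandidate.__init__`` stores ``parse_version(version)`` rather than the raw string, which is what makes candidate ordering correct. A quick check of why, using the PyPI ``packaging`` project in place of pip's vendored copy:

    from packaging.version import parse

    print("1.10" > "1.9")                # False: lexicographic comparison misorders
    print(parse("1.10") > parse("1.9"))  # True: proper version ordering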
@@ -22,8 +22,9 @@ class DirectUrlValidationError(Exception):
     pass
 
 
-def _get(d, expected_type, key, default=None):
-    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T]
+def _get(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
     """Get value from dictionary and verify expected type."""
     if key not in d:
         return default
@@ -37,16 +38,16 @@ def _get(d, expected_type, key, default=None):
     return value
 
 
-def _get_required(d, expected_type, key, default=None):
-    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T
+def _get_required(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
     value = _get(d, expected_type, key, default)
     if value is None:
         raise DirectUrlValidationError(f"{key} must have a value")
     return value
 
 
-def _exactly_one_of(infos):
-    # type: (Iterable[Optional[InfoType]]) -> InfoType
+def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
     infos = [info for info in infos if info is not None]
     if not infos:
         raise DirectUrlValidationError(
@@ -60,8 +61,7 @@ def _exactly_one_of(infos):
     return infos[0]
 
 
-def _filter_none(**kwargs):
-    # type: (Any) -> Dict[str, Any]
+def _filter_none(**kwargs: Any) -> Dict[str, Any]:
     """Make dict excluding None values."""
     return {k: v for k, v in kwargs.items() if v is not None}
 
@@ -71,39 +71,29 @@ class VcsInfo:
 
     def __init__(
         self,
-        vcs,  # type: str
-        commit_id,  # type: str
-        requested_revision=None,  # type: Optional[str]
-        resolved_revision=None,  # type: Optional[str]
-        resolved_revision_type=None,  # type: Optional[str]
-    ):
+        vcs: str,
+        commit_id: str,
+        requested_revision: Optional[str] = None,
+    ) -> None:
         self.vcs = vcs
         self.requested_revision = requested_revision
         self.commit_id = commit_id
-        self.resolved_revision = resolved_revision
-        self.resolved_revision_type = resolved_revision_type
 
     @classmethod
-    def _from_dict(cls, d):
-        # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo]
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
         if d is None:
             return None
         return cls(
             vcs=_get_required(d, str, "vcs"),
             commit_id=_get_required(d, str, "commit_id"),
             requested_revision=_get(d, str, "requested_revision"),
-            resolved_revision=_get(d, str, "resolved_revision"),
-            resolved_revision_type=_get(d, str, "resolved_revision_type"),
         )
 
-    def _to_dict(self):
-        # type: () -> Dict[str, Any]
+    def _to_dict(self) -> Dict[str, Any]:
         return _filter_none(
             vcs=self.vcs,
             requested_revision=self.requested_revision,
             commit_id=self.commit_id,
-            resolved_revision=self.resolved_revision,
-            resolved_revision_type=self.resolved_revision_type,
         )
 
 
@@ -112,19 +102,17 @@ class ArchiveInfo:
 
     def __init__(
         self,
-        hash=None,  # type: Optional[str]
-    ):
+        hash: Optional[str] = None,
+    ) -> None:
         self.hash = hash
 
     @classmethod
-    def _from_dict(cls, d):
-        # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo]
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
         if d is None:
             return None
         return cls(hash=_get(d, str, "hash"))
 
-    def _to_dict(self):
-        # type: () -> Dict[str, Any]
+    def _to_dict(self) -> Dict[str, Any]:
         return _filter_none(hash=self.hash)
 
 
@@ -133,21 +121,17 @@ class DirInfo:
 
     def __init__(
         self,
-        editable=False,  # type: bool
-    ):
+        editable: bool = False,
+    ) -> None:
         self.editable = editable
 
     @classmethod
-    def _from_dict(cls, d):
-        # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo]
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
         if d is None:
             return None
-        return cls(
-            editable=_get_required(d, bool, "editable", default=False)
-        )
+        return cls(editable=_get_required(d, bool, "editable", default=False))
 
-    def _to_dict(self):
-        # type: () -> Dict[str, Any]
+    def _to_dict(self) -> Dict[str, Any]:
         return _filter_none(editable=self.editable or None)
 
 
@@ -155,26 +139,24 @@ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
 
 
 class DirectUrl:
 
     def __init__(
         self,
-        url,  # type: str
-        info,  # type: InfoType
-        subdirectory=None,  # type: Optional[str]
-    ):
+        url: str,
+        info: InfoType,
+        subdirectory: Optional[str] = None,
+    ) -> None:
         self.url = url
         self.info = info
         self.subdirectory = subdirectory
 
-    def _remove_auth_from_netloc(self, netloc):
-        # type: (str) -> str
+    def _remove_auth_from_netloc(self, netloc: str) -> str:
         if "@" not in netloc:
             return netloc
         user_pass, netloc_no_user_pass = netloc.split("@", 1)
         if (
-            isinstance(self.info, VcsInfo) and
-            self.info.vcs == "git" and
-            user_pass == "git"
+            isinstance(self.info, VcsInfo)
+            and self.info.vcs == "git"
+            and user_pass == "git"
         ):
             return netloc
         if ENV_VAR_RE.match(user_pass):
@@ -182,8 +164,7 @@ class DirectUrl:
         return netloc_no_user_pass
 
     @property
-    def redacted_url(self):
-        # type: () -> str
+    def redacted_url(self) -> str:
         """url with user:password part removed unless it is formed with
         environment variables as specified in PEP 610, or it is ``git``
         in the case of a git URL.
@@ -195,13 +176,11 @@ class DirectUrl:
         )
         return surl
 
-    def validate(self):
-        # type: () -> None
+    def validate(self) -> None:
         self.from_dict(self.to_dict())
 
     @classmethod
-    def from_dict(cls, d):
-        # type: (Dict[str, Any]) -> DirectUrl
+    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
         return DirectUrl(
             url=_get_required(d, str, "url"),
             subdirectory=_get(d, str, "subdirectory"),
@@ -214,8 +193,7 @@ class DirectUrl:
             ),
         )
 
-    def to_dict(self):
-        # type: () -> Dict[str, Any]
+    def to_dict(self) -> Dict[str, Any]:
         res = _filter_none(
             url=self.redacted_url,
             subdirectory=self.subdirectory,
@@ -224,10 +202,11 @@ class DirectUrl:
         return res
 
     @classmethod
-    def from_json(cls, s):
-        # type: (str) -> DirectUrl
+    def from_json(cls, s: str) -> "DirectUrl":
         return cls.from_dict(json.loads(s))
 
-    def to_json(self):
-        # type: () -> str
+    def to_json(self) -> str:
         return json.dumps(self.to_dict(), sort_keys=True)
+
+    def is_local_editable(self) -> bool:
+        return isinstance(self.info, DirInfo) and self.info.editable
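Note: the ``_remove_auth_from_netloc`` rule above, strip ``user:password@`` except for the conventional anonymous ``git@`` user, can be exercised in isolation. A simplified sketch, not part of this commit; it deliberately omits the PEP 610 environment-variable exemption handled by ``ENV_VAR_RE`` in the elided hunk:

    def remove_auth(netloc: str, is_git_vcs: bool = False) -> str:
        # Mirrors the branch structure of DirectUrl._remove_auth_from_netloc.
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if is_git_vcs and user_pass == "git":
            return netloc  # "git@host" carries no secret; keep it
        return netloc_no_user_pass

    print(remove_auth("user:s3cret@example.com"))          # example.com
    print(remove_auth("git@github.com", is_git_vcs=True))  # git@github.com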
@@ -6,13 +6,15 @@ from pip._internal.exceptions import CommandError
 
 
 class FormatControl:
-    """Helper for managing formats from which a package can be installed.
-    """
+    """Helper for managing formats from which a package can be installed."""
 
     __slots__ = ["no_binary", "only_binary"]
 
-    def __init__(self, no_binary=None, only_binary=None):
-        # type: (Optional[Set[str]], Optional[Set[str]]) -> None
+    def __init__(
+        self,
+        no_binary: Optional[Set[str]] = None,
+        only_binary: Optional[Set[str]] = None,
+    ) -> None:
         if no_binary is None:
             no_binary = set()
         if only_binary is None:
@@ -21,66 +23,58 @@ class FormatControl:
         self.no_binary = no_binary
         self.only_binary = only_binary
 
-    def __eq__(self, other):
-        # type: (object) -> bool
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
             return NotImplemented
 
         if self.__slots__ != other.__slots__:
             return False
 
-        return all(
-            getattr(self, k) == getattr(other, k)
-            for k in self.__slots__
-        )
+        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
 
-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         return "{}({}, {})".format(
-            self.__class__.__name__,
-            self.no_binary,
-            self.only_binary
+            self.__class__.__name__, self.no_binary, self.only_binary
         )
 
     @staticmethod
-    def handle_mutual_excludes(value, target, other):
-        # type: (str, Set[str], Set[str]) -> None
-        if value.startswith('-'):
+    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
+        if value.startswith("-"):
             raise CommandError(
                 "--no-binary / --only-binary option requires 1 argument."
             )
-        new = value.split(',')
-        while ':all:' in new:
+        new = value.split(",")
+        while ":all:" in new:
             other.clear()
             target.clear()
-            target.add(':all:')
-            del new[:new.index(':all:') + 1]
+            target.add(":all:")
+            del new[: new.index(":all:") + 1]
             # Without a none, we want to discard everything as :all: covers it
-            if ':none:' not in new:
+            if ":none:" not in new:
                 return
         for name in new:
-            if name == ':none:':
+            if name == ":none:":
                 target.clear()
                 continue
             name = canonicalize_name(name)
             other.discard(name)
             target.add(name)
 
-    def get_allowed_formats(self, canonical_name):
-        # type: (str) -> FrozenSet[str]
+    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
         result = {"binary", "source"}
         if canonical_name in self.only_binary:
-            result.discard('source')
+            result.discard("source")
         elif canonical_name in self.no_binary:
-            result.discard('binary')
-        elif ':all:' in self.only_binary:
-            result.discard('source')
-        elif ':all:' in self.no_binary:
-            result.discard('binary')
+            result.discard("binary")
+        elif ":all:" in self.only_binary:
+            result.discard("source")
+        elif ":all:" in self.no_binary:
+            result.discard("binary")
         return frozenset(result)
 
-    def disallow_binaries(self):
-        # type: () -> None
+    def disallow_binaries(self) -> None:
         self.handle_mutual_excludes(
-            ':all:', self.no_binary, self.only_binary,
+            ":all:",
+            self.no_binary,
+            self.only_binary,
        )
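Note: ``handle_mutual_excludes`` mutates the two sets so that ``:all:`` wipes both sides and a named project always ends up on exactly one side. Assuming a pip checkout of this vintage where the module is importable, the effect looks like:

    from pip._internal.models.format_control import FormatControl

    no_binary, only_binary = set(), set()
    # --no-binary :all:
    FormatControl.handle_mutual_excludes(":all:", no_binary, only_binary)
    print(no_binary, only_binary)  # {':all:'} set()
    # --only-binary simplejson
    FormatControl.handle_mutual_excludes("simplejson", only_binary, no_binary)
    print(no_binary, only_binary)  # {':all:'} {'simplejson'}

    fc = FormatControl(no_binary, only_binary)
    print(fc.get_allowed_formats("simplejson"))  # frozenset({'binary'})
    print(fc.get_allowed_formats("requests"))    # frozenset({'source'})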
@@ -2,33 +2,27 @@ import urllib.parse
 
 
 class PackageIndex:
-    """Represents a Package Index and provides easier access to endpoints
-    """
+    """Represents a Package Index and provides easier access to endpoints"""
 
-    __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
-                 'file_storage_domain']
+    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
 
-    def __init__(self, url, file_storage_domain):
-        # type: (str, str) -> None
+    def __init__(self, url: str, file_storage_domain: str) -> None:
         super().__init__()
         self.url = url
         self.netloc = urllib.parse.urlsplit(url).netloc
-        self.simple_url = self._url_for_path('simple')
-        self.pypi_url = self._url_for_path('pypi')
+        self.simple_url = self._url_for_path("simple")
+        self.pypi_url = self._url_for_path("pypi")
 
         # This is part of a temporary hack used to block installs of PyPI
         # packages which depend on external urls only necessary until PyPI can
         # block such packages themselves
         self.file_storage_domain = file_storage_domain
 
-    def _url_for_path(self, path):
-        # type: (str) -> str
+    def _url_for_path(self, path: str) -> str:
         return urllib.parse.urljoin(self.url, path)
 
 
-PyPI = PackageIndex(
-    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
-)
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
 TestPyPI = PackageIndex(
-    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
+    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
 )
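Note: ``_url_for_path`` relies on plain ``urllib.parse.urljoin`` semantics, joining a relative path onto the index root:

    import urllib.parse

    print(urllib.parse.urljoin("https://pypi.org/", "simple"))  # https://pypi.org/simple
    print(urllib.parse.urljoin("https://pypi.org/", "pypi"))    # https://pypi.org/pypi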
@@ -1,8 +1,10 @@
+import functools
+import logging
 import os
 import posixpath
 import re
 import urllib.parse
-from typing import TYPE_CHECKING, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
 
 from pip._internal.utils.filetypes import WHEEL_EXTENSION
 from pip._internal.utils.hashes import Hashes
@@ -17,10 +19,14 @@ from pip._internal.utils.urls import path_to_url, url_to_path
 if TYPE_CHECKING:
     from pip._internal.index.collector import HTMLPage
 
+logger = logging.getLogger(__name__)
+
+
+_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
+
 
 class Link(KeyBasedCompareMixin):
-    """Represents a parsed link from a Package Index's simple URL
-    """
+    """Represents a parsed link from a Package Index's simple URL"""
 
     __slots__ = [
         "_parsed_url",
@@ -33,13 +39,12 @@ class Link(KeyBasedCompareMixin):
 
     def __init__(
         self,
-        url,  # type: str
-        comes_from=None,  # type: Optional[Union[str, HTMLPage]]
-        requires_python=None,  # type: Optional[str]
-        yanked_reason=None,  # type: Optional[str]
-        cache_link_parsing=True,  # type: bool
-    ):
-        # type: (...) -> None
+        url: str,
+        comes_from: Optional[Union[str, "HTMLPage"]] = None,
+        requires_python: Optional[str] = None,
+        yanked_reason: Optional[str] = None,
+        cache_link_parsing: bool = True,
+    ) -> None:
         """
         :param url: url of the resource pointed to (href of the link)
         :param comes_from: instance of HTMLPage where the link was found,
@@ -62,7 +67,7 @@ class Link(KeyBasedCompareMixin):
         """
 
         # url can be a UNC windows share
-        if url.startswith('\\\\'):
+        if url.startswith("\\\\"):
             url = path_to_url(url)
 
         self._parsed_url = urllib.parse.urlsplit(url)
@@ -78,31 +83,28 @@ class Link(KeyBasedCompareMixin):
 
         self.cache_link_parsing = cache_link_parsing
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         if self.requires_python:
-            rp = f' (requires-python:{self.requires_python})'
+            rp = f" (requires-python:{self.requires_python})"
         else:
-            rp = ''
+            rp = ""
         if self.comes_from:
-            return '{} (from {}){}'.format(
-                redact_auth_from_url(self._url), self.comes_from, rp)
+            return "{} (from {}){}".format(
+                redact_auth_from_url(self._url), self.comes_from, rp
+            )
         else:
             return redact_auth_from_url(str(self._url))
 
-    def __repr__(self):
-        # type: () -> str
-        return f'<Link {self}>'
+    def __repr__(self) -> str:
+        return f"<Link {self}>"
 
     @property
-    def url(self):
-        # type: () -> str
+    def url(self) -> str:
         return self._url
 
     @property
-    def filename(self):
-        # type: () -> str
-        path = self.path.rstrip('/')
+    def filename(self) -> str:
+        path = self.path.rstrip("/")
         name = posixpath.basename(path)
         if not name:
             # Make sure we don't leak auth information if the netloc
@@ -111,125 +113,106 @@ class Link(KeyBasedCompareMixin):
             return netloc
 
         name = urllib.parse.unquote(name)
-        assert name, f'URL {self._url!r} produced no filename'
+        assert name, f"URL {self._url!r} produced no filename"
         return name
 
     @property
-    def file_path(self):
-        # type: () -> str
+    def file_path(self) -> str:
         return url_to_path(self.url)
 
     @property
-    def scheme(self):
-        # type: () -> str
+    def scheme(self) -> str:
         return self._parsed_url.scheme
 
     @property
-    def netloc(self):
-        # type: () -> str
+    def netloc(self) -> str:
         """
         This can contain auth information.
         """
         return self._parsed_url.netloc
 
     @property
-    def path(self):
-        # type: () -> str
+    def path(self) -> str:
         return urllib.parse.unquote(self._parsed_url.path)
 
-    def splitext(self):
-        # type: () -> Tuple[str, str]
-        return splitext(posixpath.basename(self.path.rstrip('/')))
+    def splitext(self) -> Tuple[str, str]:
        return splitext(posixpath.basename(self.path.rstrip("/")))
 
     @property
-    def ext(self):
-        # type: () -> str
+    def ext(self) -> str:
         return self.splitext()[1]
 
     @property
-    def url_without_fragment(self):
-        # type: () -> str
+    def url_without_fragment(self) -> str:
         scheme, netloc, path, query, fragment = self._parsed_url
-        return urllib.parse.urlunsplit((scheme, netloc, path, query, None))
+        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
 
-    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
+    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
 
     @property
-    def egg_fragment(self):
-        # type: () -> Optional[str]
+    def egg_fragment(self) -> Optional[str]:
         match = self._egg_fragment_re.search(self._url)
         if not match:
             return None
         return match.group(1)
 
-    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
+    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
 
     @property
-    def subdirectory_fragment(self):
-        # type: () -> Optional[str]
+    def subdirectory_fragment(self) -> Optional[str]:
         match = self._subdirectory_fragment_re.search(self._url)
         if not match:
             return None
         return match.group(1)
 
     _hash_re = re.compile(
-        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+        r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES))
     )
 
     @property
-    def hash(self):
-        # type: () -> Optional[str]
+    def hash(self) -> Optional[str]:
         match = self._hash_re.search(self._url)
         if match:
             return match.group(2)
         return None
 
     @property
-    def hash_name(self):
-        # type: () -> Optional[str]
+    def hash_name(self) -> Optional[str]:
         match = self._hash_re.search(self._url)
if match:
|
if match:
|
||||||
return match.group(1)
|
return match.group(1)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def show_url(self):
|
def show_url(self) -> str:
|
||||||
# type: () -> str
|
return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
|
||||||
return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_file(self):
|
def is_file(self) -> bool:
|
||||||
# type: () -> bool
|
return self.scheme == "file"
|
||||||
return self.scheme == 'file'
|
|
||||||
|
|
||||||
def is_existing_dir(self):
|
def is_existing_dir(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
return self.is_file and os.path.isdir(self.file_path)
|
return self.is_file and os.path.isdir(self.file_path)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_wheel(self):
|
def is_wheel(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
return self.ext == WHEEL_EXTENSION
|
return self.ext == WHEEL_EXTENSION
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_vcs(self):
|
def is_vcs(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
from pip._internal.vcs import vcs
|
from pip._internal.vcs import vcs
|
||||||
|
|
||||||
return self.scheme in vcs.all_schemes
|
return self.scheme in vcs.all_schemes
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_yanked(self):
|
def is_yanked(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
return self.yanked_reason is not None
|
return self.yanked_reason is not None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def has_hash(self):
|
def has_hash(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
return self.hash_name is not None
|
return self.hash_name is not None
|
||||||
|
|
||||||
def is_hash_allowed(self, hashes):
|
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
|
||||||
# type: (Optional[Hashes]) -> bool
|
|
||||||
"""
|
"""
|
||||||
Return True if the link has a hash and it is allowed.
|
Return True if the link has a hash and it is allowed.
|
||||||
"""
|
"""
|
||||||
|
@ -242,7 +225,64 @@ class Link(KeyBasedCompareMixin):
|
||||||
return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
|
return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
|
||||||
|
|
||||||
|
|
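Note: the fragment patterns above (`_egg_fragment_re`, `_subdirectory_fragment_re`, `_hash_re`) all scan the raw URL string for `#key=` or `&key=` segments. A minimal standalone sketch of that idea, using only the standard library (the URL is invented for illustration):

    import re

    # Hypothetical URL, for illustration only.
    url = "https://example.com/pkg.tar.gz#egg=pkg&subdirectory=src&sha256=" + "ab" * 32

    # Same shape as the patterns above: a fragment key introduced by '#' or '&'.
    egg_re = re.compile(r"[#&]egg=([^&]*)")
    subdir_re = re.compile(r"[#&]subdirectory=([^&]*)")
    hash_re = re.compile(r"(sha256|sha384|sha512|sha224|sha1|md5)=([a-f0-9]+)")

    print(egg_re.search(url).group(1))       # pkg
    print(subdir_re.search(url).group(1))    # src
    print(hash_re.search(url).group(2)[:8])  # start of the hex digest
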
-# TODO: Relax this comparison logic to ignore, for example, fragments.
-def links_equivalent(link1, link2):
-    # type: (Link, Link) -> bool
-    return link1 == link2
+class _CleanResult(NamedTuple):
+    """Convert link for equivalency check.
+
+    This is used in the resolver to check whether two URL-specified requirements
+    likely point to the same distribution and can be considered equivalent. This
+    equivalency logic avoids comparing URLs literally, which can be too strict
+    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpected by users.
+
+    Currently this does three things:
+
+    1. Drop the basic auth part. This is technically wrong since a server can
+       serve different content based on auth, but if it does that, it is even
+       impossible to guarantee two URLs without auth are equivalent, since
+       the user can input different auth information when prompted. So the
+       practical solution is to assume the auth doesn't affect the response.
+    2. Parse the query to avoid the ordering issue. Note that the ordering of
+       values under the same key in the query is NOT cleaned; i.e. "a=1&a=2"
+       and "a=2&a=1" are still considered different.
+    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+       hash values, since it should have no impact on the downloaded content.
+       Note that this drops the "egg=" part historically used to denote the
+       requested project (and extras), which is wrong in the strictest sense,
+       but too many people are supplying it inconsistently to cause superfluous
+       resolution conflicts, so we choose to also ignore them.
+    """
+
+    parsed: urllib.parse.SplitResult
+    query: Dict[str, List[str]]
+    subdirectory: str
+    hashes: Dict[str, str]
+
+
+def _clean_link(link: Link) -> _CleanResult:
+    parsed = link._parsed_url
+    netloc = parsed.netloc.rsplit("@", 1)[-1]
+    # According to RFC 8089, an empty host in file: means localhost.
+    if parsed.scheme == "file" and not netloc:
+        netloc = "localhost"
+    fragment = urllib.parse.parse_qs(parsed.fragment)
+    if "egg" in fragment:
+        logger.debug("Ignoring egg= fragment in %s", link)
+    try:
+        # If there are multiple subdirectory values, use the first one.
+        # This matches the behavior of Link.subdirectory_fragment.
+        subdirectory = fragment["subdirectory"][0]
+    except (IndexError, KeyError):
+        subdirectory = ""
+    # If there are multiple hash values under the same algorithm, use the
+    # first one. This matches the behavior of Link.hash_value.
+    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+    return _CleanResult(
+        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+        query=urllib.parse.parse_qs(parsed.query),
+        subdirectory=subdirectory,
+        hashes=hashes,
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def links_equivalent(link1: Link, link2: Link) -> bool:
+    return _clean_link(link1) == _clean_link(link2)
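Note: a rough illustration of why the parsed-query comparison in `_clean_link` is order-insensitive while literal URL comparison is not. This is a sketch in the spirit of the docstring, not pip's actual implementation; the URLs are invented:

    import urllib.parse

    a = "https://user:pass@example.com/simple/proj/?a=1&b=2#egg=proj"
    b = "https://example.com/simple/proj/?b=2&a=1#egg=proj"

    # Literal comparison is too strict: these differ as strings.
    print(a == b)  # False

    def clean(url):
        # Mirrors the cleaning steps: drop basic auth, parse the query,
        # and ignore the egg= fragment.
        parts = urllib.parse.urlsplit(url)
        netloc = parts.netloc.rsplit("@", 1)[-1]
        return (parts.scheme, netloc, parts.path, urllib.parse.parse_qs(parts.query))

    print(clean(a) == clean(b))  # True
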
@@ -6,7 +6,7 @@ https://docs.python.org/3/install/index.html#alternate-installation.
 """
 
 
-SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data']
+SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
 
 
 class Scheme:
@@ -18,12 +18,12 @@ class Scheme:
 
     def __init__(
         self,
-        platlib,  # type: str
-        purelib,  # type: str
-        headers,  # type: str
-        scripts,  # type: str
-        data,  # type: str
-    ):
+        platlib: str,
+        purelib: str,
+        headers: str,
+        scripts: str,
+        data: str,
+    ) -> None:
         self.platlib = platlib
         self.purelib = purelib
         self.headers = headers
@@ -25,10 +25,9 @@ class SearchScope:
     @classmethod
     def create(
         cls,
-        find_links,  # type: List[str]
-        index_urls,  # type: List[str]
-    ):
-        # type: (...) -> SearchScope
+        find_links: List[str],
+        index_urls: List[str],
+    ) -> "SearchScope":
         """
         Create a SearchScope object after normalizing the `find_links`.
         """
@@ -37,9 +36,9 @@ class SearchScope:
         # it and if it exists, use the normalized version.
         # This is deliberately conservative - it might be fine just to
         # blindly normalize anything starting with a ~...
-        built_find_links = []  # type: List[str]
+        built_find_links: List[str] = []
         for link in find_links:
-            if link.startswith('~'):
+            if link.startswith("~"):
                 new_link = normalize_path(link)
                 if os.path.exists(new_link):
                     link = new_link
@@ -50,11 +49,11 @@ class SearchScope:
         if not has_tls():
             for link in itertools.chain(index_urls, built_find_links):
                 parsed = urllib.parse.urlparse(link)
-                if parsed.scheme == 'https':
+                if parsed.scheme == "https":
                     logger.warning(
-                        'pip is configured with locations that require '
-                        'TLS/SSL, however the ssl module in Python is not '
-                        'available.'
+                        "pip is configured with locations that require "
+                        "TLS/SSL, however the ssl module in Python is not "
+                        "available."
                     )
                     break
 
@@ -65,15 +64,13 @@ class SearchScope:
 
     def __init__(
         self,
-        find_links,  # type: List[str]
-        index_urls,  # type: List[str]
-    ):
-        # type: (...) -> None
+        find_links: List[str],
+        index_urls: List[str],
+    ) -> None:
         self.find_links = find_links
         self.index_urls = index_urls
 
-    def get_formatted_locations(self):
-        # type: () -> str
+    def get_formatted_locations(self) -> str:
         lines = []
         redacted_index_urls = []
         if self.index_urls and self.index_urls != [PyPI.simple_url]:
@@ -91,41 +88,42 @@ class SearchScope:
                 # exceptions for malformed URLs
                 if not purl.scheme and not purl.netloc:
                     logger.warning(
-                        'The index url "%s" seems invalid, '
-                        'please provide a scheme.', redacted_index_url)
+                        'The index url "%s" seems invalid, please provide a scheme.',
+                        redacted_index_url,
+                    )
 
                 redacted_index_urls.append(redacted_index_url)
 
-            lines.append('Looking in indexes: {}'.format(
-                ', '.join(redacted_index_urls)))
+            lines.append(
+                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+            )
 
         if self.find_links:
             lines.append(
-                'Looking in links: {}'.format(', '.join(
-                    redact_auth_from_url(url) for url in self.find_links))
+                "Looking in links: {}".format(
+                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
+                )
             )
-        return '\n'.join(lines)
+        return "\n".join(lines)
 
-    def get_index_urls_locations(self, project_name):
-        # type: (str) -> List[str]
+    def get_index_urls_locations(self, project_name: str) -> List[str]:
         """Returns the locations found via self.index_urls
 
         Checks the url_name on the main (first in the list) index and
         use this url_name to produce all locations
         """
 
-        def mkurl_pypi_url(url):
-            # type: (str) -> str
+        def mkurl_pypi_url(url: str) -> str:
             loc = posixpath.join(
-                url,
-                urllib.parse.quote(canonicalize_name(project_name)))
+                url, urllib.parse.quote(canonicalize_name(project_name))
+            )
             # For maximum compatibility with easy_install, ensure the path
             # ends in a trailing slash. Although this isn't in the spec
             # (and PyPI can handle it without the slash) some other index
             # implementations might break if they relied on easy_install's
             # behavior.
-            if not loc.endswith('/'):
-                loc = loc + '/'
+            if not loc.endswith("/"):
+                loc = loc + "/"
             return loc
 
         return [mkurl_pypi_url(url) for url in self.index_urls]
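Note: the trailing-slash normalization in `mkurl_pypi_url` can be reproduced in isolation like this (the index URL and project name are made up; assumes the standalone `packaging` distribution is installed, which exposes the same `canonicalize_name` pip vendors):

    import posixpath
    import urllib.parse

    # canonicalize_name lowercases and collapses runs of "-", "_", "." into "-".
    from packaging.utils import canonicalize_name

    def mkurl_pypi_url(url: str, project_name: str) -> str:
        loc = posixpath.join(url, urllib.parse.quote(canonicalize_name(project_name)))
        # Match easy_install's expectation of a trailing slash.
        if not loc.endswith("/"):
            loc = loc + "/"
        return loc

    print(mkurl_pypi_url("https://pypi.org/simple", "My_Package.Name"))
    # https://pypi.org/simple/my-package-name/
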
@@ -9,8 +9,13 @@ class SelectionPreferences:
     and installing files.
     """
 
-    __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control',
-                 'prefer_binary', 'ignore_requires_python']
+    __slots__ = [
+        "allow_yanked",
+        "allow_all_prereleases",
+        "format_control",
+        "prefer_binary",
+        "ignore_requires_python",
+    ]
 
     # Don't include an allow_yanked default value to make sure each call
     # site considers whether yanked releases are allowed. This also causes
@@ -18,13 +23,12 @@ class SelectionPreferences:
     # people when reading the code.
     def __init__(
         self,
-        allow_yanked,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        format_control=None,  # type: Optional[FormatControl]
-        prefer_binary=False,  # type: bool
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
+        allow_yanked: bool,
+        allow_all_prereleases: bool = False,
+        format_control: Optional[FormatControl] = None,
+        prefer_binary: bool = False,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
         """Create a SelectionPreferences object.
 
         :param allow_yanked: Whether files marked as yanked (in the sense
@@ -26,12 +26,11 @@ class TargetPython:
 
     def __init__(
         self,
-        platforms=None,  # type: Optional[List[str]]
-        py_version_info=None,  # type: Optional[Tuple[int, ...]]
-        abis=None,  # type: Optional[List[str]]
-        implementation=None,  # type: Optional[str]
-    ):
-        # type: (...) -> None
+        platforms: Optional[List[str]] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+        abis: Optional[List[str]] = None,
+        implementation: Optional[str] = None,
+    ) -> None:
         """
         :param platforms: A list of strings or None. If None, searches for
             packages that are supported by the current system. Otherwise, will
@@ -54,7 +53,7 @@ class TargetPython:
         else:
             py_version_info = normalize_version_info(py_version_info)
 
-        py_version = '.'.join(map(str, py_version_info[:2]))
+        py_version = ".".join(map(str, py_version_info[:2]))
 
         self.abis = abis
         self.implementation = implementation
@@ -63,32 +62,29 @@ class TargetPython:
         self.py_version_info = py_version_info
 
         # This is used to cache the return value of get_tags().
-        self._valid_tags = None  # type: Optional[List[Tag]]
+        self._valid_tags: Optional[List[Tag]] = None
 
-    def format_given(self):
-        # type: () -> str
+    def format_given(self) -> str:
         """
         Format the given, non-None attributes for display.
         """
         display_version = None
         if self._given_py_version_info is not None:
-            display_version = '.'.join(
+            display_version = ".".join(
                 str(part) for part in self._given_py_version_info
             )
 
         key_values = [
-            ('platforms', self.platforms),
-            ('version_info', display_version),
-            ('abis', self.abis),
-            ('implementation', self.implementation),
+            ("platforms", self.platforms),
+            ("version_info", display_version),
+            ("abis", self.abis),
+            ("implementation", self.implementation),
         ]
-        return ' '.join(
-            f'{key}={value!r}' for key, value in key_values
-            if value is not None
+        return " ".join(
+            f"{key}={value!r}" for key, value in key_values if value is not None
        )
 
-    def get_tags(self):
-        # type: () -> List[Tag]
+    def get_tags(self) -> List[Tag]:
         """
         Return the supported PEP 425 tags to check wheel candidates against.
 
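Note: the `Tag` objects cached in `_valid_tags` come from pip's vendored `packaging`; the standalone `packaging` distribution exposes the same API, so the shape of those tags can be inspected directly (assumes `packaging` is installed):

    import itertools

    from packaging.tags import Tag, sys_tags

    # A PEP 425 tag is an (interpreter, abi, platform) triple.
    print(Tag("py3", "none", "any"))  # py3-none-any

    # The first few tags supported by the running interpreter, most specific first.
    for tag in itertools.islice(sys_tags(), 5):
        print(tag)
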
@@ -16,42 +16,36 @@ class Wheel:
         r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
         ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
         \.whl|\.dist-info)$""",
-        re.VERBOSE
+        re.VERBOSE,
     )
 
-    def __init__(self, filename):
-        # type: (str) -> None
+    def __init__(self, filename: str) -> None:
         """
         :raises InvalidWheelFilename: when the filename is invalid for a wheel
         """
         wheel_info = self.wheel_file_re.match(filename)
         if not wheel_info:
-            raise InvalidWheelFilename(
-                f"{filename} is not a valid wheel filename."
-            )
+            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
         self.filename = filename
-        self.name = wheel_info.group('name').replace('_', '-')
+        self.name = wheel_info.group("name").replace("_", "-")
         # we'll assume "_" means "-" due to wheel naming scheme
         # (https://github.com/pypa/pip/issues/1150)
-        self.version = wheel_info.group('ver').replace('_', '-')
-        self.build_tag = wheel_info.group('build')
-        self.pyversions = wheel_info.group('pyver').split('.')
-        self.abis = wheel_info.group('abi').split('.')
-        self.plats = wheel_info.group('plat').split('.')
+        self.version = wheel_info.group("ver").replace("_", "-")
+        self.build_tag = wheel_info.group("build")
+        self.pyversions = wheel_info.group("pyver").split(".")
+        self.abis = wheel_info.group("abi").split(".")
+        self.plats = wheel_info.group("plat").split(".")
 
         # All the tag combinations from this file
         self.file_tags = {
-            Tag(x, y, z) for x in self.pyversions
-            for y in self.abis for z in self.plats
+            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
         }
 
-    def get_formatted_file_tags(self):
-        # type: () -> List[str]
+    def get_formatted_file_tags(self) -> List[str]:
         """Return the wheel's tags as a sorted list of strings."""
         return sorted(str(tag) for tag in self.file_tags)
 
-    def support_index_min(self, tags):
-        # type: (List[Tag]) -> int
+    def support_index_min(self, tags: List[Tag]) -> int:
         """Return the lowest index that one of the wheel's file_tag combinations
         achieves in the given list of supported tags.
 
@@ -66,10 +60,11 @@ class Wheel:
         """
         return min(tags.index(tag) for tag in self.file_tags if tag in tags)
 
-    def find_most_preferred_tag(self, tags, tag_to_priority):
-        # type: (List[Tag], Dict[Tag, int]) -> int
+    def find_most_preferred_tag(
+        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
+    ) -> int:
         """Return the priority of the most preferred tag that one of the wheel's file
-        tag combinations acheives in the given list of supported tags using the given
+        tag combinations achieves in the given list of supported tags using the given
         tag_to_priority mapping, where lower priorities are more-preferred.
 
         This is used in place of support_index_min in some cases in order to avoid
@@ -86,8 +81,7 @@ class Wheel:
             tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
         )
 
-    def supported(self, tags):
-        # type: (Iterable[Tag]) -> bool
+    def supported(self, tags: Iterable[Tag]) -> bool:
         """Return whether the wheel is compatible with one of the given tags.
 
         :param tags: the PEP 425 tags to check the wheel against.
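Note: as a quick check of `wheel_file_re`, the same pattern parses a made-up filename like this:

    import re

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    # Hypothetical wheel filename, for illustration only.
    m = wheel_file_re.match("example_pkg-1.0-py3-none-any.whl")
    assert m is not None
    print(m.group("name"), m.group("ver"), m.group("pyver"), m.group("abi"), m.group("plat"))
    # example_pkg 1.0 py3 none any

Each dot-separated compatibility tag then expands into the `Tag(x, y, z)` combinations built in `__init__`.
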
@@ -4,7 +4,6 @@ Contains interface (MultiDomainBasicAuth) and associated glue code for
 providing credentials in the context of network requests.
 """
 
-import logging
 import urllib.parse
 from typing import Any, Dict, List, Optional, Tuple
 
@@ -12,6 +11,7 @@ from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
 from pip._vendor.requests.models import Request, Response
 from pip._vendor.requests.utils import get_netrc_auth
 
+from pip._internal.utils.logging import getLogger
 from pip._internal.utils.misc import (
     ask,
     ask_input,
@@ -21,23 +21,23 @@ from pip._internal.utils.misc import (
 )
 from pip._internal.vcs.versioncontrol import AuthInfo
 
-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)
 
 Credentials = Tuple[str, str, str]
 
 try:
     import keyring
 except ImportError:
-    keyring = None
+    keyring = None  # type: ignore[assignment]
 except Exception as exc:
     logger.warning(
-        "Keyring is skipped due to an exception: %s", str(exc),
+        "Keyring is skipped due to an exception: %s",
+        str(exc),
     )
-    keyring = None
+    keyring = None  # type: ignore[assignment]
 
 
-def get_keyring_auth(url, username):
-    # type: (Optional[str], Optional[str]) -> Optional[AuthInfo]
+def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
     """Return the tuple auth for a given url from keyring."""
     global keyring
     if not url or not keyring:
@@ -63,28 +63,28 @@ def get_keyring_auth(url, username):
 
     except Exception as exc:
         logger.warning(
-            "Keyring is skipped due to an exception: %s", str(exc),
+            "Keyring is skipped due to an exception: %s",
+            str(exc),
         )
-        keyring = None
+        keyring = None  # type: ignore[assignment]
     return None
 
 
 class MultiDomainBasicAuth(AuthBase):
-    def __init__(self, prompting=True, index_urls=None):
-        # type: (bool, Optional[List[str]]) -> None
+    def __init__(
+        self, prompting: bool = True, index_urls: Optional[List[str]] = None
+    ) -> None:
         self.prompting = prompting
         self.index_urls = index_urls
-        self.passwords = {}  # type: Dict[str, AuthInfo]
+        self.passwords: Dict[str, AuthInfo] = {}
         # When the user is prompted to enter credentials and keyring is
         # available, we will offer to save them. If the user accepts,
         # this value is set to the credentials they entered. After the
         # request authenticates, the caller should call
         # ``save_credentials`` to save these.
-        self._credentials_to_save = None  # type: Optional[Credentials]
+        self._credentials_to_save: Optional[Credentials] = None
 
-    def _get_index_url(self, url):
-        # type: (str) -> Optional[str]
+    def _get_index_url(self, url: str) -> Optional[str]:
         """Return the original index URL matching the requested URL.
 
         Cached or dynamically generated credentials may work against
@@ -106,9 +106,12 @@ class MultiDomainBasicAuth(AuthBase):
                 return u
         return None
 
-    def _get_new_credentials(self, original_url, allow_netrc=True,
-                             allow_keyring=False):
-        # type: (str, bool, bool) -> AuthInfo
+    def _get_new_credentials(
+        self,
+        original_url: str,
+        allow_netrc: bool = True,
+        allow_keyring: bool = False,
+    ) -> AuthInfo:
         """Find and return credentials for the specified URL."""
         # Split the credentials and netloc from the url.
         url, netloc, url_user_password = split_auth_netloc_from_url(
@@ -147,18 +150,21 @@ class MultiDomainBasicAuth(AuthBase):
         # If we don't have a password and keyring is available, use it.
         if allow_keyring:
             # The index url is more specific than the netloc, so try it first
+            # fmt: off
             kr_auth = (
                 get_keyring_auth(index_url, username) or
                 get_keyring_auth(netloc, username)
             )
+            # fmt: on
             if kr_auth:
                 logger.debug("Found credentials in keyring for %s", netloc)
                 return kr_auth
 
         return username, password
 
-    def _get_url_and_credentials(self, original_url):
-        # type: (str) -> Tuple[str, Optional[str], Optional[str]]
+    def _get_url_and_credentials(
+        self, original_url: str
+    ) -> Tuple[str, Optional[str], Optional[str]]:
         """Return the credentials to use for the provided URL.
 
         If allowed, netrc and keyring may be used to obtain the
@@ -170,13 +176,19 @@ class MultiDomainBasicAuth(AuthBase):
         """
         url, netloc, _ = split_auth_netloc_from_url(original_url)
 
-        # Use any stored credentials that we have for this netloc
-        username, password = self.passwords.get(netloc, (None, None))
+        # Try to get credentials from original url
+        username, password = self._get_new_credentials(original_url)
 
-        if username is None and password is None:
-            # No stored credentials. Acquire new credentials without prompting
-            # the user. (e.g. from netrc, keyring, or the URL itself)
-            username, password = self._get_new_credentials(original_url)
+        # If credentials not found, use any stored credentials for this netloc.
+        # Do this if either the username or the password is missing.
+        # This accounts for the situation in which the user has specified
+        # the username in the index url, but the password comes from keyring.
+        if (username is None or password is None) and netloc in self.passwords:
+            un, pw = self.passwords[netloc]
+            # It is possible that the cached credentials are for a different username,
+            # in which case the cache should be ignored.
+            if username is None or username == un:
+                username, password = un, pw
 
         if username is not None or password is not None:
             # Convert the username and password if they're None, so that
@@ -191,15 +203,14 @@ class MultiDomainBasicAuth(AuthBase):
 
         assert (
             # Credentials were found
-            (username is not None and password is not None) or
+            (username is not None and password is not None)
             # Credentials were not found
-            (username is None and password is None)
+            or (username is None and password is None)
         ), f"Could not load credentials from url: {original_url}"
 
         return url, username, password
 
-    def __call__(self, req):
-        # type: (Request) -> Request
+    def __call__(self, req: Request) -> Request:
         # Get credentials for this request
         url, username, password = self._get_url_and_credentials(req.url)
 
@@ -216,8 +227,9 @@ class MultiDomainBasicAuth(AuthBase):
         return req
 
     # Factored out to allow for easy patching in tests
-    def _prompt_for_password(self, netloc):
-        # type: (str) -> Tuple[Optional[str], Optional[str], bool]
+    def _prompt_for_password(
+        self, netloc: str
+    ) -> Tuple[Optional[str], Optional[str], bool]:
         username = ask_input(f"User for {netloc}: ")
         if not username:
             return None, None, False
@@ -228,14 +240,12 @@ class MultiDomainBasicAuth(AuthBase):
         return username, password, True
 
     # Factored out to allow for easy patching in tests
-    def _should_save_password_to_keyring(self):
-        # type: () -> bool
+    def _should_save_password_to_keyring(self) -> bool:
         if not keyring:
             return False
         return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
 
-    def handle_401(self, resp, **kwargs):
-        # type: (Response, **Any) -> Response
+    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
         # We only care about 401 responses, anything else we want to just
         # pass through the actual response
         if resp.status_code != 401:
@@ -248,9 +258,11 @@ class MultiDomainBasicAuth(AuthBase):
         parsed = urllib.parse.urlparse(resp.url)
 
         # Query the keyring for credentials:
-        username, password = self._get_new_credentials(resp.url,
-                                                       allow_netrc=False,
-                                                       allow_keyring=True)
+        username, password = self._get_new_credentials(
+            resp.url,
+            allow_netrc=False,
+            allow_keyring=True,
+        )
 
         # Prompt the user for a new username and password
         save = False
@@ -287,16 +299,15 @@ class MultiDomainBasicAuth(AuthBase):
 
         return new_resp
 
-    def warn_on_401(self, resp, **kwargs):
-        # type: (Response, **Any) -> None
+    def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
         """Response callback to warn about incorrect credentials."""
         if resp.status_code == 401:
             logger.warning(
-                '401 Error, Credentials not correct for %s', resp.request.url,
+                "401 Error, Credentials not correct for %s",
+                resp.request.url,
             )
 
-    def save_credentials(self, resp, **kwargs):
-        # type: (Response, **Any) -> None
+    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
         """Response callback to save credentials on success."""
         assert keyring is not None, "should never reach here without keyring"
         if not keyring:
@@ -306,7 +317,7 @@ class MultiDomainBasicAuth(AuthBase):
         self._credentials_to_save = None
         if creds and resp.status_code < 400:
             try:
-                logger.info('Saving credentials to keyring')
+                logger.info("Saving credentials to keyring")
                 keyring.set_password(*creds)
             except Exception:
-                logger.exception('Failed to save credentials')
+                logger.exception("Failed to save credentials")
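Note: the reworked `_get_url_and_credentials` establishes a precedence: credentials found for the URL itself (the URL, netrc, keyring) win, and the per-netloc cache only fills in whatever is still missing, provided the usernames agree. A condensed sketch of just that merging rule, with invented data:

    from typing import Dict, Optional, Tuple

    Creds = Tuple[Optional[str], Optional[str]]

    def merge_credentials(found: Creds, cached: Dict[str, Creds], netloc: str) -> Creds:
        # Prefer what the URL gave us; fall back to the cache only when
        # something is missing and the usernames agree.
        username, password = found
        if (username is None or password is None) and netloc in cached:
            un, pw = cached[netloc]
            if username is None or username == un:
                username, password = un, pw
        return username, password

    cache = {"pypi.example.com": ("alice", "s3cret")}  # hypothetical cache
    # Username came from the index URL, password comes from the cache.
    print(merge_credentials(("alice", None), cache, "pypi.example.com"))  # ('alice', 's3cret')
    # A different username in the URL means the cache is ignored.
    print(merge_credentials(("bob", None), cache, "pypi.example.com"))    # ('bob', None)
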
@@ -3,7 +3,7 @@
 
 import os
 from contextlib import contextmanager
-from typing import Iterator, Optional
+from typing import Generator, Optional
 
 from pip._vendor.cachecontrol.cache import BaseCache
 from pip._vendor.cachecontrol.caches import FileCache
@@ -13,14 +13,12 @@ from pip._internal.utils.filesystem import adjacent_tmp_file, replace
 from pip._internal.utils.misc import ensure_dir
 
 
-def is_from_cache(response):
-    # type: (Response) -> bool
+def is_from_cache(response: Response) -> bool:
     return getattr(response, "from_cache", False)
 
 
 @contextmanager
-def suppressed_cache_errors():
-    # type: () -> Iterator[None]
+def suppressed_cache_errors() -> Generator[None, None, None]:
     """If we can't access the cache then we can just skip caching and process
     requests as if caching wasn't enabled.
     """
@@ -36,14 +34,12 @@ class SafeFileCache(BaseCache):
     not be accessible or writable.
     """
 
-    def __init__(self, directory):
-        # type: (str) -> None
+    def __init__(self, directory: str) -> None:
         assert directory is not None, "Cache directory must not be None."
         super().__init__()
         self.directory = directory
 
-    def _get_cache_path(self, name):
-        # type: (str) -> str
+    def _get_cache_path(self, name: str) -> str:
         # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
         # class for backwards-compatibility and to avoid using a non-public
         # method.
@@ -51,15 +47,13 @@ class SafeFileCache(BaseCache):
         parts = list(hashed[:5]) + [hashed]
         return os.path.join(self.directory, *parts)
 
-    def get(self, key):
-        # type: (str) -> Optional[bytes]
+    def get(self, key: str) -> Optional[bytes]:
         path = self._get_cache_path(key)
         with suppressed_cache_errors():
-            with open(path, 'rb') as f:
+            with open(path, "rb") as f:
                 return f.read()
 
-    def set(self, key, value):
-        # type: (str, bytes) -> None
+    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
         path = self._get_cache_path(key)
         with suppressed_cache_errors():
             ensure_dir(os.path.dirname(path))
@@ -69,8 +63,7 @@ class SafeFileCache(BaseCache):
 
             replace(f.name, path)
 
-    def delete(self, key):
-        # type: (str) -> None
+    def delete(self, key: str) -> None:
         path = self._get_cache_path(key)
         with suppressed_cache_errors():
             os.remove(path)
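Note: the `parts = list(hashed[:5]) + [hashed]` line kept from cachecontrol's `FileCache._fn` shards cache entries across nested directories so no single directory grows huge. Reproduced standalone (the key is arbitrary; cachecontrol's FileCache hashes keys with sha224, which this sketch assumes):

    import hashlib
    import os

    def cache_path(directory: str, name: str) -> str:
        # Hash the key, then use each of the first five hex digits as one
        # directory level, with the full hash as the filename.
        hashed = hashlib.sha224(name.encode()).hexdigest()
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(directory, *parts)

    print(cache_path("/tmp/pip-cache", "https://example.com/simple/pkg/"))
    # e.g. /tmp/pip-cache/a/b/c/d/e/abcde...<full sha224 hex digest>
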
@@ -8,7 +8,7 @@ from typing import Iterable, Optional, Tuple
 
 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
 
-from pip._internal.cli.progress_bars import DownloadProgressProvider
+from pip._internal.cli.progress_bars import get_download_progress_renderer
 from pip._internal.exceptions import NetworkConnectionError
 from pip._internal.models.index import PyPI
 from pip._internal.models.link import Link
@@ -20,20 +20,18 @@ from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
 logger = logging.getLogger(__name__)
 
 
-def _get_http_response_size(resp):
-    # type: (Response) -> Optional[int]
+def _get_http_response_size(resp: Response) -> Optional[int]:
     try:
-        return int(resp.headers['content-length'])
+        return int(resp.headers["content-length"])
     except (ValueError, KeyError, TypeError):
         return None
 
 
 def _prepare_download(
-    resp,  # type: Response
-    link,  # type: Link
-    progress_bar  # type: str
-):
-    # type: (...) -> Iterable[bytes]
+    resp: Response,
+    link: Link,
+    progress_bar: str,
+) -> Iterable[bytes]:
     total_length = _get_http_response_size(resp)
 
     if link.netloc == PyPI.file_storage_domain:
@@ -44,7 +42,7 @@ def _prepare_download(
     logged_url = redact_auth_from_url(url)
 
     if total_length:
-        logged_url = '{} ({})'.format(logged_url, format_size(total_length))
+        logged_url = "{} ({})".format(logged_url, format_size(total_length))
 
     if is_from_cache(resp):
         logger.info("Using cached %s", logged_url)
@@ -67,27 +65,24 @@ def _prepare_download(
     if not show_progress:
         return chunks
 
-    return DownloadProgressProvider(
-        progress_bar, max=total_length
-    )(chunks)
+    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
+    return renderer(chunks)
 
 
-def sanitize_content_filename(filename):
-    # type: (str) -> str
+def sanitize_content_filename(filename: str) -> str:
     """
     Sanitize the "filename" value from a Content-Disposition header.
     """
     return os.path.basename(filename)
 
 
-def parse_content_disposition(content_disposition, default_filename):
-    # type: (str, str) -> str
+def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
     """
     Parse the "filename" value from a Content-Disposition header, and
     return the default filename if the result is empty.
     """
     _type, params = cgi.parse_header(content_disposition)
-    filename = params.get('filename')
+    filename = params.get("filename")
     if filename:
         # We need to sanitize the filename to prevent directory traversal
         # in case the filename contains ".." path parts.
@@ -95,21 +90,18 @@ def parse_content_disposition(content_disposition, default_filename):
     return filename or default_filename
 
 
-def _get_http_response_filename(resp, link):
-    # type: (Response, Link) -> str
+def _get_http_response_filename(resp: Response, link: Link) -> str:
     """Get an ideal filename from the given HTTP response, falling back to
     the link filename if not provided.
     """
     filename = link.filename  # fallback
     # Have a look at the Content-Disposition header for a better guess
-    content_disposition = resp.headers.get('content-disposition')
+    content_disposition = resp.headers.get("content-disposition")
     if content_disposition:
         filename = parse_content_disposition(content_disposition, filename)
-    ext = splitext(filename)[1]  # type: Optional[str]
+    ext: Optional[str] = splitext(filename)[1]
     if not ext:
-        ext = mimetypes.guess_extension(
-            resp.headers.get('content-type', '')
-        )
+        ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
         if ext:
             filename += ext
     if not ext and link.url != resp.url:
@@ -119,9 +111,8 @@ def _get_http_response_filename(resp, link):
     return filename
 
 
-def _http_get_download(session, link):
-    # type: (PipSession, Link) -> Response
-    target_url = link.url.split('#', 1)[0]
+def _http_get_download(session: PipSession, link: Link) -> Response:
+    target_url = link.url.split("#", 1)[0]
     resp = session.get(target_url, headers=HEADERS, stream=True)
     raise_for_status(resp)
     return resp
@@ -130,15 +121,13 @@ def _http_get_download(session, link):
 class Downloader:
     def __init__(
         self,
-        session,  # type: PipSession
-        progress_bar,  # type: str
-    ):
-        # type: (...) -> None
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
         self._session = session
         self._progress_bar = progress_bar
 
-    def __call__(self, link, location):
-        # type: (Link, str) -> Tuple[str, str]
+    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
         """Download the file given by link into location."""
         try:
             resp = _http_get_download(self._session, link)
@@ -153,26 +142,25 @@ class Downloader:
         filepath = os.path.join(location, filename)
 
         chunks = _prepare_download(resp, link, self._progress_bar)
-        with open(filepath, 'wb') as content_file:
+        with open(filepath, "wb") as content_file:
             for chunk in chunks:
                 content_file.write(chunk)
-        content_type = resp.headers.get('Content-Type', '')
+        content_type = resp.headers.get("Content-Type", "")
         return filepath, content_type
 
 
 class BatchDownloader:
 
     def __init__(
         self,
-        session,  # type: PipSession
-        progress_bar,  # type: str
-    ):
-        # type: (...) -> None
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
         self._session = session
         self._progress_bar = progress_bar
 
-    def __call__(self, links, location):
-        # type: (Iterable[Link], str) -> Iterable[Tuple[Link, Tuple[str, str]]]
+    def __call__(
+        self, links: Iterable[Link], location: str
+    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
         """Download the files given by links into location."""
         for link in links:
             try:
@@ -181,7 +169,8 @@ class BatchDownloader:
                 assert e.response is not None
                 logger.critical(
                     "HTTP error %s while getting %s",
-                    e.response.status_code, link,
+                    e.response.status_code,
+                    link,
                 )
                 raise
 
@@ -189,8 +178,8 @@ class BatchDownloader:
             filepath = os.path.join(location, filename)
 
             chunks = _prepare_download(resp, link, self._progress_bar)
-            with open(filepath, 'wb') as content_file:
+            with open(filepath, "wb") as content_file:
                 for chunk in chunks:
                     content_file.write(chunk)
-            content_type = resp.headers.get('Content-Type', '')
+            content_type = resp.headers.get("Content-Type", "")
             yield link, (filepath, content_type)
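Note: stripped of progress rendering and pip's session, `Downloader.__call__` boils down to streaming the response in chunks and writing them out. A minimal sketch of that pattern (assumes `requests` is installed; the URL would be supplied by the caller):

    import os

    import requests

    def download(url: str, location: str) -> str:
        resp = requests.get(url, stream=True)
        resp.raise_for_status()
        filepath = os.path.join(location, os.path.basename(url))
        with open(filepath, "wb") as content_file:
            # iter_content plays the role of response_chunks in the real code.
            for chunk in resp.iter_content(chunk_size=10240):
                content_file.write(chunk)
        return filepath
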
@@ -1,41 +1,40 @@
 """Lazy ZIP over HTTP"""
 
-__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url']
+__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
 
 from bisect import bisect_left, bisect_right
 from contextlib import contextmanager
 from tempfile import NamedTemporaryFile
-from typing import Any, Dict, Iterator, List, Optional, Tuple
+from typing import Any, Dict, Generator, List, Optional, Tuple
 from zipfile import BadZipfile, ZipFile
 
-from pip._vendor.pkg_resources import Distribution
+from pip._vendor.packaging.utils import canonicalize_name
 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
 
+from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
 from pip._internal.network.session import PipSession
 from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
-from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
 
 
 class HTTPRangeRequestUnsupported(Exception):
     pass
 
 
-def dist_from_wheel_url(name, url, session):
-    # type: (str, str, PipSession) -> Distribution
-    """Return a pkg_resources.Distribution from the given wheel URL.
+def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
+    """Return a distribution object from the given wheel URL.
 
     This uses HTTP range requests to only fetch the portion of the wheel
     containing metadata, just enough for the object to be constructed.
     If such requests are not supported, HTTPRangeRequestUnsupported
     is raised.
     """
-    with LazyZipOverHTTP(url, session) as wheel:
+    with LazyZipOverHTTP(url, session) as zf:
         # For read-only ZIP files, ZipFile only needs methods read,
         # seek, seekable and tell, not the whole IO protocol.
-        zip_file = ZipFile(wheel)  # type: ignore
+        wheel = MemoryWheel(zf.name, zf)  # type: ignore
         # After context manager exit, wheel.name
         # is an invalid file by intention.
-        return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
+        return get_wheel_distribution(wheel, canonicalize_name(name))
 
 
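Note: the "lazy" part relies on standard HTTP range requests. A server that advertises `Accept-Ranges: bytes` will serve an arbitrary byte window, which is how only the metadata-bearing tail of a wheel gets fetched. A bare-bones probe of the same mechanism (assumes `requests` is installed; the URL is hypothetical):

    import requests

    url = "https://files.example.com/example_pkg-1.0-py3-none-any.whl"  # hypothetical

    head = requests.head(url)
    if "bytes" not in head.headers.get("Accept-Ranges", "none"):
        raise RuntimeError("range request is not supported")

    # Ask for just the final 2 KiB, where a ZIP's central directory usually lives.
    tail = requests.get(url, headers={"Range": "bytes=-2048"})
    print(tail.status_code)           # 206 Partial Content
    print(len(tail.content) <= 2048)  # True
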
class LazyZipOverHTTP:
|
class LazyZipOverHTTP:
|
||||||
|
@ -47,51 +46,46 @@ class LazyZipOverHTTP:
|
||||||
during initialization.
|
during initialization.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE):
|
def __init__(
|
||||||
# type: (str, PipSession, int) -> None
|
self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
|
||||||
|
) -> None:
|
||||||
head = session.head(url, headers=HEADERS)
|
head = session.head(url, headers=HEADERS)
|
||||||
raise_for_status(head)
|
raise_for_status(head)
|
||||||
assert head.status_code == 200
|
assert head.status_code == 200
|
||||||
self._session, self._url, self._chunk_size = session, url, chunk_size
|
self._session, self._url, self._chunk_size = session, url, chunk_size
|
||||||
self._length = int(head.headers['Content-Length'])
|
self._length = int(head.headers["Content-Length"])
|
||||||
self._file = NamedTemporaryFile()
|
self._file = NamedTemporaryFile()
|
||||||
self.truncate(self._length)
|
self.truncate(self._length)
|
||||||
self._left = [] # type: List[int]
|
self._left: List[int] = []
|
||||||
self._right = [] # type: List[int]
|
self._right: List[int] = []
|
||||||
if 'bytes' not in head.headers.get('Accept-Ranges', 'none'):
|
if "bytes" not in head.headers.get("Accept-Ranges", "none"):
|
||||||
raise HTTPRangeRequestUnsupported('range request is not supported')
|
raise HTTPRangeRequestUnsupported("range request is not supported")
|
||||||
self._check_zip()
|
self._check_zip()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def mode(self):
|
def mode(self) -> str:
|
||||||
# type: () -> str
|
|
||||||
"""Opening mode, which is always rb."""
|
"""Opening mode, which is always rb."""
|
||||||
return 'rb'
|
return "rb"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self) -> str:
|
||||||
# type: () -> str
|
|
||||||
"""Path to the underlying file."""
|
"""Path to the underlying file."""
|
||||||
return self._file.name
|
return self._file.name
|
||||||
|
|
||||||
def seekable(self):
|
def seekable(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
"""Return whether random access is supported, which is True."""
|
"""Return whether random access is supported, which is True."""
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def close(self):
|
def close(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
"""Close the file."""
|
"""Close the file."""
|
||||||
self._file.close()
|
self._file.close()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def closed(self):
|
def closed(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
"""Whether the file is closed."""
|
"""Whether the file is closed."""
|
||||||
return self._file.closed
|
return self._file.closed
|
||||||
|
|
||||||
def read(self, size=-1):
|
def read(self, size: int = -1) -> bytes:
|
||||||
# type: (int) -> bytes
|
|
||||||
"""Read up to size bytes from the object and return them.
|
"""Read up to size bytes from the object and return them.
|
||||||
|
|
||||||
As a convenience, if size is unspecified or -1,
|
As a convenience, if size is unspecified or -1,
|
||||||
|
@ -100,18 +94,16 @@ class LazyZipOverHTTP:
|
||||||
"""
|
"""
|
||||||
download_size = max(size, self._chunk_size)
|
download_size = max(size, self._chunk_size)
|
||||||
start, length = self.tell(), self._length
|
start, length = self.tell(), self._length
|
||||||
stop = length if size < 0 else min(start+download_size, length)
|
stop = length if size < 0 else min(start + download_size, length)
|
||||||
start = max(0, stop-download_size)
|
start = max(0, stop - download_size)
|
||||||
self._download(start, stop-1)
|
self._download(start, stop - 1)
|
||||||
return self._file.read(size)
|
return self._file.read(size)
|
||||||
|
|
||||||
def readable(self):
|
def readable(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
"""Return whether the file is readable, which is True."""
|
"""Return whether the file is readable, which is True."""
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def seek(self, offset, whence=0):
|
def seek(self, offset: int, whence: int = 0) -> int:
|
||||||
# type: (int, int) -> int
|
|
||||||
"""Change stream position and return the new absolute position.
|
"""Change stream position and return the new absolute position.
|
||||||
|
|
||||||
Seek to offset relative position indicated by whence:
|
Seek to offset relative position indicated by whence:
|
||||||
|
@ -121,13 +113,11 @@ class LazyZipOverHTTP:
|
||||||
"""
|
"""
|
||||||
return self._file.seek(offset, whence)
|
return self._file.seek(offset, whence)
|
||||||
|
|
||||||
def tell(self):
|
def tell(self) -> int:
|
||||||
# type: () -> int
|
"""Return the current position."""
|
||||||
"""Return the current possition."""
|
|
||||||
return self._file.tell()
|
return self._file.tell()
|
||||||
|
|
||||||
def truncate(self, size=None):
|
def truncate(self, size: Optional[int] = None) -> int:
|
||||||
# type: (Optional[int]) -> int
|
|
||||||
"""Resize the stream to the given size in bytes.
|
"""Resize the stream to the given size in bytes.
|
||||||
|
|
||||||
If size is unspecified resize to the current position.
|
If size is unspecified resize to the current position.
|
||||||
|
@ -137,23 +127,19 @@ class LazyZipOverHTTP:
|
||||||
"""
|
"""
|
||||||
return self._file.truncate(size)
|
return self._file.truncate(size)
|
||||||
|
|
||||||
def writable(self):
|
def writable(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
"""Return False."""
|
"""Return False."""
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self) -> "LazyZipOverHTTP":
|
||||||
# type: () -> LazyZipOverHTTP
|
|
||||||
self._file.__enter__()
|
self._file.__enter__()
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, *exc):
|
def __exit__(self, *exc: Any) -> Optional[bool]:
|
||||||
# type: (*Any) -> Optional[bool]
|
|
||||||
return self._file.__exit__(*exc)
|
return self._file.__exit__(*exc)
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def _stay(self):
|
def _stay(self) -> Generator[None, None, None]:
|
||||||
# type: ()-> Iterator[None]
|
|
||||||
"""Return a context manager keeping the position.
|
"""Return a context manager keeping the position.
|
||||||
|
|
||||||
At the end of the block, seek back to original position.
|
At the end of the block, seek back to original position.
|
||||||
|
@ -164,8 +150,7 @@ class LazyZipOverHTTP:
|
||||||
finally:
|
finally:
|
||||||
self.seek(pos)
|
self.seek(pos)
|
||||||
|
|
||||||
def _check_zip(self):
|
def _check_zip(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
"""Check and download until the file is a valid ZIP."""
|
"""Check and download until the file is a valid ZIP."""
|
||||||
end = self._length - 1
|
end = self._length - 1
|
||||||
for start in reversed(range(0, end, self._chunk_size)):
|
for start in reversed(range(0, end, self._chunk_size)):
|
||||||
|
@ -180,18 +165,20 @@ class LazyZipOverHTTP:
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
def _stream_response(self, start, end, base_headers=HEADERS):
|
def _stream_response(
|
||||||
# type: (int, int, Dict[str, str]) -> Response
|
self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
|
||||||
|
) -> Response:
|
||||||
"""Return HTTP response to a range request from start to end."""
|
"""Return HTTP response to a range request from start to end."""
|
||||||
headers = base_headers.copy()
|
headers = base_headers.copy()
|
||||||
headers['Range'] = f'bytes={start}-{end}'
|
headers["Range"] = f"bytes={start}-{end}"
|
||||||
# TODO: Get range requests to be correctly cached
|
# TODO: Get range requests to be correctly cached
|
||||||
headers['Cache-Control'] = 'no-cache'
|
headers["Cache-Control"] = "no-cache"
|
||||||
return self._session.get(self._url, headers=headers, stream=True)
|
return self._session.get(self._url, headers=headers, stream=True)
|
||||||
|
|
||||||
def _merge(self, start, end, left, right):
|
def _merge(
|
||||||
# type: (int, int, int, int) -> Iterator[Tuple[int, int]]
|
self, start: int, end: int, left: int, right: int
|
||||||
"""Return an iterator of intervals to be fetched.
|
) -> Generator[Tuple[int, int], None, None]:
|
||||||
|
"""Return a generator of intervals to be fetched.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
start (int): Start of needed interval
|
start (int): Start of needed interval
|
||||||
|
@ -200,18 +187,17 @@ class LazyZipOverHTTP:
|
||||||
right (int): Index after last overlapping downloaded data
|
right (int): Index after last overlapping downloaded data
|
||||||
"""
|
"""
|
||||||
lslice, rslice = self._left[left:right], self._right[left:right]
|
lslice, rslice = self._left[left:right], self._right[left:right]
|
||||||
i = start = min([start]+lslice[:1])
|
i = start = min([start] + lslice[:1])
|
||||||
end = max([end]+rslice[-1:])
|
end = max([end] + rslice[-1:])
|
||||||
for j, k in zip(lslice, rslice):
|
for j, k in zip(lslice, rslice):
|
||||||
if j > i:
|
if j > i:
|
||||||
yield i, j-1
|
yield i, j - 1
|
||||||
i = k + 1
|
i = k + 1
|
||||||
if i <= end:
|
if i <= end:
|
||||||
yield i, end
|
yield i, end
|
||||||
self._left[left:right], self._right[left:right] = [start], [end]
|
self._left[left:right], self._right[left:right] = [start], [end]
|
||||||
|
|
||||||
def _download(self, start, end):
|
def _download(self, start: int, end: int) -> None:
|
||||||
# type: (int, int) -> None
|
|
||||||
"""Download bytes from start to end inclusively."""
|
"""Download bytes from start to end inclusively."""
|
||||||
with self._stay():
|
with self._stay():
|
||||||
left = bisect_left(self._right, start)
|
left = bisect_left(self._right, start)
|
||||||
|
|
|
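
The dist_from_wheel_url() hunk above keeps the same underlying trick across both versions: a ZIP's central directory sits at the end of the archive, so a ranged GET for the tail is usually enough to list a wheel's contents without downloading it. A minimal standalone sketch of that idea, assuming a server that honors Range headers (the URL is a placeholder and the 64 KiB tail size is an assumption, not pip's exact chunking):

from io import BytesIO
from zipfile import ZipFile

import requests

url = "https://example.com/pkg-1.0-py3-none-any.whl"  # placeholder URL

head = requests.head(url)
length = int(head.headers["Content-Length"])
if "bytes" not in head.headers.get("Accept-Ranges", "none"):
    raise RuntimeError("range requests are not supported")

# Fetch only the last 64 KiB; the central directory and the
# end-of-central-directory record normally fit in this tail.
tail_size = min(length, 65536)
resp = requests.get(url, headers={"Range": f"bytes={length - tail_size}-{length - 1}"})

# Pad the front with zeros so the member offsets recorded in the central
# directory still line up; only names and metadata are readable this way.
buf = BytesIO(b"\0" * (length - tail_size) + resp.content)
print(ZipFile(buf).namelist())

This is exactly why LazyZipOverHTTP pre-truncates a sparse temporary file to the full content length and downloads chunks from the end first in _check_zip().
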
@@ -2,27 +2,20 @@
 network request configuration and behavior.
 """
 
-# When mypy runs on Windows the call to distro.linux_distribution() is skipped
-# resulting in the failure:
-#
-# error: unused 'type: ignore' comment
-#
-# If the upstream module adds typing, this comment should be removed. See
-# https://github.com/nir0s/distro/pull/269
-#
-# mypy: warn-unused-ignores=False
-
 import email.utils
+import io
 import ipaddress
 import json
 import logging
 import mimetypes
 import os
 import platform
+import shutil
+import subprocess
 import sys
 import urllib.parse
 import warnings
-from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple, Union
 
 from pip._vendor import requests, urllib3
 from pip._vendor.cachecontrol import CacheControlAdapter
@@ -53,7 +46,7 @@ SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
 warnings.filterwarnings("ignore", category=InsecureRequestWarning)
 
 
-SECURE_ORIGINS = [
+SECURE_ORIGINS: List[SecureOrigin] = [
     # protocol, hostname, port
     # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
     ("https", "*", "*"),
@@ -63,7 +56,7 @@ SECURE_ORIGINS = [
     ("file", "*", None),
     # ssh is always secure.
     ("ssh", "*", "*"),
-]  # type: List[SecureOrigin]
+]
 
 
 # These are environment variables present when running under various
@@ -75,18 +68,17 @@ SECURE_ORIGINS = [
 # For more background, see: https://github.com/pypa/pip/issues/5499
 CI_ENVIRONMENT_VARIABLES = (
     # Azure Pipelines
-    'BUILD_BUILDID',
+    "BUILD_BUILDID",
     # Jenkins
-    'BUILD_ID',
+    "BUILD_ID",
     # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
-    'CI',
+    "CI",
     # Explicit environment variable.
-    'PIP_IS_CI',
+    "PIP_IS_CI",
 )
 
 
-def looks_like_ci():
-    # type: () -> bool
+def looks_like_ci() -> bool:
     """
     Return whether it looks like pip is running under CI.
     """
@@ -96,48 +88,50 @@ def looks_like_ci():
     return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
 
 
-def user_agent():
-    # type: () -> str
+def user_agent() -> str:
     """
     Return a string representing the user agent.
    """
-    data = {
+    data: Dict[str, Any] = {
         "installer": {"name": "pip", "version": __version__},
         "python": platform.python_version(),
         "implementation": {
             "name": platform.python_implementation(),
         },
-    }  # type: Dict[str, Any]
+    }
 
-    if data["implementation"]["name"] == 'CPython':
+    if data["implementation"]["name"] == "CPython":
         data["implementation"]["version"] = platform.python_version()
-    elif data["implementation"]["name"] == 'PyPy':
+    elif data["implementation"]["name"] == "PyPy":
         pypy_version_info = sys.pypy_version_info  # type: ignore
-        if pypy_version_info.releaselevel == 'final':
+        if pypy_version_info.releaselevel == "final":
             pypy_version_info = pypy_version_info[:3]
         data["implementation"]["version"] = ".".join(
             [str(x) for x in pypy_version_info]
         )
-    elif data["implementation"]["name"] == 'Jython':
+    elif data["implementation"]["name"] == "Jython":
         # Complete Guess
         data["implementation"]["version"] = platform.python_version()
-    elif data["implementation"]["name"] == 'IronPython':
+    elif data["implementation"]["name"] == "IronPython":
         # Complete Guess
         data["implementation"]["version"] = platform.python_version()
 
     if sys.platform.startswith("linux"):
         from pip._vendor import distro
 
-        # https://github.com/nir0s/distro/pull/269
-        linux_distribution = distro.linux_distribution()  # type: ignore
-        distro_infos = dict(filter(
-            lambda x: x[1],
-            zip(["name", "version", "id"], linux_distribution),
-        ))
-        libc = dict(filter(
-            lambda x: x[1],
-            zip(["lib", "version"], libc_ver()),
-        ))
+        linux_distribution = distro.name(), distro.version(), distro.codename()
+        distro_infos: Dict[str, Any] = dict(
+            filter(
+                lambda x: x[1],
+                zip(["name", "version", "id"], linux_distribution),
+            )
+        )
+        libc = dict(
+            filter(
+                lambda x: x[1],
+                zip(["lib", "version"], libc_ver()),
+            )
+        )
         if libc:
             distro_infos["libc"] = libc
         if distro_infos:
@@ -157,12 +151,28 @@ def user_agent():
 
     if has_tls():
         import _ssl as ssl
 
         data["openssl_version"] = ssl.OPENSSL_VERSION
 
     setuptools_dist = get_default_environment().get_distribution("setuptools")
     if setuptools_dist is not None:
         data["setuptools_version"] = str(setuptools_dist.version)
 
+    if shutil.which("rustc") is not None:
+        # If for any reason `rustc --version` fails, silently ignore it
+        try:
+            rustc_output = subprocess.check_output(
+                ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
+            )
+        except Exception:
+            pass
+        else:
+            if rustc_output.startswith(b"rustc "):
+                # The format of `rustc --version` is:
+                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
+                # We extract just the middle (1.52.1) part
+                data["rustc_version"] = rustc_output.split(b" ")[1].decode()
+
     # Use None rather than False so as not to give the impression that
     # pip knows it is not being run under CI.  Rather, it is a null or
     # inconclusive result. Also, we include some value rather than no
@@ -180,17 +190,15 @@ def user_agent():
 
 
 class LocalFSAdapter(BaseAdapter):
 
     def send(
         self,
-        request,  # type: PreparedRequest
-        stream=False,  # type: bool
-        timeout=None,  # type: Optional[Union[float, Tuple[float, float]]]
-        verify=True,  # type: Union[bool, str]
-        cert=None,  # type: Optional[Union[str, Tuple[str, str]]]
-        proxies=None,  # type:Optional[Mapping[str, str]]
-    ):
-        # type: (...) -> Response
+        request: PreparedRequest,
+        stream: bool = False,
+        timeout: Optional[Union[float, Tuple[float, float]]] = None,
+        verify: Union[bool, str] = True,
+        cert: Optional[Union[str, Tuple[str, str]]] = None,
+        proxies: Optional[Mapping[str, str]] = None,
+    ) -> Response:
         pathname = url_to_path(request.url)
 
         resp = Response()
@@ -200,67 +208,66 @@ class LocalFSAdapter(BaseAdapter):
         try:
             stats = os.stat(pathname)
         except OSError as exc:
+            # format the exception raised as a io.BytesIO object,
+            # to return a better error message:
             resp.status_code = 404
-            resp.raw = exc
+            resp.reason = type(exc).__name__
+            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
         else:
             modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
             content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
-            resp.headers = CaseInsensitiveDict({
-                "Content-Type": content_type,
-                "Content-Length": stats.st_size,
-                "Last-Modified": modified,
-            })
+            resp.headers = CaseInsensitiveDict(
+                {
+                    "Content-Type": content_type,
+                    "Content-Length": stats.st_size,
+                    "Last-Modified": modified,
+                }
+            )
 
             resp.raw = open(pathname, "rb")
             resp.close = resp.raw.close
 
         return resp
 
-    def close(self):
-        # type: () -> None
+    def close(self) -> None:
        pass
 
 
 class InsecureHTTPAdapter(HTTPAdapter):
 
     def cert_verify(
         self,
-        conn,  # type: ConnectionPool
-        url,  # type: str
-        verify,  # type: Union[bool, str]
-        cert,  # type: Optional[Union[str, Tuple[str, str]]]
-    ):
-        # type: (...) -> None
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
 
 
 class InsecureCacheControlAdapter(CacheControlAdapter):
 
     def cert_verify(
         self,
-        conn,  # type: ConnectionPool
-        url,  # type: str
-        verify,  # type: Union[bool, str]
-        cert,  # type: Optional[Union[str, Tuple[str, str]]]
-    ):
-        # type: (...) -> None
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
 
 
 class PipSession(requests.Session):
 
-    timeout = None  # type: Optional[int]
+    timeout: Optional[int] = None
 
     def __init__(
         self,
-        *args,  # type: Any
-        retries=0,  # type: int
-        cache=None,  # type: Optional[str]
-        trusted_hosts=(),  # type: Sequence[str]
-        index_urls=None,  # type: Optional[List[str]]
-        **kwargs,  # type: Any
-    ):
-        # type: (...) -> None
+        *args: Any,
+        retries: int = 0,
+        cache: Optional[str] = None,
+        trusted_hosts: Sequence[str] = (),
+        index_urls: Optional[List[str]] = None,
+        **kwargs: Any,
+    ) -> None:
         """
         :param trusted_hosts: Domains not to emit warnings for when not using
             HTTPS.
@@ -269,7 +276,7 @@ class PipSession(requests.Session):
 
         # Namespace the attribute with "pip_" just in case to prevent
         # possible conflicts with the base class.
-        self.pip_trusted_origins = []  # type: List[Tuple[str, Optional[int]]]
+        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []
 
         # Attach our User Agent to the request
         self.headers["User-Agent"] = user_agent()
@@ -283,7 +290,6 @@ class PipSession(requests.Session):
             # Set the total number of retries that a particular request can
             # have.
             total=retries,
 
             # A 503 error from PyPI typically means that the Fastly -> Origin
             # connection got interrupted in some way. A 503 error in general
             # is typically considered a transient error so we'll go ahead and
@@ -291,7 +297,6 @@ class PipSession(requests.Session):
             # A 500 may indicate transient error in Amazon S3
             # A 520 or 527 - may indicate transient error in CloudFlare
             status_forcelist=[500, 503, 520, 527],
 
             # Add a small amount of back off between failed requests in
             # order to prevent hammering the service.
             backoff_factor=0.25,
@@ -331,16 +336,16 @@ class PipSession(requests.Session):
         for host in trusted_hosts:
             self.add_trusted_host(host, suppress_logging=True)
 
-    def update_index_urls(self, new_index_urls):
-        # type: (List[str]) -> None
+    def update_index_urls(self, new_index_urls: List[str]) -> None:
         """
         :param new_index_urls: New index urls to update the authentication
             handler with.
         """
         self.auth.index_urls = new_index_urls
 
-    def add_trusted_host(self, host, source=None, suppress_logging=False):
-        # type: (str, Optional[str], bool) -> None
+    def add_trusted_host(
+        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
+    ) -> None:
         """
         :param host: It is okay to provide a host that has previously been
            added.
@@ -348,9 +353,9 @@ class PipSession(requests.Session):
             string came from.
         """
         if not suppress_logging:
-            msg = f'adding trusted host: {host!r}'
+            msg = f"adding trusted host: {host!r}"
             if source is not None:
-                msg += f' (from {source})'
+                msg += f" (from {source})"
             logger.info(msg)
 
         host_port = parse_netloc(host)
@@ -358,35 +363,36 @@ class PipSession(requests.Session):
             self.pip_trusted_origins.append(host_port)
 
         self.mount(
-            build_url_from_netloc(host) + '/',
-            self._trusted_host_adapter
+            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
         )
+        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
         if not host_port[1]:
-            # Mount wildcard ports for the same host.
             self.mount(
-                build_url_from_netloc(host) + ':',
-                self._trusted_host_adapter
+                build_url_from_netloc(host, scheme="http") + ":",
+                self._trusted_host_adapter,
             )
+            # Mount wildcard ports for the same host.
+            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)
 
-    def iter_secure_origins(self):
-        # type: () -> Iterator[SecureOrigin]
+    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
         yield from SECURE_ORIGINS
         for host, port in self.pip_trusted_origins:
-            yield ('*', host, '*' if port is None else port)
+            yield ("*", host, "*" if port is None else port)
 
-    def is_secure_origin(self, location):
-        # type: (Link) -> bool
+    def is_secure_origin(self, location: Link) -> bool:
         # Determine if this url used a secure transport mechanism
         parsed = urllib.parse.urlparse(str(location))
         origin_protocol, origin_host, origin_port = (
-            parsed.scheme, parsed.hostname, parsed.port,
+            parsed.scheme,
+            parsed.hostname,
+            parsed.port,
         )
 
         # The protocol to use to see if the protocol matches.
         # Don't count the repository type as part of the protocol: in
         # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
         # the last scheme.)
-        origin_protocol = origin_protocol.rsplit('+', 1)[-1]
+        origin_protocol = origin_protocol.rsplit("+", 1)[-1]
 
         # Determine if our origin is a secure origin by looking through our
         # hardcoded list of secure origins, as well as any additional ones
@@ -403,9 +409,9 @@ class PipSession(requests.Session):
                 # We don't have both a valid address or a valid network, so
                 # we'll check this origin against hostnames.
                 if (
-                    origin_host and
-                    origin_host.lower() != secure_host.lower() and
-                    secure_host != "*"
+                    origin_host
+                    and origin_host.lower() != secure_host.lower()
+                    and secure_host != "*"
                 ):
                     continue
             else:
@@ -416,9 +422,9 @@ class PipSession(requests.Session):
 
             # Check to see if the port matches.
             if (
-                origin_port != secure_port and
-                secure_port != "*" and
-                secure_port is not None
+                origin_port != secure_port
+                and secure_port != "*"
+                and secure_port is not None
             ):
                 continue
 
@@ -440,10 +446,11 @@ class PipSession(requests.Session):
 
         return False
 
-    def request(self, method, url, *args, **kwargs):
-        # type: (str, str, *Any, **Any) -> Response
+    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
         # Allow setting a default timeout on a session
         kwargs.setdefault("timeout", self.timeout)
+        # Allow setting a default proxies on a session
+        kwargs.setdefault("proxies", self.proxies)
 
         # Dispatch the actual request
         return super().request(method, url, *args, **kwargs)
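
The rustc block added to user_agent() above is a compact shell-out-and-parse pattern: probe for the binary, run it with a short timeout, and ignore every failure. The same probe lifted into a standalone helper (the function name is hypothetical; the 0.5 s timeout and silent failure mirror the hunk):

import shutil
import subprocess
from typing import Optional

def rustc_version() -> Optional[str]:
    """Return the installed rustc version string, or None."""
    if shutil.which("rustc") is None:
        return None
    try:
        out = subprocess.check_output(
            ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
        )
    except Exception:
        return None  # any failure is deliberately ignored
    # Output looks like b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n';
    # the middle field is the version.
    if out.startswith(b"rustc "):
        return out.split(b" ")[1].decode()
    return None
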
@@ -1,4 +1,4 @@
-from typing import Dict, Iterator
+from typing import Dict, Generator
 
 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
 
@@ -23,40 +23,41 @@ from pip._internal.exceptions import NetworkConnectionError
 # you're not asking for a compressed file and will then decompress it
 # before sending because if that's the case I don't think it'll ever be
 # possible to make this work.
-HEADERS = {'Accept-Encoding': 'identity'}  # type: Dict[str, str]
+HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
 
 
-def raise_for_status(resp):
-    # type: (Response) -> None
-    http_error_msg = ''
+def raise_for_status(resp: Response) -> None:
+    http_error_msg = ""
     if isinstance(resp.reason, bytes):
         # We attempt to decode utf-8 first because some servers
         # choose to localize their reason strings. If the string
         # isn't utf-8, we fall back to iso-8859-1 for all other
         # encodings.
         try:
-            reason = resp.reason.decode('utf-8')
+            reason = resp.reason.decode("utf-8")
         except UnicodeDecodeError:
-            reason = resp.reason.decode('iso-8859-1')
+            reason = resp.reason.decode("iso-8859-1")
     else:
         reason = resp.reason
 
     if 400 <= resp.status_code < 500:
         http_error_msg = (
-            f'{resp.status_code} Client Error: {reason} for url: {resp.url}')
+            f"{resp.status_code} Client Error: {reason} for url: {resp.url}"
+        )
 
     elif 500 <= resp.status_code < 600:
         http_error_msg = (
-            f'{resp.status_code} Server Error: {reason} for url: {resp.url}')
+            f"{resp.status_code} Server Error: {reason} for url: {resp.url}"
+        )
 
     if http_error_msg:
         raise NetworkConnectionError(http_error_msg, response=resp)
 
 
-def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
-    # type: (Response, int) -> Iterator[bytes]
-    """Given a requests Response, provide the data chunks.
-    """
+def response_chunks(
+    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
+) -> Generator[bytes, None, None]:
+    """Given a requests Response, provide the data chunks."""
     try:
         # Special case for urllib3.
         for chunk in response.raw.stream(
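
raise_for_status() above decodes a bytes reason phrase with a UTF-8-then-Latin-1 fallback before building the error message; servers are allowed to localize that field. That decode step in isolation (the helper name is illustrative only):

from typing import Union

def decode_reason(reason: Union[str, bytes]) -> str:
    """Decode an HTTP reason phrase from a possibly localized server."""
    if isinstance(reason, bytes):
        try:
            return reason.decode("utf-8")
        except UnicodeDecodeError:
            # iso-8859-1 maps every byte value, so this cannot fail.
            return reason.decode("iso-8859-1")
    return reason
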
@@ -21,22 +21,32 @@ class PipXmlrpcTransport(xmlrpc.client.Transport):
     object.
     """
 
-    def __init__(self, index_url, session, use_datetime=False):
-        # type: (str, PipSession, bool) -> None
+    def __init__(
+        self, index_url: str, session: PipSession, use_datetime: bool = False
+    ) -> None:
         super().__init__(use_datetime)
         index_parts = urllib.parse.urlparse(index_url)
         self._scheme = index_parts.scheme
         self._session = session
 
-    def request(self, host, handler, request_body, verbose=False):
-        # type: (_HostType, str, bytes, bool) -> Tuple[_Marshallable, ...]
+    def request(
+        self,
+        host: "_HostType",
+        handler: str,
+        request_body: bytes,
+        verbose: bool = False,
+    ) -> Tuple["_Marshallable", ...]:
         assert isinstance(host, str)
         parts = (self._scheme, host, handler, None, None, None)
         url = urllib.parse.urlunparse(parts)
         try:
-            headers = {'Content-Type': 'text/xml'}
-            response = self._session.post(url, data=request_body,
-                                          headers=headers, stream=True)
+            headers = {"Content-Type": "text/xml"}
+            response = self._session.post(
+                url,
+                data=request_body,
+                headers=headers,
+                stream=True,
+            )
             raise_for_status(response)
             self.verbose = verbose
             return self.parse_response(response.raw)
@@ -44,6 +54,7 @@ class PipXmlrpcTransport(xmlrpc.client.Transport):
             assert exc.response
             logger.critical(
                 "HTTP error %s while getting %s",
-                exc.response.status_code, url,
+                exc.response.status_code,
+                url,
             )
             raise
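
For context, a transport like the one above slots straight into the standard library's XML-RPC client: anything with a compatible request() method can replace the default HTTP machinery. A minimal usage sketch with the stock transport (the endpoint URL is a placeholder):

import xmlrpc.client

# PipXmlrpcTransport would be passed here instead of the default,
# routing each POST through a PipSession.
proxy = xmlrpc.client.ServerProxy(
    "https://example.com/RPC2", transport=xmlrpc.client.Transport()
)
# Remote methods are then called as attributes, e.g. proxy.some_method(...)
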
@@ -6,19 +6,22 @@ import os
 from pip._vendor.pep517.wrappers import Pep517HookCaller
 
 from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
 from pip._internal.utils.subprocess import runner_with_spinner_message
 from pip._internal.utils.temp_dir import TempDirectory
 
 
-def generate_metadata(build_env, backend):
-    # type: (BuildEnvironment, Pep517HookCaller) -> str
+def generate_metadata(
+    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
+) -> str:
     """Generate metadata using mechanisms described in PEP 517.
 
     Returns the generated metadata directory.
     """
-    metadata_tmpdir = TempDirectory(
-        kind="modern-metadata", globally_managed=True
-    )
+    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
 
     metadata_dir = metadata_tmpdir.path
 
@@ -26,10 +29,11 @@ def generate_metadata(build_env, backend):
     # Note that Pep517HookCaller implements a fallback for
     # prepare_metadata_for_build_wheel, so we don't have to
     # consider the possibility that this hook doesn't exist.
-    runner = runner_with_spinner_message("Preparing wheel metadata")
+    runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
     with backend.subprocess_runner(runner):
-        distinfo_dir = backend.prepare_metadata_for_build_wheel(
-            metadata_dir
-        )
+        try:
+            distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
+        except InstallationSubprocessError as error:
+            raise MetadataGenerationFailed(package_details=details) from error
 
     return os.path.join(metadata_dir, distinfo_dir)
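
generate_metadata() above drives the PEP 517 prepare_metadata_for_build_wheel hook. A rough sketch of the same hook call outside pip, using the pep517 package directly (the source path and backend name are assumptions for illustration):

import tempfile

from pep517.wrappers import Pep517HookCaller

source_dir = "/path/to/project"  # must contain a pyproject.toml
hook = Pep517HookCaller(source_dir, build_backend="setuptools.build_meta")

metadata_dir = tempfile.mkdtemp(prefix="modern-metadata-")
# Returns the basename of the generated .dist-info directory.
distinfo_dir = hook.prepare_metadata_for_build_wheel(metadata_dir)
print(distinfo_dir)
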
@@ -5,7 +5,12 @@ import logging
 import os
 
 from pip._internal.build_env import BuildEnvironment
-from pip._internal.exceptions import InstallationError
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.exceptions import (
+    InstallationError,
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
 from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
 from pip._internal.utils.subprocess import call_subprocess
 from pip._internal.utils.temp_dir import TempDirectory
@@ -13,49 +18,39 @@ from pip._internal.utils.temp_dir import TempDirectory
 logger = logging.getLogger(__name__)
 
 
-def _find_egg_info(directory):
-    # type: (str) -> str
-    """Find an .egg-info subdirectory in `directory`.
-    """
-    filenames = [
-        f for f in os.listdir(directory) if f.endswith(".egg-info")
-    ]
+def _find_egg_info(directory: str) -> str:
+    """Find an .egg-info subdirectory in `directory`."""
+    filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
 
     if not filenames:
-        raise InstallationError(
-            f"No .egg-info directory found in {directory}"
-        )
+        raise InstallationError(f"No .egg-info directory found in {directory}")
 
     if len(filenames) > 1:
         raise InstallationError(
-            "More than one .egg-info directory found in {}".format(
-                directory
-            )
+            "More than one .egg-info directory found in {}".format(directory)
         )
 
     return os.path.join(directory, filenames[0])
 
 
 def generate_metadata(
-    build_env,  # type: BuildEnvironment
-    setup_py_path,  # type: str
-    source_dir,  # type: str
-    isolated,  # type: bool
-    details,  # type: str
-):
-    # type: (...) -> str
+    build_env: BuildEnvironment,
+    setup_py_path: str,
+    source_dir: str,
+    isolated: bool,
+    details: str,
+) -> str:
     """Generate metadata using setup.py-based defacto mechanisms.
 
     Returns the generated metadata directory.
     """
     logger.debug(
-        'Running setup.py (path:%s) egg_info for package %s',
-        setup_py_path, details,
+        "Running setup.py (path:%s) egg_info for package %s",
+        setup_py_path,
+        details,
     )
 
-    egg_info_dir = TempDirectory(
-        kind="pip-egg-info", globally_managed=True
-    ).path
+    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
 
     args = make_setuptools_egg_info_args(
         setup_py_path,
@@ -64,11 +59,16 @@ def generate_metadata(
     )
 
     with build_env:
-        call_subprocess(
-            args,
-            cwd=source_dir,
-            command_desc='python setup.py egg_info',
-        )
+        with open_spinner("Preparing metadata (setup.py)") as spinner:
+            try:
+                call_subprocess(
+                    args,
+                    cwd=source_dir,
+                    command_desc="python setup.py egg_info",
+                    spinner=spinner,
+                )
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
 
     # Return the .egg-info directory.
     return _find_egg_info(egg_info_dir)
@@ -10,22 +10,21 @@ logger = logging.getLogger(__name__)
 
 
 def build_wheel_pep517(
-    name,  # type: str
-    backend,  # type: Pep517HookCaller
-    metadata_directory,  # type: str
-    tempd,  # type: str
-):
-    # type: (...) -> Optional[str]
+    name: str,
+    backend: Pep517HookCaller,
+    metadata_directory: str,
+    tempd: str,
+) -> Optional[str]:
     """Build one InstallRequirement using the PEP 517 build process.
 
     Returns path to wheel if successfully built. Otherwise, returns None.
     """
     assert metadata_directory is not None
     try:
-        logger.debug('Destination directory: %s', tempd)
+        logger.debug("Destination directory: %s", tempd)
 
         runner = runner_with_spinner_message(
-            f'Building wheel for {name} (PEP 517)'
+            f"Building wheel for {name} (pyproject.toml)"
         )
         with backend.subprocess_runner(runner):
             wheel_name = backend.build_wheel(
@@ -33,6 +32,6 @@ def build_wheel_pep517(
                 metadata_directory=metadata_directory,
             )
     except Exception:
-        logger.error('Failed building wheel for %s', name)
+        logger.error("Failed building wheel for %s", name)
         return None
     return os.path.join(tempd, wheel_name)
@@ -4,59 +4,51 @@ from typing import List, Optional
 
 from pip._internal.cli.spinners import open_spinner
 from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
-from pip._internal.utils.subprocess import (
-    LOG_DIVIDER,
-    call_subprocess,
-    format_command_args,
-)
+from pip._internal.utils.subprocess import call_subprocess, format_command_args
 
 logger = logging.getLogger(__name__)
 
 
 def format_command_result(
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> str
+    command_args: List[str],
+    command_output: str,
+) -> str:
     """Format command information for logging."""
     command_desc = format_command_args(command_args)
-    text = f'Command arguments: {command_desc}\n'
+    text = f"Command arguments: {command_desc}\n"
 
     if not command_output:
-        text += 'Command output: None'
+        text += "Command output: None"
     elif logger.getEffectiveLevel() > logging.DEBUG:
-        text += 'Command output: [use --verbose to show]'
+        text += "Command output: [use --verbose to show]"
     else:
-        if not command_output.endswith('\n'):
-            command_output += '\n'
-        text += f'Command output:\n{command_output}{LOG_DIVIDER}'
+        if not command_output.endswith("\n"):
+            command_output += "\n"
+        text += f"Command output:\n{command_output}"
 
     return text
 
 
 def get_legacy_build_wheel_path(
-    names,  # type: List[str]
-    temp_dir,  # type: str
-    name,  # type: str
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> Optional[str]
+    names: List[str],
+    temp_dir: str,
+    name: str,
+    command_args: List[str],
+    command_output: str,
+) -> Optional[str]:
     """Return the path to the wheel in the temporary build directory."""
     # Sort for determinism.
     names = sorted(names)
     if not names:
-        msg = (
-            'Legacy build of wheel for {!r} created no files.\n'
-        ).format(name)
+        msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
         msg += format_command_result(command_args, command_output)
         logger.warning(msg)
         return None
 
     if len(names) > 1:
         msg = (
-            'Legacy build of wheel for {!r} created more than one file.\n'
-            'Filenames (choosing first): {}\n'
+            "Legacy build of wheel for {!r} created more than one file.\n"
+            "Filenames (choosing first): {}\n"
         ).format(name, names)
         msg += format_command_result(command_args, command_output)
         logger.warning(msg)
@@ -65,14 +57,13 @@ def get_legacy_build_wheel_path(
 
 
 def build_wheel_legacy(
-    name,  # type: str
-    setup_py_path,  # type: str
-    source_dir,  # type: str
-    global_options,  # type: List[str]
-    build_options,  # type: List[str]
-    tempd,  # type: str
-):
-    # type: (...) -> Optional[str]
+    name: str,
+    setup_py_path: str,
+    source_dir: str,
+    global_options: List[str],
+    build_options: List[str],
+    tempd: str,
+) -> Optional[str]:
     """Build one unpacked package using the "legacy" build process.
 
     Returns path to wheel if successfully built. Otherwise, returns None.
@@ -84,19 +75,20 @@ def build_wheel_legacy(
         destination_dir=tempd,
     )
 
-    spin_message = f'Building wheel for {name} (setup.py)'
+    spin_message = f"Building wheel for {name} (setup.py)"
     with open_spinner(spin_message) as spinner:
-        logger.debug('Destination directory: %s', tempd)
+        logger.debug("Destination directory: %s", tempd)
 
         try:
             output = call_subprocess(
                 wheel_args,
+                command_desc="python setup.py bdist_wheel",
                 cwd=source_dir,
                 spinner=spinner,
             )
         except Exception:
             spinner.finish("error")
-            logger.error('Failed building wheel for %s', name)
+            logger.error("Failed building wheel for %s", name)
             return None
 
         names = os.listdir(tempd)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from collections import namedtuple
|
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
|
||||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple
|
|
||||||
|
|
||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
from pip._vendor.pkg_resources import RequirementParseError
|
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||||
|
|
||||||
from pip._internal.distributions import make_distribution_for_install_requirement
|
from pip._internal.distributions import make_distribution_for_install_requirement
|
||||||
|
from pip._internal.metadata import get_default_environment
|
||||||
|
from pip._internal.metadata.base import DistributionVersion
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
from pip._internal.utils.misc import get_installed_distributions
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from pip._vendor.packaging.utils import NormalizedName
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Shorthands
|
|
||||||
PackageSet = Dict['NormalizedName', 'PackageDetails']
|
|
||||||
Missing = Tuple[str, Any]
|
|
||||||
Conflicting = Tuple[str, str, Any]
|
|
||||||
|
|
||||||
MissingDict = Dict['NormalizedName', List[Missing]]
|
class PackageDetails(NamedTuple):
|
||||||
ConflictingDict = Dict['NormalizedName', List[Conflicting]]
|
version: DistributionVersion
|
||||||
|
dependencies: List[Requirement]
|
||||||
|
|
||||||
|
|
||||||
|
# Shorthands
|
||||||
|
PackageSet = Dict[NormalizedName, PackageDetails]
|
||||||
|
Missing = Tuple[NormalizedName, Requirement]
|
||||||
|
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
|
||||||
|
|
||||||
|
MissingDict = Dict[NormalizedName, List[Missing]]
|
||||||
|
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
|
||||||
CheckResult = Tuple[MissingDict, ConflictingDict]
|
CheckResult = Tuple[MissingDict, ConflictingDict]
|
||||||
 ConflictDetails = Tuple[PackageSet, CheckResult]

-PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


-def create_package_set_from_installed(**kwargs: Any) -> Tuple["PackageSet", bool]:
-    """Converts a list of distributions into a PackageSet.
-    """
-    # Default to using all packages installed on the system
-    if kwargs == {}:
-        kwargs = {"local_only": False, "skip": ()}
+def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
+    """Converts a list of distributions into a PackageSet."""
     package_set = {}
     problems = False
-    for dist in get_installed_distributions(**kwargs):
-        name = canonicalize_name(dist.project_name)
+    env = get_default_environment()
+    for dist in env.iter_installed_distributions(local_only=False, skip=()):
+        name = dist.canonical_name
         try:
-            package_set[name] = PackageDetails(dist.version, dist.requires())
-        except (OSError, RequirementParseError) as e:
-            # Don't crash on unreadable or broken metadata
+            dependencies = list(dist.iter_dependencies())
+            package_set[name] = PackageDetails(dist.version, dependencies)
+        except (OSError, ValueError) as e:
+            # Don't crash on unreadable or broken metadata.
             logger.warning("Error parsing requirements for %s: %s", name, e)
             problems = True
     return package_set, problems


-def check_package_set(package_set, should_ignore=None):
-    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
+def check_package_set(
+    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
+) -> CheckResult:
     """Check if a package set is consistent

     If should_ignore is passed, it should be a callable that takes a
@@ -63,14 +62,14 @@ def check_package_set(package_set, should_ignore=None):

     for package_name, package_detail in package_set.items():
         # Info about dependencies of package_name
-        missing_deps = set()  # type: Set[Missing]
-        conflicting_deps = set()  # type: Set[Conflicting]
+        missing_deps: Set[Missing] = set()
+        conflicting_deps: Set[Conflicting] = set()

         if should_ignore and should_ignore(package_name):
             continue

-        for req in package_detail.requires:
-            name = canonicalize_name(req.project_name)
+        for req in package_detail.dependencies:
+            name = canonicalize_name(req.name)

             # Check if it's missing
             if name not in package_set:
@@ -82,7 +81,7 @@ def check_package_set(package_set, should_ignore=None):
                 continue

             # Check if there's a conflict
-            version = package_set[name].version  # type: str
+            version = package_set[name].version
             if not req.specifier.contains(version, prereleases=True):
                 conflicting_deps.add((name, version, req))
@@ -94,8 +93,7 @@ def check_package_set(package_set, should_ignore=None):
     return missing, conflicting


-def check_install_conflicts(to_install):
-    # type: (List[InstallRequirement]) -> ConflictDetails
+def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
     """For checking if the dependency graph would be consistent after \
     installing given requirements
     """
@@ -111,41 +109,39 @@ def check_install_conflicts(to_install):
         package_set,
         check_package_set(
             package_set, should_ignore=lambda name: name not in whitelist
-        )
+        ),
     )


-def _simulate_installation_of(to_install, package_set):
-    # type: (List[InstallRequirement], PackageSet) -> Set[NormalizedName]
-    """Computes the version of packages after installing to_install.
-    """
+def _simulate_installation_of(
+    to_install: List[InstallRequirement], package_set: PackageSet
+) -> Set[NormalizedName]:
+    """Computes the version of packages after installing to_install."""

     # Keep track of packages that were installed
     installed = set()

     # Modify it as installing requirement_set would (assuming no errors)
     for inst_req in to_install:
         abstract_dist = make_distribution_for_install_requirement(inst_req)
-        dist = abstract_dist.get_pkg_resources_distribution()
-
-        assert dist is not None
-        name = canonicalize_name(dist.key)
-        package_set[name] = PackageDetails(dist.version, dist.requires())
+        dist = abstract_dist.get_metadata_distribution()
+        name = dist.canonical_name
+        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))

         installed.add(name)

     return installed


-def _create_whitelist(would_be_installed, package_set):
-    # type: (Set[NormalizedName], PackageSet) -> Set[NormalizedName]
+def _create_whitelist(
+    would_be_installed: Set[NormalizedName], package_set: PackageSet
+) -> Set[NormalizedName]:
    packages_affected = set(would_be_installed)

     for package_name in package_set:
         if package_name in packages_affected:
             continue

-        for req in package_set[package_name].requires:
+        for req in package_set[package_name].dependencies:
             if canonicalize_name(req.name) in packages_affected:
                 packages_affected.add(package_name)
                 break
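
The refactor above keeps the check itself intact: a PackageSet maps canonical names to (version, dependencies), and a package is inconsistent when a declared dependency is absent or installed at a non-matching version. A hedged, standalone sketch of the missing-dependency half follows (the PackageDetails shape and the sample data are invented for illustration; this is not pip's API):

from typing import Dict, List, NamedTuple, Set, Tuple

class PackageDetails(NamedTuple):
    version: str
    dependencies: List[str]  # requirement names only, for simplicity

PackageSet = Dict[str, PackageDetails]

def find_missing(package_set: PackageSet) -> Set[Tuple[str, str]]:
    """Report (package, missing_dependency) pairs, as check_package_set does."""
    missing = set()
    for name, detail in package_set.items():
        for dep in detail.dependencies:
            if dep not in package_set:
                missing.add((name, dep))
    return missing

if __name__ == "__main__":
    packages = {
        "requests": PackageDetails("2.26.0", ["urllib3", "idna"]),
        "urllib3": PackageDetails("1.26.7", []),
    }
    print(find_missing(packages))  # {('requests', 'idna')}
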
@@ -1,73 +1,46 @@
 import collections
 import logging
 import os
-from typing import (
-    Container,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Union,
-)
+from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set

 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import Distribution, Requirement, RequirementParseError
+from pip._vendor.packaging.version import Version

 from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.metadata import BaseDistribution, get_environment
 from pip._internal.req.constructors import (
     install_req_from_editable,
     install_req_from_line,
 )
 from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.direct_url_helpers import (
-    direct_url_as_pep440_direct_reference,
-    dist_get_direct_url,
-)
-from pip._internal.utils.misc import dist_is_editable, get_installed_distributions
+from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference

 logger = logging.getLogger(__name__)

-RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
+class _EditableInfo(NamedTuple):
+    requirement: str
+    comments: List[str]


 def freeze(
-    requirement=None,  # type: Optional[List[str]]
-    find_links=None,  # type: Optional[List[str]]
-    local_only=False,  # type: bool
-    user_only=False,  # type: bool
-    paths=None,  # type: Optional[List[str]]
-    isolated=False,  # type: bool
-    exclude_editable=False,  # type: bool
-    skip=()  # type: Container[str]
-):
-    # type: (...) -> Iterator[str]
-    find_links = find_links or []
-
-    for link in find_links:
-        yield f'-f {link}'
-    installations = {}  # type: Dict[str, FrozenRequirement]
-
-    for dist in get_installed_distributions(
-        local_only=local_only,
-        skip=(),
-        user_only=user_only,
-        paths=paths
-    ):
-        try:
-            req = FrozenRequirement.from_dist(dist)
-        except RequirementParseError as exc:
-            # We include dist rather than dist.project_name because the
-            # dist string includes more information, like the version and
-            # location. We also include the exception message to aid
-            # troubleshooting.
-            logger.warning(
-                'Could not generate requirement for distribution %r: %s',
-                dist, exc
-            )
-            continue
+    requirement: Optional[List[str]] = None,
+    local_only: bool = False,
+    user_only: bool = False,
+    paths: Optional[List[str]] = None,
+    isolated: bool = False,
+    exclude_editable: bool = False,
+    skip: Container[str] = (),
+) -> Generator[str, None, None]:
+    installations: Dict[str, FrozenRequirement] = {}
+
+    dists = get_environment(paths).iter_installed_distributions(
+        local_only=local_only,
+        skip=(),
+        user_only=user_only,
+    )
+    for dist in dists:
+        req = FrozenRequirement.from_dist(dist)
         if exclude_editable and req.editable:
             continue
         installations[req.canonical_name] = req
@@ -77,42 +50,50 @@ def freeze(
     # should only be emitted once, even if the same option is in multiple
     # requirements files, so we need to keep track of what has been emitted
     # so that we don't emit it again if it's seen again
-    emitted_options = set()  # type: Set[str]
+    emitted_options: Set[str] = set()
     # keep track of which files a requirement is in so that we can
     # give an accurate warning if a requirement appears multiple times.
-    req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
+    req_files: Dict[str, List[str]] = collections.defaultdict(list)
     for req_file_path in requirement:
         with open(req_file_path) as req_file:
             for line in req_file:
-                if (not line.strip() or
-                        line.strip().startswith('#') or
-                        line.startswith((
-                            '-r', '--requirement',
-                            '-f', '--find-links',
-                            '-i', '--index-url',
-                            '--pre',
-                            '--trusted-host',
-                            '--process-dependency-links',
-                            '--extra-index-url',
-                            '--use-feature'))):
+                if (
+                    not line.strip()
+                    or line.strip().startswith("#")
+                    or line.startswith(
+                        (
+                            "-r",
+                            "--requirement",
+                            "-f",
+                            "--find-links",
+                            "-i",
+                            "--index-url",
+                            "--pre",
+                            "--trusted-host",
+                            "--process-dependency-links",
+                            "--extra-index-url",
+                            "--use-feature",
+                        )
+                    )
+                ):
                     line = line.rstrip()
                     if line not in emitted_options:
                         emitted_options.add(line)
                         yield line
                     continue

-                if line.startswith('-e') or line.startswith('--editable'):
-                    if line.startswith('-e'):
+                if line.startswith("-e") or line.startswith("--editable"):
+                    if line.startswith("-e"):
                         line = line[2:].strip()
                     else:
-                        line = line[len('--editable'):].strip().lstrip('=')
+                        line = line[len("--editable") :].strip().lstrip("=")
                     line_req = install_req_from_editable(
                         line,
                         isolated=isolated,
                     )
                 else:
                     line_req = install_req_from_line(
-                        COMMENT_RE.sub('', line).strip(),
+                        COMMENT_RE.sub("", line).strip(),
                         isolated=isolated,
                     )

@@ -120,15 +101,15 @@ def freeze(
                     logger.info(
                         "Skipping line in requirement file [%s] because "
                         "it's not clear what it would install: %s",
-                        req_file_path, line.strip(),
+                        req_file_path,
+                        line.strip(),
                     )
                     logger.info(
                         " (add #egg=PackageName to the URL to avoid"
                         " this warning)"
                     )
                 else:
-                    line_req_canonical_name = canonicalize_name(
-                        line_req.name)
+                    line_req_canonical_name = canonicalize_name(line_req.name)
                     if line_req_canonical_name not in installations:
                         # either it's not installed, or it is installed
                         # but has been processed already
@@ -137,14 +118,13 @@ def freeze(
                             "Requirement file [%s] contains %s, but "
                             "package %r is not installed",
                             req_file_path,
-                            COMMENT_RE.sub('', line).strip(),
-                            line_req.name
+                            COMMENT_RE.sub("", line).strip(),
+                            line_req.name,
                         )
                     else:
                         req_files[line_req.name].append(req_file_path)
                 else:
-                    yield str(installations[
-                        line_req_canonical_name]).rstrip()
+                    yield str(installations[line_req_canonical_name]).rstrip()
                     del installations[line_req_canonical_name]
                     req_files[line_req.name].append(req_file_path)
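
freeze() passes global option lines from requirements files straight through (each emitted at most once) rather than parsing them as requirements. A minimal standalone sketch of that classification, assuming the prefix tuple shown in the hunk above (illustrative only; pip's real logic also handles editables and strips comments via COMMENT_RE):

OPTION_PREFIXES = (
    "-r", "--requirement", "-f", "--find-links", "-i", "--index-url",
    "--pre", "--trusted-host", "--process-dependency-links",
    "--extra-index-url", "--use-feature",
)

def is_passthrough_option(line: str) -> bool:
    """True for blank lines, comments, and global options that freeze()
    re-emits verbatim instead of parsing as requirements."""
    stripped = line.strip()
    return not stripped or stripped.startswith("#") or line.startswith(OPTION_PREFIXES)

assert is_passthrough_option("--extra-index-url https://example.org/simple")
assert not is_passthrough_option("requests==2.26.0")
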
@@ -152,83 +132,98 @@ def freeze(
     # single requirements file or in different requirements files).
     for name, files in req_files.items():
         if len(files) > 1:
-            logger.warning("Requirement %s included multiple times [%s]",
-                           name, ', '.join(sorted(set(files))))
+            logger.warning(
+                "Requirement %s included multiple times [%s]",
+                name,
+                ", ".join(sorted(set(files))),
+            )

-    yield(
-        '## The following requirements were added by '
-        'pip freeze:'
-    )
-    for installation in sorted(
-            installations.values(), key=lambda x: x.name.lower()):
+    yield ("## The following requirements were added by pip freeze:")
+    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
         if installation.canonical_name not in skip:
             yield str(installation).rstrip()


-def get_requirement_info(dist):
-    # type: (Distribution) -> RequirementInfo
+def _format_as_name_version(dist: BaseDistribution) -> str:
+    if isinstance(dist.version, Version):
+        return f"{dist.raw_name}=={dist.version}"
+    return f"{dist.raw_name}==={dist.version}"
+
+
+def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
     """
-    Compute and return values (req, editable, comments) for use in
+    Compute and return values (req, comments) for use in
     FrozenRequirement.from_dist().
     """
-    if not dist_is_editable(dist):
-        return (None, False, [])
-
-    location = os.path.normcase(os.path.abspath(dist.location))
-
-    from pip._internal.vcs import RemoteNotFoundError, vcs
+    editable_project_location = dist.editable_project_location
+    assert editable_project_location
+    location = os.path.normcase(os.path.abspath(editable_project_location))
+
+    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
+
     vcs_backend = vcs.get_backend_for_dir(location)

     if vcs_backend is None:
-        req = dist.as_requirement()
+        display = _format_as_name_version(dist)
         logger.debug(
-            'No VCS found for editable requirement "%s" in: %r', req,
+            'No VCS found for editable requirement "%s" in: %r',
+            display,
             location,
         )
-        comments = [
-            f'# Editable install with no version control ({req})'
-        ]
-        return (location, True, comments)
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable install with no version control ({display})"],
+        )
+
+    vcs_name = type(vcs_backend).__name__

     try:
-        req = vcs_backend.get_src_requirement(location, dist.project_name)
+        req = vcs_backend.get_src_requirement(location, dist.raw_name)
     except RemoteNotFoundError:
-        req = dist.as_requirement()
-        comments = [
-            '# Editable {} install with no remote ({})'.format(
-                type(vcs_backend).__name__, req,
-            )
-        ]
-        return (location, True, comments)
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
+        )
+    except RemoteNotValidError as ex:
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[
+                f"# Editable {vcs_name} install ({display}) with either a deleted "
+                f"local remote or invalid URI:",
+                f"# '{ex.url}'",
+            ],
+        )
     except BadCommand:
         logger.warning(
-            'cannot determine version of editable source in %s '
-            '(%s command not found in path)',
+            "cannot determine version of editable source in %s "
+            "(%s command not found in path)",
             location,
             vcs_backend.name,
         )
-        return (None, True, [])
+        return _EditableInfo(requirement=location, comments=[])

     except InstallationError as exc:
-        logger.warning(
-            "Error when trying to get requirement for VCS system %s, "
-            "falling back to uneditable format", exc
-        )
+        logger.warning("Error when trying to get requirement for VCS system %s", exc)
     else:
-        return (req, True, [])
+        return _EditableInfo(requirement=req, comments=[])

-    logger.warning(
-        'Could not determine repository location of %s', location
-    )
-    comments = ['## !! Could not determine repository location']
-
-    return (None, False, comments)
+    logger.warning("Could not determine repository location of %s", location)
+
+    return _EditableInfo(
+        requirement=location,
+        comments=["## !! Could not determine repository location"],
+    )


 class FrozenRequirement:
-    def __init__(self, name, req, editable, comments=()):
-        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
+    def __init__(
+        self,
+        name: str,
+        req: str,
+        editable: bool,
+        comments: Iterable[str] = (),
+    ) -> None:
         self.name = name
         self.canonical_name = canonicalize_name(name)
         self.req = req
@@ -236,29 +231,24 @@ class FrozenRequirement:
         self.comments = comments

     @classmethod
-    def from_dist(cls, dist):
-        # type: (Distribution) -> FrozenRequirement
-        # TODO `get_requirement_info` is taking care of editable requirements.
-        # TODO This should be refactored when we will add detection of
-        #      editable that provide .dist-info metadata.
-        req, editable, comments = get_requirement_info(dist)
-        if req is None and not editable:
-            # if PEP 610 metadata is present, attempt to use it
-            direct_url = dist_get_direct_url(dist)
+    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
+        editable = dist.editable
+        if editable:
+            req, comments = _get_editable_info(dist)
+        else:
+            comments = []
+            direct_url = dist.direct_url
             if direct_url:
-                req = direct_url_as_pep440_direct_reference(
-                    direct_url, dist.project_name
-                )
-                comments = []
-        if req is None:
-            # name==version requirement
-            req = dist.as_requirement()
+                # if PEP 610 metadata is present, use it
+                req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
+            else:
+                # name==version requirement
+                req = _format_as_name_version(dist)

-        return cls(dist.project_name, req, editable, comments=comments)
+        return cls(dist.raw_name, req, editable, comments=comments)

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         req = self.req
         if self.editable:
-            req = f'-e {req}'
-        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
+            req = f"-e {req}"
+        return "\n".join(list(self.comments) + [str(req)]) + "\n"
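
The new _format_as_name_version helper picks "==" for PEP 440 versions and falls back to the arbitrary-equality "===" pin for legacy version strings. An illustrative standalone equivalent (using the packaging library, which pip vendors; having it installed separately is an assumption of this sketch):

from packaging.version import InvalidVersion, Version

def format_as_name_version(raw_name: str, raw_version: str) -> str:
    try:
        version = Version(raw_version)
    except InvalidVersion:
        # Legacy version string: pin with arbitrary equality.
        return f"{raw_name}==={raw_version}"
    return f"{raw_name}=={version}"

print(format_as_name_version("requests", "2.26.0"))      # requests==2.26.0
print(format_as_name_version("legacy-pkg", "1.0.dogfood"))  # legacy-pkg===1.0.dogfood
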
@@ -12,22 +12,21 @@ logger = logging.getLogger(__name__)


 def install_editable(
-    install_options,  # type: List[str]
-    global_options,  # type: Sequence[str]
-    prefix,  # type: Optional[str]
-    home,  # type: Optional[str]
-    use_user_site,  # type: bool
-    name,  # type: str
-    setup_py_path,  # type: str
-    isolated,  # type: bool
-    build_env,  # type: BuildEnvironment
-    unpacked_source_directory,  # type: str
-):
-    # type: (...) -> None
+    install_options: List[str],
+    global_options: Sequence[str],
+    prefix: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+    name: str,
+    setup_py_path: str,
+    isolated: bool,
+    build_env: BuildEnvironment,
+    unpacked_source_directory: str,
+) -> None:
     """Install a package in editable mode. Most arguments are pass-through
     to setuptools.
     """
-    logger.info('Running setup.py develop for %s', name)
+    logger.info("Running setup.py develop for %s", name)

     args = make_setuptools_develop_args(
         setup_py_path,
@@ -43,5 +42,6 @@ def install_editable(
     with build_env:
         call_subprocess(
             args,
+            command_desc="python setup.py develop",
             cwd=unpacked_source_directory,
         )
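
For context, the legacy editable path ultimately shells out to setuptools. A hedged illustration of what such an invocation boils down to (not pip's API; pip builds the argv via make_setuptools_develop_args and runs it through call_subprocess with the command_desc added above):

import subprocess
import sys

def run_setup_py_develop(setup_py_path: str, source_dir: str) -> None:
    # "develop --no-deps" installs the project in editable mode without
    # pulling in its dependencies; check=True fails loudly on a bad exit code.
    args = [sys.executable, setup_py_path, "develop", "--no-deps"]
    subprocess.run(args, cwd=source_dir, check=True)
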
@@ -3,14 +3,12 @@

 import logging
 import os
-import sys

 from distutils.util import change_root
 from typing import List, Optional, Sequence

 from pip._internal.build_env import BuildEnvironment
-from pip._internal.exceptions import InstallationError
+from pip._internal.exceptions import InstallationError, LegacyInstallFailure
 from pip._internal.models.scheme import Scheme
-from pip._internal.utils.logging import indent_log
 from pip._internal.utils.misc import ensure_dir
 from pip._internal.utils.setuptools_build import make_setuptools_install_args
 from pip._internal.utils.subprocess import runner_with_spinner_message
@@ -19,35 +17,65 @@ from pip._internal.utils.temp_dir import TempDirectory
 logger = logging.getLogger(__name__)


-class LegacyInstallFailure(Exception):
-    def __init__(self):
-        # type: () -> None
-        self.parent = sys.exc_info()
+def write_installed_files_from_setuptools_record(
+    record_lines: List[str],
+    root: Optional[str],
+    req_description: str,
+) -> None:
+    def prepend_root(path: str) -> str:
+        if root is None or not os.path.isabs(path):
+            return path
+        else:
+            return change_root(root, path)
+
+    for line in record_lines:
+        directory = os.path.dirname(line)
+        if directory.endswith(".egg-info"):
+            egg_info_dir = prepend_root(directory)
+            break
+    else:
+        message = (
+            "{} did not indicate that it installed an "
+            ".egg-info directory. Only setup.py projects "
+            "generating .egg-info directories are supported."
+        ).format(req_description)
+        raise InstallationError(message)
+
+    new_lines = []
+    for line in record_lines:
+        filename = line.strip()
+        if os.path.isdir(filename):
+            filename += os.path.sep
+        new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
+    new_lines.sort()
+    ensure_dir(egg_info_dir)
+    inst_files_path = os.path.join(egg_info_dir, "installed-files.txt")
+    with open(inst_files_path, "w") as f:
+        f.write("\n".join(new_lines) + "\n")


 def install(
-    install_options,  # type: List[str]
-    global_options,  # type: Sequence[str]
-    root,  # type: Optional[str]
-    home,  # type: Optional[str]
-    prefix,  # type: Optional[str]
-    use_user_site,  # type: bool
-    pycompile,  # type: bool
-    scheme,  # type: Scheme
-    setup_py_path,  # type: str
-    isolated,  # type: bool
-    req_name,  # type: str
-    build_env,  # type: BuildEnvironment
-    unpacked_source_directory,  # type: str
-    req_description,  # type: str
-):
-    # type: (...) -> bool
+    install_options: List[str],
+    global_options: Sequence[str],
+    root: Optional[str],
+    home: Optional[str],
+    prefix: Optional[str],
+    use_user_site: bool,
+    pycompile: bool,
+    scheme: Scheme,
+    setup_py_path: str,
+    isolated: bool,
+    req_name: str,
+    build_env: BuildEnvironment,
+    unpacked_source_directory: str,
+    req_description: str,
+) -> bool:

     header_dir = scheme.headers

     with TempDirectory(kind="record") as temp_dir:
         try:
-            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
+            record_filename = os.path.join(temp_dir.path, "install-record.txt")
             install_args = make_setuptools_install_args(
                 setup_py_path,
                 global_options=global_options,
@@ -65,20 +93,20 @@ def install(
             runner = runner_with_spinner_message(
                 f"Running setup.py install for {req_name}"
             )
-            with indent_log(), build_env:
+            with build_env:
                 runner(
                     cmd=install_args,
                     cwd=unpacked_source_directory,
                 )

             if not os.path.exists(record_filename):
-                logger.debug('Record file %s not found', record_filename)
+                logger.debug("Record file %s not found", record_filename)
                 # Signal to the caller that we didn't install the new package
                 return False

-        except Exception:
+        except Exception as e:
             # Signal to the caller that we didn't install the new package
-            raise LegacyInstallFailure
+            raise LegacyInstallFailure(package_details=req_name) from e

         # At this point, we have successfully installed the requirement.

@@ -88,38 +116,5 @@ def install(
     with open(record_filename) as f:
         record_lines = f.read().splitlines()

-    def prepend_root(path):
-        # type: (str) -> str
-        if root is None or not os.path.isabs(path):
-            return path
-        else:
-            return change_root(root, path)
-
-    for line in record_lines:
-        directory = os.path.dirname(line)
-        if directory.endswith('.egg-info'):
-            egg_info_dir = prepend_root(directory)
-            break
-    else:
-        message = (
-            "{} did not indicate that it installed an "
-            ".egg-info directory. Only setup.py projects "
-            "generating .egg-info directories are supported."
-        ).format(req_description)
-        raise InstallationError(message)
-
-    new_lines = []
-    for line in record_lines:
-        filename = line.strip()
-        if os.path.isdir(filename):
-            filename += os.path.sep
-        new_lines.append(
-            os.path.relpath(prepend_root(filename), egg_info_dir)
-        )
-    new_lines.sort()
-    ensure_dir(egg_info_dir)
-    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
-    with open(inst_files_path, 'w') as f:
-        f.write('\n'.join(new_lines) + '\n')
-
+    write_installed_files_from_setuptools_record(record_lines, root, req_description)
     return True
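
The extracted write_installed_files_from_setuptools_record helper rewrites the setuptools install record into an installed-files.txt next to the .egg-info directory, with paths made relative to that directory. A standalone demonstration of the same transformation on made-up sample data (pkg.egg-info and the file list are invented; output shown for POSIX paths):

import os
import tempfile

with tempfile.TemporaryDirectory() as root:
    egg_info = os.path.join(root, "site-packages", "pkg.egg-info")
    os.makedirs(egg_info)
    module = os.path.join(root, "site-packages", "pkg.py")
    open(module, "w").close()
    record_lines = [os.path.join(egg_info, "PKG-INFO"), module]

    # Locate the .egg-info directory exactly as the helper does.
    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith(".egg-info"):
            egg_info_dir = directory
            break

    # Paths in installed-files.txt are stored relative to the .egg-info dir.
    new_lines = sorted(os.path.relpath(line, egg_info_dir) for line in record_lines)
    with open(os.path.join(egg_info_dir, "installed-files.txt"), "w") as f:
        f.write("\n".join(new_lines) + "\n")
    print(new_lines)  # ['../pkg.py', 'PKG-INFO']
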
@@ -22,6 +22,7 @@ from typing import (
     BinaryIO,
     Callable,
     Dict,
+    Generator,
     Iterable,
     Iterator,
     List,
@@ -35,14 +36,17 @@ from typing import (
 )
 from zipfile import ZipFile, ZipInfo

-from pip._vendor import pkg_resources
 from pip._vendor.distlib.scripts import ScriptMaker
 from pip._vendor.distlib.util import get_export_entry
-from pip._vendor.pkg_resources import Distribution
-from pip._vendor.six import ensure_str, ensure_text, reraise
+from pip._vendor.packaging.utils import canonicalize_name

 from pip._internal.exceptions import InstallationError
 from pip._internal.locations import get_major_minor_version
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
 from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
 from pip._internal.models.scheme import SCHEME_KEYS, Scheme
 from pip._internal.utils.filesystem import adjacent_tmp_file, replace
@@ -53,98 +57,76 @@ from pip._internal.utils.unpacking import (
     set_extracted_file_to_default_mode_plus_executable,
     zip_item_is_executable,
 )
-from pip._internal.utils.wheel import parse_wheel, pkg_resources_distribution_for_wheel
+from pip._internal.utils.wheel import parse_wheel

 if TYPE_CHECKING:
     from typing import Protocol

     class File(Protocol):
-        src_record_path = None  # type: RecordPath
-        dest_path = None  # type: str
-        changed = None  # type: bool
+        src_record_path: "RecordPath"
+        dest_path: str
+        changed: bool

-        def save(self):
-            # type: () -> None
+        def save(self) -> None:
             pass


 logger = logging.getLogger(__name__)

-RecordPath = NewType('RecordPath', str)
+RecordPath = NewType("RecordPath", str)
 InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]


-def rehash(path, blocksize=1 << 20):
-    # type: (str, int) -> Tuple[str, str]
+def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
     """Return (encoded_digest, length) for path using hashlib.sha256()"""
     h, length = hash_file(path, blocksize)
-    digest = 'sha256=' + urlsafe_b64encode(
-        h.digest()
-    ).decode('latin1').rstrip('=')
+    digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
     return (digest, str(length))


-def csv_io_kwargs(mode):
-    # type: (str) -> Dict[str, Any]
+def csv_io_kwargs(mode: str) -> Dict[str, Any]:
     """Return keyword arguments to properly open a CSV file
     in the given mode.
     """
-    return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
+    return {"mode": mode, "newline": "", "encoding": "utf-8"}


-def fix_script(path):
-    # type: (str) -> bool
+def fix_script(path: str) -> bool:
     """Replace #!python with #!/path/to/python
     Return True if file was changed.
     """
     # XXX RECORD hashes will need to be updated
     assert os.path.isfile(path)

-    with open(path, 'rb') as script:
+    with open(path, "rb") as script:
         firstline = script.readline()
-        if not firstline.startswith(b'#!python'):
+        if not firstline.startswith(b"#!python"):
             return False
         exename = sys.executable.encode(sys.getfilesystemencoding())
-        firstline = b'#!' + exename + os.linesep.encode("ascii")
+        firstline = b"#!" + exename + os.linesep.encode("ascii")
         rest = script.read()
-    with open(path, 'wb') as script:
+    with open(path, "wb") as script:
         script.write(firstline)
         script.write(rest)
     return True


-def wheel_root_is_purelib(metadata):
-    # type: (Message) -> bool
+def wheel_root_is_purelib(metadata: Message) -> bool:
     return metadata.get("Root-Is-Purelib", "").lower() == "true"


-def get_entrypoints(distribution):
-    # type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]]
-    # get the entry points and then the script names
-    try:
-        console = distribution.get_entry_map('console_scripts')
-        gui = distribution.get_entry_map('gui_scripts')
-    except KeyError:
-        # Our dict-based Distribution raises KeyError if entry_points.txt
-        # doesn't exist.
-        return {}, {}
-
-    def _split_ep(s):
-        # type: (pkg_resources.EntryPoint) -> Tuple[str, str]
-        """get the string representation of EntryPoint,
-        remove space and split on '='
-        """
-        split_parts = str(s).replace(" ", "").split("=")
-        return split_parts[0], split_parts[1]
-
-    # convert the EntryPoint objects into strings with module:function
-    console = dict(_split_ep(v) for v in console.values())
-    gui = dict(_split_ep(v) for v in gui.values())
-    return console, gui
+def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
+    console_scripts = {}
+    gui_scripts = {}
+    for entry_point in dist.iter_entry_points():
+        if entry_point.group == "console_scripts":
+            console_scripts[entry_point.name] = entry_point.value
+        elif entry_point.group == "gui_scripts":
+            gui_scripts[entry_point.name] = entry_point.value
+    return console_scripts, gui_scripts


-def message_about_scripts_not_on_PATH(scripts):
-    # type: (Sequence[str]) -> Optional[str]
+def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
     """Determine if any scripts are not on PATH and format a warning.
     Returns a warning message if one or more scripts are not on PATH,
     otherwise None.
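
get_entrypoints now iterates entry points and groups them by group name instead of going through pkg_resources entry maps. A hedged standalone parallel using the standard library's importlib.metadata, whose EntryPoint objects expose the same name/value/group triple (pip's BaseDistribution provides a similar iter_entry_points()):

from importlib.metadata import distribution

def entrypoints_for(dist_name: str):
    console, gui = {}, {}
    for ep in distribution(dist_name).entry_points:
        if ep.group == "console_scripts":
            console[ep.name] = ep.value
        elif ep.group == "gui_scripts":
            gui[ep.name] = ep.value
    return console, gui

# e.g. entrypoints_for("pip") -> ({'pip': 'pip._internal.cli.main:main', ...}, {})
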
@@ -153,7 +135,7 @@ def message_about_scripts_not_on_PATH(scripts):
         return None

     # Group scripts by the path they were installed in
-    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
+    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
     for destfile in scripts:
         parent_dir = os.path.dirname(destfile)
         script_name = os.path.basename(destfile)
@@ -161,23 +143,24 @@ def message_about_scripts_not_on_PATH(scripts):

     # We don't want to warn for directories that are on PATH.
     not_warn_dirs = [
-        os.path.normcase(i).rstrip(os.sep) for i in
-        os.environ.get("PATH", "").split(os.pathsep)
+        os.path.normcase(i).rstrip(os.sep)
+        for i in os.environ.get("PATH", "").split(os.pathsep)
     ]
     # If an executable sits with sys.executable, we don't warn for it.
     # This covers the case of venv invocations without activating the venv.
     not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
-    warn_for = {
-        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
+    warn_for: Dict[str, Set[str]] = {
+        parent_dir: scripts
+        for parent_dir, scripts in grouped_by_dir.items()
         if os.path.normcase(parent_dir) not in not_warn_dirs
-    }  # type: Dict[str, Set[str]]
+    }
     if not warn_for:
         return None

     # Format a message
     msg_lines = []
     for parent_dir, dir_scripts in warn_for.items():
-        sorted_scripts = sorted(dir_scripts)  # type: List[str]
+        sorted_scripts: List[str] = sorted(dir_scripts)
         if len(sorted_scripts) == 1:
             start_text = "script {} is".format(sorted_scripts[0])
         else:
@@ -186,8 +169,9 @@ def message_about_scripts_not_on_PATH(scripts):
         )

         msg_lines.append(
-            "The {} installed in '{}' which is not on PATH."
-            .format(start_text, parent_dir)
+            "The {} installed in '{}' which is not on PATH.".format(
+                start_text, parent_dir
+            )
         )

     last_line_fmt = (
@@ -214,8 +198,9 @@ def message_about_scripts_not_on_PATH(scripts):
     return "\n".join(msg_lines)


-def _normalized_outrows(outrows):
-    # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
+def _normalized_outrows(
+    outrows: Iterable[InstalledCSVRow],
+) -> List[Tuple[str, str, str]]:
     """Normalize the given rows of a RECORD file.

     Items in each row are converted into str. Rows are then sorted to make
@@ -235,69 +220,60 @@ def _normalized_outrows(outrows):
     # For additional background, see--
     # https://github.com/pypa/pip/issues/5868
     return sorted(
-        (ensure_str(record_path, encoding='utf-8'), hash_, str(size))
-        for record_path, hash_, size in outrows
+        (record_path, hash_, str(size)) for record_path, hash_, size in outrows
     )


-def _record_to_fs_path(record_path):
-    # type: (RecordPath) -> str
+def _record_to_fs_path(record_path: RecordPath) -> str:
     return record_path


-def _fs_to_record_path(path, relative_to=None):
-    # type: (str, Optional[str]) -> RecordPath
+def _fs_to_record_path(path: str, relative_to: Optional[str] = None) -> RecordPath:
     if relative_to is not None:
         # On Windows, do not handle relative paths if they belong to different
         # logical disks
-        if os.path.splitdrive(path)[0].lower() == \
-                os.path.splitdrive(relative_to)[0].lower():
+        if (
+            os.path.splitdrive(path)[0].lower()
+            == os.path.splitdrive(relative_to)[0].lower()
+        ):
             path = os.path.relpath(path, relative_to)
-    path = path.replace(os.path.sep, '/')
-    return cast('RecordPath', path)
-
-
-def _parse_record_path(record_column):
-    # type: (str) -> RecordPath
-    p = ensure_text(record_column, encoding='utf-8')
-    return cast('RecordPath', p)
+    path = path.replace(os.path.sep, "/")
+    return cast("RecordPath", path)


 def get_csv_rows_for_installed(
-    old_csv_rows,  # type: List[List[str]]
-    installed,  # type: Dict[RecordPath, RecordPath]
-    changed,  # type: Set[RecordPath]
-    generated,  # type: List[str]
-    lib_dir,  # type: str
-):
-    # type: (...) -> List[InstalledCSVRow]
+    old_csv_rows: List[List[str]],
+    installed: Dict[RecordPath, RecordPath],
+    changed: Set[RecordPath],
+    generated: List[str],
+    lib_dir: str,
+) -> List[InstalledCSVRow]:
     """
     :param installed: A map from archive RECORD path to installation RECORD
         path.
     """
-    installed_rows = []  # type: List[InstalledCSVRow]
+    installed_rows: List[InstalledCSVRow] = []
     for row in old_csv_rows:
         if len(row) > 3:
-            logger.warning('RECORD line has more than three elements: %s', row)
-        old_record_path = _parse_record_path(row[0])
+            logger.warning("RECORD line has more than three elements: %s", row)
+        old_record_path = cast("RecordPath", row[0])
         new_record_path = installed.pop(old_record_path, old_record_path)
         if new_record_path in changed:
             digest, length = rehash(_record_to_fs_path(new_record_path))
         else:
-            digest = row[1] if len(row) > 1 else ''
-            length = row[2] if len(row) > 2 else ''
+            digest = row[1] if len(row) > 1 else ""
+            length = row[2] if len(row) > 2 else ""
         installed_rows.append((new_record_path, digest, length))
     for f in generated:
         path = _fs_to_record_path(f, lib_dir)
         digest, length = rehash(f)
         installed_rows.append((path, digest, length))
     for installed_record_path in installed.values():
-        installed_rows.append((installed_record_path, '', ''))
+        installed_rows.append((installed_record_path, "", ""))
     return installed_rows


-def get_console_script_specs(console):
-    # type: (Dict[str, str]) -> List[str]
+def get_console_script_specs(console: Dict[str, str]) -> List[str]:
     """
     Given the mapping from entrypoint name to callable, return the relevant
     console script specs.
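
rehash() produces the digest format RECORD expects: base64url-encoded SHA-256 with trailing "=" padding stripped, per PEP 376/427. A self-contained sketch of the same computation:

import hashlib
from base64 import urlsafe_b64encode
from typing import Tuple

def record_digest(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
    """Return (digest, length) for a file, in RECORD's sha256= format."""
    h = hashlib.sha256()
    length = 0
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            h.update(block)
            length += len(block)
    digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
    return digest, str(length)
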
@@ -340,62 +316,57 @@
     # DEFAULT
     # - The default behavior is to install pip, pipX, pipX.Y, easy_install
     # and easy_install-X.Y.
-    pip_script = console.pop('pip', None)
+    pip_script = console.pop("pip", None)
     if pip_script:
         if "ENSUREPIP_OPTIONS" not in os.environ:
-            scripts_to_generate.append('pip = ' + pip_script)
+            scripts_to_generate.append("pip = " + pip_script)

         if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
             scripts_to_generate.append(
-                'pip{} = {}'.format(sys.version_info[0], pip_script)
+                "pip{} = {}".format(sys.version_info[0], pip_script)
             )

-        scripts_to_generate.append(
-            f'pip{get_major_minor_version()} = {pip_script}'
-        )
+        scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
         # Delete any other versioned pip entry points
-        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
+        pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)]
         for k in pip_ep:
             del console[k]
-    easy_install_script = console.pop('easy_install', None)
+    easy_install_script = console.pop("easy_install", None)
     if easy_install_script:
         if "ENSUREPIP_OPTIONS" not in os.environ:
-            scripts_to_generate.append(
-                'easy_install = ' + easy_install_script
-            )
+            scripts_to_generate.append("easy_install = " + easy_install_script)

         scripts_to_generate.append(
-            'easy_install-{} = {}'.format(
+            "easy_install-{} = {}".format(
                 get_major_minor_version(), easy_install_script
             )
         )
         # Delete any other versioned easy_install entry points
         easy_install_ep = [
-            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
+            k for k in console if re.match(r"easy_install(-\d\.\d)?$", k)
         ]
         for k in easy_install_ep:
             del console[k]

     # Generate the console entry points specified in the wheel
-    scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
+    scripts_to_generate.extend(starmap("{} = {}".format, console.items()))

     return scripts_to_generate


 class ZipBackedFile:
-    def __init__(self, src_record_path, dest_path, zip_file):
-        # type: (RecordPath, str, ZipFile) -> None
+    def __init__(
+        self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
+    ) -> None:
         self.src_record_path = src_record_path
         self.dest_path = dest_path
         self._zip_file = zip_file
         self.changed = False

-    def _getinfo(self):
-        # type: () -> ZipInfo
+    def _getinfo(self) -> ZipInfo:
         return self._zip_file.getinfo(self.src_record_path)

-    def save(self):
-        # type: () -> None
+    def save(self) -> None:
         # directory creation is lazy and after file filtering
         # to ensure we don't install empty dirs; empty dirs can't be
         # uninstalled.
|
@ -424,22 +395,19 @@ class ZipBackedFile:
|
||||||
|
|
||||||
|
|
||||||
class ScriptFile:
|
class ScriptFile:
|
||||||
def __init__(self, file):
|
def __init__(self, file: "File") -> None:
|
||||||
# type: (File) -> None
|
|
||||||
self._file = file
|
self._file = file
|
||||||
self.src_record_path = self._file.src_record_path
|
self.src_record_path = self._file.src_record_path
|
||||||
self.dest_path = self._file.dest_path
|
self.dest_path = self._file.dest_path
|
||||||
self.changed = False
|
self.changed = False
|
||||||
|
|
||||||
def save(self):
|
def save(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
self._file.save()
|
self._file.save()
|
||||||
self.changed = fix_script(self.dest_path)
|
self.changed = fix_script(self.dest_path)
|
||||||
|
|
||||||
|
|
||||||
class MissingCallableSuffix(InstallationError):
|
class MissingCallableSuffix(InstallationError):
|
||||||
def __init__(self, entry_point):
|
def __init__(self, entry_point: str) -> None:
|
||||||
# type: (str) -> None
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
"Invalid script entry point: {} - A callable "
|
"Invalid script entry point: {} - A callable "
|
||||||
"suffix is required. Cf https://packaging.python.org/"
|
"suffix is required. Cf https://packaging.python.org/"
|
||||||
|
@ -448,31 +416,28 @@ class MissingCallableSuffix(InstallationError):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _raise_for_invalid_entrypoint(specification):
|
def _raise_for_invalid_entrypoint(specification: str) -> None:
|
||||||
# type: (str) -> None
|
|
||||||
entry = get_export_entry(specification)
|
entry = get_export_entry(specification)
|
||||||
if entry is not None and entry.suffix is None:
|
if entry is not None and entry.suffix is None:
|
||||||
raise MissingCallableSuffix(str(entry))
|
raise MissingCallableSuffix(str(entry))
|
||||||
|
|
||||||
|
|
||||||
class PipScriptMaker(ScriptMaker):
|
class PipScriptMaker(ScriptMaker):
|
||||||
def make(self, specification, options=None):
|
def make(self, specification: str, options: Dict[str, Any] = None) -> List[str]:
|
||||||
# type: (str, Dict[str, Any]) -> List[str]
|
|
||||||
_raise_for_invalid_entrypoint(specification)
|
_raise_for_invalid_entrypoint(specification)
|
||||||
return super().make(specification, options)
|
return super().make(specification, options)
|
||||||
|
|
||||||
|
|
||||||
def _install_wheel(
|
def _install_wheel(
|
||||||
name, # type: str
|
name: str,
|
||||||
wheel_zip, # type: ZipFile
|
wheel_zip: ZipFile,
|
||||||
wheel_path, # type: str
|
wheel_path: str,
|
||||||
scheme, # type: Scheme
|
scheme: Scheme,
|
||||||
pycompile=True, # type: bool
|
pycompile: bool = True,
|
||||||
warn_script_location=True, # type: bool
|
warn_script_location: bool = True,
|
||||||
direct_url=None, # type: Optional[DirectUrl]
|
direct_url: Optional[DirectUrl] = None,
|
||||||
requested=False, # type: bool
|
requested: bool = False,
|
||||||
):
|
) -> None:
|
||||||
# type: (...) -> None
|
|
||||||
"""Install a wheel.
|
"""Install a wheel.
|
||||||
|
|
||||||
:param name: Name of the project to install
|
:param name: Name of the project to install
|
||||||
|
@ -499,33 +464,23 @@ def _install_wheel(
|
||||||
# installed = files copied from the wheel to the destination
|
# installed = files copied from the wheel to the destination
|
||||||
# changed = files changed while installing (scripts #! line typically)
|
# changed = files changed while installing (scripts #! line typically)
|
||||||
# generated = files newly generated during the install (script wrappers)
|
# generated = files newly generated during the install (script wrappers)
|
||||||
installed = {} # type: Dict[RecordPath, RecordPath]
|
installed: Dict[RecordPath, RecordPath] = {}
|
||||||
changed = set() # type: Set[RecordPath]
|
changed: Set[RecordPath] = set()
|
||||||
generated = [] # type: List[str]
|
generated: List[str] = []
|
||||||
|
|
||||||
def record_installed(srcfile, destfile, modified=False):
|
def record_installed(
|
||||||
# type: (RecordPath, str, bool) -> None
|
srcfile: RecordPath, destfile: str, modified: bool = False
|
||||||
|
) -> None:
|
||||||
"""Map archive RECORD paths to installation RECORD paths."""
|
"""Map archive RECORD paths to installation RECORD paths."""
|
||||||
newpath = _fs_to_record_path(destfile, lib_dir)
|
newpath = _fs_to_record_path(destfile, lib_dir)
|
||||||
installed[srcfile] = newpath
|
installed[srcfile] = newpath
|
||||||
if modified:
|
if modified:
|
||||||
changed.add(_fs_to_record_path(destfile))
|
changed.add(_fs_to_record_path(destfile))
|
||||||
|
|
||||||
def all_paths():
|
def is_dir_path(path: RecordPath) -> bool:
|
||||||
# type: () -> Iterable[RecordPath]
|
|
||||||
names = wheel_zip.namelist()
|
|
||||||
# If a flag is set, names may be unicode in Python 2. We convert to
|
|
||||||
# text explicitly so these are valid for lookup in RECORD.
|
|
||||||
decoded_names = map(ensure_text, names)
|
|
||||||
for name in decoded_names:
|
|
||||||
yield cast("RecordPath", name)
|
|
||||||
|
|
||||||
def is_dir_path(path):
|
|
||||||
# type: (RecordPath) -> bool
|
|
||||||
return path.endswith("/")
|
return path.endswith("/")
|
||||||
|
|
||||||
def assert_no_path_traversal(dest_dir_path, target_path):
|
def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
|
||||||
# type: (str, str) -> None
|
|
||||||
if not is_within_directory(dest_dir_path, target_path):
|
if not is_within_directory(dest_dir_path, target_path):
|
||||||
message = (
|
message = (
|
||||||
"The wheel {!r} has a file {!r} trying to install"
|
"The wheel {!r} has a file {!r} trying to install"
|
||||||
|
@ -535,10 +490,10 @@ def _install_wheel(
|
||||||
message.format(wheel_path, target_path, dest_dir_path)
|
message.format(wheel_path, target_path, dest_dir_path)
|
||||||
)
|
)
|
||||||
|
|
||||||
def root_scheme_file_maker(zip_file, dest):
|
def root_scheme_file_maker(
|
||||||
# type: (ZipFile, str) -> Callable[[RecordPath], File]
|
zip_file: ZipFile, dest: str
|
||||||
def make_root_scheme_file(record_path):
|
) -> Callable[[RecordPath], "File"]:
|
||||||
# type: (RecordPath) -> File
|
def make_root_scheme_file(record_path: RecordPath) -> "File":
|
||||||
normed_path = os.path.normpath(record_path)
|
normed_path = os.path.normpath(record_path)
|
||||||
dest_path = os.path.join(dest, normed_path)
|
dest_path = os.path.join(dest, normed_path)
|
||||||
assert_no_path_traversal(dest, dest_path)
|
assert_no_path_traversal(dest, dest_path)
|
||||||
|
@ -546,17 +501,12 @@ def _install_wheel(
|
||||||
|
|
||||||
return make_root_scheme_file
|
return make_root_scheme_file
|
||||||
|
|
||||||
def data_scheme_file_maker(zip_file, scheme):
|
def data_scheme_file_maker(
|
||||||
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
|
zip_file: ZipFile, scheme: Scheme
|
||||||
scheme_paths = {}
|
) -> Callable[[RecordPath], "File"]:
|
||||||
for key in SCHEME_KEYS:
|
scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
|
||||||
encoded_key = ensure_text(key)
|
|
||||||
scheme_paths[encoded_key] = ensure_text(
|
|
||||||
getattr(scheme, key), encoding=sys.getfilesystemencoding()
|
|
||||||
)
|
|
||||||
|
|
||||||
-        def make_data_scheme_file(record_path):
-            # type: (RecordPath) -> File
+        def make_data_scheme_file(record_path: RecordPath) -> "File":
             normed_path = os.path.normpath(record_path)
             try:
                 _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)

@@ -575,9 +525,7 @@ def _install_wheel(
                     "Unknown scheme key used in {}: {} (for file {!r}). .data"
                     " directory contents should be in subdirectories named"
                     " with a valid scheme key ({})"
-                ).format(
-                    wheel_path, scheme_key, record_path, valid_scheme_keys
-                )
+                ).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
                 raise InstallationError(message)

             dest_path = os.path.join(scheme_path, dest_subpath)

@@ -586,30 +534,19 @@ def _install_wheel(

         return make_data_scheme_file

-    def is_data_scheme_path(path):
-        # type: (RecordPath) -> bool
+    def is_data_scheme_path(path: RecordPath) -> bool:
         return path.split("/", 1)[0].endswith(".data")

-    paths = all_paths()
+    paths = cast(List[RecordPath], wheel_zip.namelist())
     file_paths = filterfalse(is_dir_path, paths)
-    root_scheme_paths, data_scheme_paths = partition(
-        is_data_scheme_path, file_paths
-    )
+    root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
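partition splits one iterable into a false stream and a true stream by a predicate. pip imports it from its utilities; the classic itertools recipe it follows, reproduced here as a sketch rather than as pip's exact implementation:

    from itertools import filterfalse, tee
    from typing import Callable, Iterable, Iterator, Tuple, TypeVar

    T = TypeVar("T")

    def partition(
        pred: Callable[[T], bool], entries: Iterable[T]
    ) -> Tuple[Iterator[T], Iterator[T]]:
        # tee duplicates the iterable; the first result holds items that
        # fail the predicate, the second those that pass (the order the
        # call sites above depend on).
        t1, t2 = tee(entries)
        return filterfalse(pred, t1), filter(pred, t2)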
-    make_root_scheme_file = root_scheme_file_maker(
-        wheel_zip,
-        ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
-    )
-    files = map(make_root_scheme_file, root_scheme_paths)
+    make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
+    files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)

-    def is_script_scheme_path(path):
-        # type: (RecordPath) -> bool
+    def is_script_scheme_path(path: RecordPath) -> bool:
         parts = path.split("/", 2)
-        return (
-            len(parts) > 2 and
-            parts[0].endswith(".data") and
-            parts[1] == "scripts"
-        )
+        return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"

     other_scheme_paths, script_scheme_paths = partition(
         is_script_scheme_path, data_scheme_paths

@@ -620,32 +557,32 @@ def _install_wheel(
     files = chain(files, other_scheme_files)

     # Get the defined entry points
-    distribution = pkg_resources_distribution_for_wheel(
-        wheel_zip, name, wheel_path
-    )
+    distribution = get_wheel_distribution(
+        FilesystemWheel(wheel_path),
+        canonicalize_name(name),
+    )
     console, gui = get_entrypoints(distribution)
-    def is_entrypoint_wrapper(file):
-        # type: (File) -> bool
+    def is_entrypoint_wrapper(file: "File") -> bool:
         # EP, EP.exe and EP-script.py are scripts generated for
         # entry point EP by setuptools
         path = file.dest_path
         name = os.path.basename(path)
-        if name.lower().endswith('.exe'):
+        if name.lower().endswith(".exe"):
             matchname = name[:-4]
-        elif name.lower().endswith('-script.py'):
+        elif name.lower().endswith("-script.py"):
             matchname = name[:-10]
         elif name.lower().endswith(".pya"):
             matchname = name[:-4]
         else:
             matchname = name
         # Ignore setuptools-generated scripts
-        return (matchname in console or matchname in gui)
+        return matchname in console or matchname in gui

-    script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
-    script_scheme_files = filterfalse(
-        is_entrypoint_wrapper, script_scheme_files
-    )
+    script_scheme_files: Iterator[File] = map(
+        make_data_scheme_file, script_scheme_paths
+    )
+    script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
     script_scheme_files = map(ScriptFile, script_scheme_files)
     files = chain(files, script_scheme_files)

@@ -653,8 +590,7 @@ def _install_wheel(
         file.save()
         record_installed(file.src_record_path, file.dest_path, file.changed)

-    def pyc_source_file_paths():
-        # type: () -> Iterator[str]
+    def pyc_source_file_paths() -> Generator[str, None, None]:
         # We de-duplicate installation paths, since there can be overlap (e.g.
         # file in .data maps to same location as file in wheel root).
         # Sorting installation paths makes it easier to reproduce and debug

@@ -663,30 +599,21 @@ def _install_wheel(
             full_installed_path = os.path.join(lib_dir, installed_path)
             if not os.path.isfile(full_installed_path):
                 continue
-            if not full_installed_path.endswith('.py'):
+            if not full_installed_path.endswith(".py"):
                 continue
             yield full_installed_path

-    def pyc_output_path(path):
-        # type: (str) -> str
-        """Return the path the pyc file would have been written to.
-        """
+    def pyc_output_path(path: str) -> str:
+        """Return the path the pyc file would have been written to."""
         return importlib.util.cache_from_source(path)
     # Compile all of the pyc files for the installed files
     if pycompile:
         with captured_stdout() as stdout:
             with warnings.catch_warnings():
-                warnings.filterwarnings('ignore')
+                warnings.filterwarnings("ignore")
                 for path in pyc_source_file_paths():
-                    # Python 2's `compileall.compile_file` requires a str in
-                    # error cases, so we must convert to the native type.
-                    path_arg = ensure_str(
-                        path, encoding=sys.getfilesystemencoding()
-                    )
-                    success = compileall.compile_file(
-                        path_arg, force=True, quiet=True
-                    )
+                    success = compileall.compile_file(path, force=True, quiet=True)
                     if success:
                         pyc_path = pyc_output_path(path)
                         assert os.path.exists(pyc_path)
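The compilation loop pairs two stdlib calls: compileall.compile_file to byte-compile a source file and importlib.util.cache_from_source to locate the resulting .pyc. A standalone illustration (the file name is made up):

    import compileall
    import importlib.util

    source = "example_module.py"  # hypothetical file
    if compileall.compile_file(source, force=True, quiet=True):
        # cache_from_source maps foo.py to __pycache__/foo.cpython-XY.pyc
        print(importlib.util.cache_from_source(source))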
@@ -705,7 +632,7 @@ def _install_wheel(
     # Ensure we don't generate any variants for scripts because this is almost
     # never what somebody wants.
     # See https://bitbucket.org/pypa/distlib/issue/35/
-    maker.variants = {''}
+    maker.variants = {""}

     # This is required because otherwise distlib creates scripts that are not
     # executable.

@@ -715,14 +642,12 @@ def _install_wheel(
     # Generate the console and GUI entry points specified in the wheel
     scripts_to_generate = get_console_script_specs(console)

-    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
+    gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))

     generated_console_scripts = maker.make_multiple(scripts_to_generate)
     generated.extend(generated_console_scripts)

-    generated.extend(
-        maker.make_multiple(gui_scripts_to_generate, {'gui': True})
-    )
+    generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))

     if warn_script_location:
         msg = message_about_scripts_not_on_PATH(generated_console_scripts)
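The gui entry-point specs are built with itertools.starmap over (name, value) pairs; the same one-liner in isolation, with an invented entry point:

    from itertools import starmap

    gui = {"myapp-gui": "myapp.gui:main"}  # illustrative entry points
    print(list(starmap("{} = {}".format, gui.items())))
    # ['myapp-gui = myapp.gui:main']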
@@ -732,8 +657,7 @@ def _install_wheel(
     generated_file_mode = 0o666 & ~current_umask()

     @contextlib.contextmanager
-    def _generate_file(path, **kwargs):
-        # type: (str, **Any) -> Iterator[BinaryIO]
+    def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
         with adjacent_tmp_file(path, **kwargs) as f:
             yield f
         os.chmod(f.name, generated_file_mode)

@@ -742,9 +666,9 @@ def _install_wheel(
     dest_info_dir = os.path.join(lib_dir, info_dir)

     # Record pip as the installer
-    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
+    installer_path = os.path.join(dest_info_dir, "INSTALLER")
     with _generate_file(installer_path) as installer_file:
-        installer_file.write(b'pip\n')
+        installer_file.write(b"pip\n")
     generated.append(installer_path)

     # Record the PEP 610 direct URL reference

@@ -756,12 +680,12 @@ def _install_wheel(

     # Record the REQUESTED file
     if requested:
-        requested_path = os.path.join(dest_info_dir, 'REQUESTED')
+        requested_path = os.path.join(dest_info_dir, "REQUESTED")
         with open(requested_path, "wb"):
             pass
         generated.append(requested_path)

-    record_text = distribution.get_metadata('RECORD')
+    record_text = distribution.read_text("RECORD")
     record_rows = list(csv.reader(record_text.splitlines()))

     rows = get_csv_rows_for_installed(

@@ -769,42 +693,38 @@ def _install_wheel(
         installed=installed,
         changed=changed,
         generated=generated,
-        lib_dir=lib_dir)
+        lib_dir=lib_dir,
+    )

     # Record details of all files installed
-    record_path = os.path.join(dest_info_dir, 'RECORD')
+    record_path = os.path.join(dest_info_dir, "RECORD")

-    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
-        # The type mypy infers for record_file is different for Python 3
-        # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
-        # cast to typing.IO[str] as a workaround.
-        writer = csv.writer(cast('IO[str]', record_file))
+    with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
+        # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
+        # "writer" has incompatible type "BinaryIO"; expected "_Writer"
+        writer = csv.writer(cast("IO[str]", record_file))
         writer.writerows(_normalized_outrows(rows))
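RECORD is a plain CSV of (path, hash, size) rows per the wheel specification, which is why a bare csv.writer suffices here. A rough standalone equivalent with invented values:

    import csv

    rows = [
        ("mypkg/__init__.py", "sha256=AbC123", "1024"),
        ("mypkg-1.0.dist-info/RECORD", "", ""),
    ]
    with open("RECORD", "w", newline="", encoding="utf-8") as f:
        csv.writer(f).writerows(rows)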
 @contextlib.contextmanager
-def req_error_context(req_description):
-    # type: (str) -> Iterator[None]
+def req_error_context(req_description: str) -> Generator[None, None, None]:
     try:
         yield
     except InstallationError as e:
         message = "For req: {}. {}".format(req_description, e.args[0])
-        reraise(
-            InstallationError, InstallationError(message), sys.exc_info()[2]
-        )
+        raise InstallationError(message) from e
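With Python 2 gone, the six-style reraise call collapses into native exception chaining. The difference, in miniature:

    class InstallationError(Exception):
        pass

    try:
        try:
            raise InstallationError("original failure")
        except InstallationError as e:
            # "from e" attaches the original as __cause__, preserving the
            # traceback that reraise() used to thread through manually.
            raise InstallationError(f"For req: example. {e.args[0]}") from e
    except InstallationError as wrapped:
        print(wrapped)            # For req: example. original failure
        print(wrapped.__cause__)  # original failure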
 def install_wheel(
-    name,  # type: str
-    wheel_path,  # type: str
-    scheme,  # type: Scheme
-    req_description,  # type: str
-    pycompile=True,  # type: bool
-    warn_script_location=True,  # type: bool
-    direct_url=None,  # type: Optional[DirectUrl]
-    requested=False,  # type: bool
-):
-    # type: (...) -> None
+    name: str,
+    wheel_path: str,
+    scheme: Scheme,
+    req_description: str,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
     with ZipFile(wheel_path, allowZip64=True) as z:
         with req_error_context(req_description):
             _install_wheel(
@@ -8,10 +8,9 @@ import logging
 import mimetypes
 import os
 import shutil
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, List, Optional

 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import Distribution

 from pip._internal.distributions import make_distribution_for_install_requirement
 from pip._internal.distributions.installed import InstalledDistribution

@@ -25,6 +24,7 @@ from pip._internal.exceptions import (
     VcsHashUnsupported,
 )
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
 from pip._internal.models.link import Link
 from pip._internal.models.wheel import Wheel
 from pip._internal.network.download import BatchDownloader, Downloader

@@ -33,13 +33,11 @@ from pip._internal.network.lazy_wheel import (
     dist_from_wheel_url,
 )
 from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.filesystem import copy2_fixed
 from pip._internal.utils.hashes import Hashes, MissingHashes
 from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import display_path, hide_url, rmtree
+from pip._internal.utils.misc import display_path, hide_url, is_installable_dir
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
 from pip._internal.vcs import vcs

@@ -48,30 +46,29 @@ logger = logging.getLogger(__name__)
 def _get_prepared_distribution(
-    req,  # type: InstallRequirement
-    req_tracker,  # type: RequirementTracker
-    finder,  # type: PackageFinder
-    build_isolation,  # type: bool
-):
-    # type: (...) -> Distribution
+    req: InstallRequirement,
+    build_tracker: BuildTracker,
+    finder: PackageFinder,
+    build_isolation: bool,
+    check_build_deps: bool,
+) -> BaseDistribution:
     """Prepare a distribution for installation."""
     abstract_dist = make_distribution_for_install_requirement(req)
-    with req_tracker.track(req):
-        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
-    return abstract_dist.get_pkg_resources_distribution()
+    with build_tracker.track(req):
+        abstract_dist.prepare_distribution_metadata(
+            finder, build_isolation, check_build_deps
+        )
+    return abstract_dist.get_metadata_distribution()


-def unpack_vcs_link(link, location):
-    # type: (Link, str) -> None
+def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
     vcs_backend = vcs.get_backend_for_scheme(link.scheme)
     assert vcs_backend is not None
-    vcs_backend.unpack(location, url=hide_url(link.url))
+    vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
 class File:
-    def __init__(self, path, content_type):
-        # type: (str, Optional[str]) -> None
+    def __init__(self, path: str, content_type: Optional[str]) -> None:
         self.path = path
         if content_type is None:
             self.content_type = mimetypes.guess_type(path)[0]

@@ -80,19 +77,16 @@ class File:
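File falls back to mimetypes.guess_type when no Content-Type was recorded for a download; the helper returns a (type, encoding) pair:

    import mimetypes

    print(mimetypes.guess_type("pkg-1.0.tar.gz"))
    # ('application/x-tar', 'gzip')
    print(mimetypes.guess_type("pkg-1.0-py3-none-any.whl"))
    # often (None, None): .whl has no registered type on most platforms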
 def get_http_url(
-    link,  # type: Link
-    download,  # type: Downloader
-    download_dir=None,  # type: Optional[str]
-    hashes=None,  # type: Optional[Hashes]
-):
-    # type: (...) -> File
+    link: Link,
+    download: Downloader,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> File:
     temp_dir = TempDirectory(kind="unpack", globally_managed=True)
     # If a download dir is specified, is the file already downloaded there?
     already_downloaded_path = None
     if download_dir:
-        already_downloaded_path = _check_download_dir(
-            link, download_dir, hashes
-        )
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)

     if already_downloaded_path:
         from_path = already_downloaded_path

@@ -106,72 +100,14 @@ def get_http_url(
     return File(from_path, content_type)


-def _copy2_ignoring_special_files(src, dest):
-    # type: (str, str) -> None
-    """Copying special files is not supported, but as a convenience to users
-    we skip errors copying them. This supports tools that may create e.g.
-    socket files in the project source directory.
-    """
-    try:
-        copy2_fixed(src, dest)
-    except shutil.SpecialFileError as e:
-        # SpecialFileError may be raised due to either the source or
-        # destination. If the destination was the cause then we would actually
-        # care, but since the destination directory is deleted prior to
-        # copy we ignore all of them assuming it is caused by the source.
-        logger.warning(
-            "Ignoring special file error '%s' encountered copying %s to %s.",
-            str(e),
-            src,
-            dest,
-        )
-
-
-def _copy_source_tree(source, target):
-    # type: (str, str) -> None
-    target_abspath = os.path.abspath(target)
-    target_basename = os.path.basename(target_abspath)
-    target_dirname = os.path.dirname(target_abspath)
-
-    def ignore(d, names):
-        # type: (str, List[str]) -> List[str]
-        skipped = []  # type: List[str]
-        if d == source:
-            # Pulling in those directories can potentially be very slow,
-            # exclude the following directories if they appear in the top
-            # level dir (and only it).
-            # See discussion at https://github.com/pypa/pip/pull/6770
-            skipped += ['.tox', '.nox']
-        if os.path.abspath(d) == target_dirname:
-            # Prevent an infinite recursion if the target is in source.
-            # This can happen when TMPDIR is set to ${PWD}/...
-            # and we copy PWD to TMPDIR.
-            skipped += [target_basename]
-        return skipped
-
-    shutil.copytree(
-        source,
-        target,
-        ignore=ignore,
-        symlinks=True,
-        copy_function=_copy2_ignoring_special_files,
-    )
 def get_file_url(
-    link,  # type: Link
-    download_dir=None,  # type: Optional[str]
-    hashes=None  # type: Optional[Hashes]
-):
-    # type: (...) -> File
-    """Get file and optionally check its hash.
-    """
+    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
+) -> File:
+    """Get file and optionally check its hash."""
     # If a download dir is specified, is the file already there and valid?
     already_downloaded_path = None
     if download_dir:
-        already_downloaded_path = _check_download_dir(
-            link, download_dir, hashes
-        )
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)

     if already_downloaded_path:
         from_path = already_downloaded_path

@@ -189,13 +125,13 @@ def get_file_url(
 def unpack_url(
-    link,  # type: Link
-    location,  # type: str
-    download,  # type: Downloader
-    download_dir=None,  # type: Optional[str]
-    hashes=None,  # type: Optional[Hashes]
-):
-    # type: (...) -> Optional[File]
+    link: Link,
+    location: str,
+    download: Downloader,
+    verbosity: int,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> Optional[File]:
     """Unpack link into location, downloading if required.

     :param hashes: A Hashes object, one of whose embedded hashes must match,

@@ -205,30 +141,10 @@ def unpack_url(
     """
     # non-editable vcs urls
     if link.is_vcs:
-        unpack_vcs_link(link, location)
+        unpack_vcs_link(link, location, verbosity=verbosity)
         return None

-    # Once out-of-tree-builds are no longer supported, could potentially
-    # replace the below condition with `assert not link.is_existing_dir`
-    # - unpack_url does not need to be called for in-tree-builds.
-    #
-    # As further cleanup, _copy_source_tree and accompanying tests can
-    # be removed.
-    if link.is_existing_dir():
-        deprecated(
-            "A future pip version will change local packages to be built "
-            "in-place without first copying to a temporary directory. "
-            "We recommend you use --use-feature=in-tree-build to test "
-            "your packages with this new behavior before it becomes the "
-            "default.\n",
-            replacement=None,
-            gone_in="21.3",
-            issue=7555
-        )
-        if os.path.isdir(location):
-            rmtree(location)
-        _copy_source_tree(link.file_path, location)
-        return None
+    assert not link.is_existing_dir()

     # file urls
     if link.is_file:

@@ -251,10 +167,11 @@ def unpack_url(
     return file
 def _check_download_dir(
-    link, download_dir, hashes
-):
-    # type: (Link, str, Optional[Hashes]) -> Optional[str]
-    """ Check download_dir for previously downloaded file with correct hash
-    If a correct file is found return its path else None
+    link: Link, download_dir: str, hashes: Optional[Hashes]
+) -> Optional[str]:
+    """Check download_dir for previously downloaded file with correct hash
+    If a correct file is found return its path else None
     """
     download_path = os.path.join(download_dir, link.filename)

@@ -262,15 +179,14 @@ def _check_download_dir(link, download_dir, hashes):
         return None

     # If already downloaded, does its hash match?
-    logger.info('File was already downloaded %s', download_path)
+    logger.info("File was already downloaded %s", download_path)
     if hashes:
         try:
             hashes.check_against_path(download_path)
         except HashMismatch:
             logger.warning(
-                'Previously-downloaded file %s has bad hash. '
-                'Re-downloading.',
-                download_path
+                "Previously-downloaded file %s has bad hash. Re-downloading.",
+                download_path,
             )
             os.unlink(download_path)
             return None
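hashes.check_against_path raises HashMismatch when none of the allowed digests matches the file. A stdlib-only stand-in for the idea (the function name and the ValueError are illustrative, not pip's API):

    import hashlib

    def check_against_path(path: str, expected_sha256: str) -> None:
        # Stream the file in chunks and compare hex digests.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                digest.update(chunk)
        if digest.hexdigest() != expected_sha256:
            raise ValueError(f"hash mismatch for {path}")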
@@ -278,30 +194,29 @@ def _check_download_dir(link, download_dir, hashes):

 class RequirementPreparer:
-    """Prepares a Requirement
-    """
+    """Prepares a Requirement"""

     def __init__(
         self,
-        build_dir,  # type: str
-        download_dir,  # type: Optional[str]
-        src_dir,  # type: str
-        build_isolation,  # type: bool
-        req_tracker,  # type: RequirementTracker
-        session,  # type: PipSession
-        progress_bar,  # type: str
-        finder,  # type: PackageFinder
-        require_hashes,  # type: bool
-        use_user_site,  # type: bool
-        lazy_wheel,  # type: bool
-        in_tree_build,  # type: bool
-    ):
-        # type: (...) -> None
+        build_dir: str,
+        download_dir: Optional[str],
+        src_dir: str,
+        build_isolation: bool,
+        check_build_deps: bool,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        progress_bar: str,
+        finder: PackageFinder,
+        require_hashes: bool,
+        use_user_site: bool,
+        lazy_wheel: bool,
+        verbosity: int,
+    ) -> None:
         super().__init__()

         self.src_dir = src_dir
         self.build_dir = build_dir
-        self.req_tracker = req_tracker
+        self.build_tracker = build_tracker
         self._session = session
         self._download = Downloader(session, progress_bar)
         self._batch_download = BatchDownloader(session, progress_bar)

@@ -314,6 +229,9 @@ class RequirementPreparer:
         # Is build isolation allowed?
         self.build_isolation = build_isolation

+        # Should check build dependencies?
+        self.check_build_deps = check_build_deps
+
         # Should hash-checking be required?
         self.require_hashes = require_hashes

@@ -323,17 +241,16 @@ class RequirementPreparer:
         # Should wheels be downloaded lazily?
         self.use_lazy_wheel = lazy_wheel

-        # Should in-tree builds be used for local paths?
-        self.in_tree_build = in_tree_build
+        # How verbose should underlying tooling be?
+        self.verbosity = verbosity

-        # Memoized downloaded files, as mapping of url: (path, mime type)
-        self._downloaded = {}  # type: Dict[str, Tuple[str, str]]
+        # Memoized downloaded files, as mapping of url: path.
+        self._downloaded: Dict[str, str] = {}

         # Previous "header" printed for a link-based InstallRequirement
         self._previous_requirement_header = ("", "")

-    def _log_preparing_link(self, req):
-        # type: (InstallRequirement) -> None
+    def _log_preparing_link(self, req: InstallRequirement) -> None:
         """Provide context for the requirement being prepared."""
         if req.link.is_file and not req.original_link_is_in_wheel_cache:
             message = "Processing %s"

@@ -350,8 +267,9 @@ class RequirementPreparer:
         with indent_log():
             logger.info("Using cached %s", req.link.filename)
-    def _ensure_link_req_src_dir(self, req, parallel_builds):
-        # type: (InstallRequirement, bool) -> None
+    def _ensure_link_req_src_dir(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> None:
         """Ensure source_dir of a linked InstallRequirement."""
         # Since source_dir is only set for editable requirements.
         if req.link.is_wheel:

@@ -359,7 +277,7 @@ class RequirementPreparer:
             # directory.
             return
         assert req.source_dir is None
-        if req.link.is_existing_dir() and self.in_tree_build:
+        if req.link.is_existing_dir():
             # build local directories in-tree
             req.source_dir = req.link.file_path
             return

@@ -376,7 +294,8 @@ class RequirementPreparer:
         # installation.
         # FIXME: this won't upgrade when there's an existing
         # package unpacked in `req.source_dir`
-        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+        # TODO: this check is now probably dead code
+        if is_installable_dir(req.source_dir):
             raise PreviousBuildDirError(
                 "pip can't proceed with requirements '{}' due to a"
                 "pre-existing build directory ({}). This is likely "

@@ -385,8 +304,7 @@ class RequirementPreparer:
                 "Please delete it and try again.".format(req, req.source_dir)
             )
-    def _get_linked_req_hashes(self, req):
-        # type: (InstallRequirement) -> Hashes
+    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
         # By the time this is called, the requirement's link should have
         # been checked so we can tell what kind of requirements req is
         # and raise some more informative errors than otherwise.

@@ -418,18 +336,19 @@ class RequirementPreparer:
         # showing the user what the hash should be.
         return req.hashes(trust_internet=False) or MissingHashes()
-    def _fetch_metadata_using_lazy_wheel(self, link):
-        # type: (Link) -> Optional[Distribution]
+    def _fetch_metadata_using_lazy_wheel(
+        self,
+        link: Link,
+    ) -> Optional[BaseDistribution]:
         """Fetch metadata using lazy wheel, if possible."""
         if not self.use_lazy_wheel:
             return None
         if self.require_hashes:
-            logger.debug('Lazy wheel is not used as hash checking is required')
+            logger.debug("Lazy wheel is not used as hash checking is required")
             return None
         if link.is_file or not link.is_wheel:
             logger.debug(
-                'Lazy wheel is not used as '
-                '%r does not points to a remote wheel',
+                "Lazy wheel is not used as %r does not points to a remote wheel",
                 link,
             )
             return None

@@ -437,22 +356,22 @@ class RequirementPreparer:
         wheel = Wheel(link.filename)
         name = canonicalize_name(wheel.name)
         logger.info(
-            'Obtaining dependency information from %s %s',
-            name, wheel.version,
+            "Obtaining dependency information from %s %s",
+            name,
+            wheel.version,
         )
-        url = link.url.split('#', 1)[0]
+        url = link.url.split("#", 1)[0]
         try:
             return dist_from_wheel_url(name, url, self._session)
         except HTTPRangeRequestUnsupported:
-            logger.debug('%s does not support range requests', url)
+            logger.debug("%s does not support range requests", url)
             return None
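Lazy wheel reads just the metadata out of a remote wheel via HTTP range requests, falling back when the server rejects them. How a server's support might be probed, sketched with the third-party requests library (illustrative only; pip's dist_from_wheel_url works through its own session):

    import requests

    def supports_range_requests(url: str) -> bool:
        # Either the server advertises byte ranges, or it answers a
        # one-byte ranged GET with 206 Partial Content.
        head = requests.head(url, allow_redirects=True)
        if head.headers.get("Accept-Ranges") == "bytes":
            return True
        resp = requests.get(url, headers={"Range": "bytes=0-0"}, stream=True)
        return resp.status_code == 206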
     def _complete_partial_requirements(
         self,
-        partially_downloaded_reqs,  # type: Iterable[InstallRequirement]
-        parallel_builds=False,  # type: bool
-    ):
-        # type: (...) -> None
+        partially_downloaded_reqs: Iterable[InstallRequirement],
+        parallel_builds: bool = False,
+    ) -> None:
         """Download any requirements which were only fetched by metadata."""
         # Download to a temporary directory. These will be copied over as
         # needed for downstream 'download', 'wheel', and 'install' commands.

@@ -461,7 +380,7 @@ class RequirementPreparer:
         # Map each link to the requirement that owns it. This allows us to set
         # `req.local_file_path` on the appropriate requirement after passing
         # all the links at once into BatchDownloader.
-        links_to_fully_download = {}  # type: Dict[Link, InstallRequirement]
+        links_to_fully_download: Dict[Link, InstallRequirement] = {}
         for req in partially_downloaded_reqs:
             assert req.link
             links_to_fully_download[req.link] = req

@@ -480,8 +399,9 @@ class RequirementPreparer:
         for req in partially_downloaded_reqs:
             self._prepare_linked_requirement(req, parallel_builds)
-    def prepare_linked_requirement(self, req, parallel_builds=False):
-        # type: (InstallRequirement, bool) -> Distribution
+    def prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool = False
+    ) -> BaseDistribution:
         """Prepare a requirement to be obtained from req.link."""
         assert req.link
         link = req.link

@@ -496,7 +416,7 @@ class RequirementPreparer:

         if file_path is not None:
             # The file is already available, so mark it as downloaded
-            self._downloaded[req.link.url] = file_path, None
+            self._downloaded[req.link.url] = file_path
         else:
             # The file is not available, attempt to fetch only metadata
             wheel_dist = self._fetch_metadata_using_lazy_wheel(link)

@@ -507,8 +427,9 @@ class RequirementPreparer:
         # None of the optimizations worked, fully prepare the requirement
         return self._prepare_linked_requirement(req, parallel_builds)
-    def prepare_linked_requirements_more(self, reqs, parallel_builds=False):
-        # type: (Iterable[InstallRequirement], bool) -> None
+    def prepare_linked_requirements_more(
+        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+    ) -> None:
         """Prepare linked requirements more, if needed."""
         reqs = [req for req in reqs if req.needs_more_preparation]
         for req in reqs:

@@ -517,12 +438,12 @@ class RequirementPreparer:
             hashes = self._get_linked_req_hashes(req)
             file_path = _check_download_dir(req.link, self.download_dir, hashes)
             if file_path is not None:
-                self._downloaded[req.link.url] = file_path, None
+                self._downloaded[req.link.url] = file_path
                 req.needs_more_preparation = False

         # Prepare requirements we found were already downloaded for some
         # reason. The other downloads will be completed separately.
-        partially_downloaded_reqs = []  # type: List[InstallRequirement]
+        partially_downloaded_reqs: List[InstallRequirement] = []
         for req in reqs:
             if req.needs_more_preparation:
                 partially_downloaded_reqs.append(req)

@@ -532,35 +453,41 @@ class RequirementPreparer:
         # TODO: separate this part out from RequirementPreparer when the v1
         # resolver can be removed!
         self._complete_partial_requirements(
-            partially_downloaded_reqs, parallel_builds=parallel_builds,
+            partially_downloaded_reqs,
+            parallel_builds=parallel_builds,
         )
-    def _prepare_linked_requirement(self, req, parallel_builds):
-        # type: (InstallRequirement, bool) -> Distribution
+    def _prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> BaseDistribution:
         assert req.link
         link = req.link

         self._ensure_link_req_src_dir(req, parallel_builds)
         hashes = self._get_linked_req_hashes(req)

-        if link.is_existing_dir() and self.in_tree_build:
+        if link.is_existing_dir():
             local_file = None
         elif link.url not in self._downloaded:
             try:
                 local_file = unpack_url(
-                    link, req.source_dir, self._download,
-                    self.download_dir, hashes
+                    link,
+                    req.source_dir,
+                    self._download,
+                    self.verbosity,
+                    self.download_dir,
+                    hashes,
                 )
             except NetworkConnectionError as exc:
                 raise InstallationError(
-                    'Could not install requirement {} because of HTTP '
-                    'error {} for URL {}'.format(req, exc, link)
+                    "Could not install requirement {} because of HTTP "
+                    "error {} for URL {}".format(req, exc, link)
                 )
         else:
-            file_path, content_type = self._downloaded[link.url]
+            file_path = self._downloaded[link.url]
             if hashes:
                 hashes.check_against_path(file_path)
-            local_file = File(file_path, content_type)
+            local_file = File(file_path, content_type=None)

         # For use in later processing,
         # preserve the file path on the requirement.

@@ -568,12 +495,15 @@ class RequirementPreparer:
         req.local_file_path = local_file.path

         dist = _get_prepared_distribution(
-            req, self.req_tracker, self.finder, self.build_isolation,
+            req,
+            self.build_tracker,
+            self.finder,
+            self.build_isolation,
+            self.check_build_deps,
         )
         return dist
-    def save_linked_requirement(self, req):
-        # type: (InstallRequirement) -> None
+    def save_linked_requirement(self, req: InstallRequirement) -> None:
         assert self.download_dir is not None
         assert req.link is not None
         link = req.link

@@ -584,8 +514,9 @@ class RequirementPreparer:

         if link.is_existing_dir():
             logger.debug(
-                'Not copying link to destination directory '
-                'since it is a directory: %s', link,
+                "Not copying link to destination directory "
+                "since it is a directory: %s",
+                link,
             )
             return
         if req.local_file_path is None:

@@ -596,31 +527,33 @@ class RequirementPreparer:
         if not os.path.exists(download_location):
             shutil.copy(req.local_file_path, download_location)
             download_path = display_path(download_location)
-            logger.info('Saved %s', download_path)
+            logger.info("Saved %s", download_path)
     def prepare_editable_requirement(
         self,
-        req,  # type: InstallRequirement
-    ):
-        # type: (...) -> Distribution
-        """Prepare an editable requirement
-        """
+        req: InstallRequirement,
+    ) -> BaseDistribution:
+        """Prepare an editable requirement."""
         assert req.editable, "cannot prepare a non-editable req as editable"

-        logger.info('Obtaining %s', req)
+        logger.info("Obtaining %s", req)

         with indent_log():
             if self.require_hashes:
                 raise InstallationError(
-                    'The editable requirement {} cannot be installed when '
-                    'requiring hashes, because there is no single file to '
-                    'hash.'.format(req)
+                    "The editable requirement {} cannot be installed when "
+                    "requiring hashes, because there is no single file to "
+                    "hash.".format(req)
                 )
             req.ensure_has_source_dir(self.src_dir)
             req.update_editable()

             dist = _get_prepared_distribution(
-                req, self.req_tracker, self.finder, self.build_isolation,
+                req,
+                self.build_tracker,
+                self.finder,
+                self.build_isolation,
+                self.check_build_deps,
             )

             req.check_if_exists(self.use_user_site)

@@ -629,27 +562,24 @@ class RequirementPreparer:
     def prepare_installed_requirement(
         self,
-        req,  # type: InstallRequirement
-        skip_reason  # type: str
-    ):
-        # type: (...) -> Distribution
-        """Prepare an already-installed requirement
-        """
+        req: InstallRequirement,
+        skip_reason: str,
+    ) -> BaseDistribution:
+        """Prepare an already-installed requirement."""
         assert req.satisfied_by, "req should have been satisfied but isn't"
         assert skip_reason is not None, (
             "did not get skip reason skipped but req.satisfied_by "
             "is set to {}".format(req.satisfied_by)
         )
         logger.info(
-            'Requirement %s: %s (%s)',
-            skip_reason, req, req.satisfied_by.version
+            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
         )
         with indent_log():
             if self.require_hashes:
                 logger.debug(
-                    'Since it is already installed, we are trusting this '
-                    'package without checking its hash. To ensure a '
-                    'completely repeatable environment, install into an '
-                    'empty virtualenv.'
+                    "Since it is already installed, we are trusting this "
+                    "package without checking its hash. To ensure a "
+                    "completely repeatable environment, install into an "
+                    "empty virtualenv."
                 )
-            return InstalledDistribution(req).get_pkg_resources_distribution()
+            return InstalledDistribution(req).get_metadata_distribution()
@@ -1,38 +1,34 @@
+import importlib.util
 import os
 from collections import namedtuple
 from typing import Any, List, Optional

-from pip._vendor import toml
+from pip._vendor import tomli
 from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

-from pip._internal.exceptions import InstallationError
+from pip._internal.exceptions import (
+    InstallationError,
+    InvalidPyProjectBuildRequires,
+    MissingPyProjectBuildRequires,
+)


-def _is_list_of_str(obj):
-    # type: (Any) -> bool
-    return (
-        isinstance(obj, list) and
-        all(isinstance(item, str) for item in obj)
-    )
+def _is_list_of_str(obj: Any) -> bool:
+    return isinstance(obj, list) and all(isinstance(item, str) for item in obj)


-def make_pyproject_path(unpacked_source_directory):
-    # type: (str) -> str
-    return os.path.join(unpacked_source_directory, 'pyproject.toml')
+def make_pyproject_path(unpacked_source_directory: str) -> str:
+    return os.path.join(unpacked_source_directory, "pyproject.toml")


-BuildSystemDetails = namedtuple('BuildSystemDetails', [
-    'requires', 'backend', 'check', 'backend_path'
-])
+BuildSystemDetails = namedtuple(
+    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
+)


 def load_pyproject_toml(
-    use_pep517,  # type: Optional[bool]
-    pyproject_toml,  # type: str
-    setup_py,  # type: str
-    req_name  # type: str
-):
-    # type: (...) -> Optional[BuildSystemDetails]
+    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
+) -> Optional[BuildSystemDetails]:
     """Load the pyproject.toml file.

     Parameters:

@@ -57,9 +53,15 @@ def load_pyproject_toml(
     has_pyproject = os.path.isfile(pyproject_toml)
     has_setup = os.path.isfile(setup_py)

+    if not has_pyproject and not has_setup:
+        raise InstallationError(
+            f"{req_name} does not appear to be a Python project: "
+            f"neither 'setup.py' nor 'pyproject.toml' found."
+        )
+
     if has_pyproject:
         with open(pyproject_toml, encoding="utf-8") as f:
-            pp_toml = toml.load(f)
+            pp_toml = tomli.loads(f.read())
         build_system = pp_toml.get("build-system")
     else:
         build_system = None
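tomli, vendored here to replace the unmaintained toml package, only parses; its loads takes a string, which is why the file is read first. Standalone usage:

    import tomli  # Python 3.11+ ships the same API in stdlib tomllib

    pp_toml = tomli.loads("""
    [build-system]
    requires = ["setuptools>=40.8.0", "wheel"]
    build-backend = "setuptools.build_meta"
    """)
    print(pp_toml["build-system"]["requires"])
    # ['setuptools>=40.8.0', 'wheel']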
@@ -82,17 +84,21 @@ def load_pyproject_toml(
             raise InstallationError(
                 "Disabling PEP 517 processing is invalid: "
                 "project specifies a build backend of {} "
-                "in pyproject.toml".format(
-                    build_system["build-backend"]
-                )
+                "in pyproject.toml".format(build_system["build-backend"])
             )
         use_pep517 = True

     # If we haven't worked out whether to use PEP 517 yet,
     # and the user hasn't explicitly stated a preference,
-    # we do so if the project has a pyproject.toml file.
+    # we do so if the project has a pyproject.toml file
+    # or if we cannot import setuptools.
+
+    # We fallback to PEP 517 when without setuptools,
+    # so setuptools can be installed as a default build backend.
+    # For more info see:
+    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
     elif use_pep517 is None:
-        use_pep517 = has_pyproject
+        use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools")

     # At this point, we know whether we're going to use PEP 517.
     assert use_pep517 is not None
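importlib.util.find_spec locates a module without importing it and returns None when it cannot, which is what makes it a cheap availability probe for setuptools here:

    import importlib.util

    if importlib.util.find_spec("setuptools") is None:
        print("no setuptools; default to PEP 517 so a backend can be installed")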
# Ensure that the build-system section in pyproject.toml conforms
|
# Ensure that the build-system section in pyproject.toml conforms
|
||||||
# to PEP 518.
|
# to PEP 518.
|
||||||
error_template = (
|
|
||||||
"{package} has a pyproject.toml file that does not comply "
|
|
||||||
"with PEP 518: {reason}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Specifying the build-system table but not the requires key is invalid
|
# Specifying the build-system table but not the requires key is invalid
|
||||||
if "requires" not in build_system:
|
if "requires" not in build_system:
|
||||||
raise InstallationError(
|
raise MissingPyProjectBuildRequires(package=req_name)
|
||||||
error_template.format(package=req_name, reason=(
|
|
||||||
"it has a 'build-system' table but not "
|
|
||||||
"'build-system.requires' which is mandatory in the table"
|
|
||||||
))
|
|
||||||
)
|
|
||||||
|
|
||||||
# Error out if requires is not a list of strings
|
# Error out if requires is not a list of strings
|
||||||
requires = build_system["requires"]
|
requires = build_system["requires"]
|
||||||
if not _is_list_of_str(requires):
|
if not _is_list_of_str(requires):
|
||||||
raise InstallationError(error_template.format(
|
raise InvalidPyProjectBuildRequires(
|
||||||
package=req_name,
|
package=req_name,
|
||||||
reason="'build-system.requires' is not a list of strings.",
|
reason="It is not a list of strings.",
|
||||||
))
|
)
|
||||||
|
|
||||||
# Each requirement must be valid as per PEP 508
|
# Each requirement must be valid as per PEP 508
|
||||||
for requirement in requires:
|
for requirement in requires:
|
||||||
try:
|
try:
|
||||||
Requirement(requirement)
|
Requirement(requirement)
|
||||||
except InvalidRequirement:
|
except InvalidRequirement as error:
|
||||||
raise InstallationError(
|
raise InvalidPyProjectBuildRequires(
|
||||||
error_template.format(
|
package=req_name,
|
||||||
package=req_name,
|
reason=f"It contains an invalid requirement: {requirement!r}",
|
||||||
reason=(
|
) from error
|
||||||
"'build-system.requires' contains an invalid "
|
|
||||||
"requirement: {!r}".format(requirement)
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
backend = build_system.get("build-backend")
|
backend = build_system.get("build-backend")
|
||||||
backend_path = build_system.get("backend-path", [])
|
backend_path = build_system.get("backend-path", [])
|
||||||
check = [] # type: List[str]
|
check: List[str] = []
|
||||||
if backend is None:
|
if backend is None:
|
||||||
# If the user didn't specify a backend, we assume they want to use
|
# If the user didn't specify a backend, we assume they want to use
|
||||||
# the setuptools backend. But we can't be sure they have included
|
# the setuptools backend. But we can't be sure they have included
|
||||||
|
|
|
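
Replacing InstallationError plus a shared error_template with dedicated exception types moves message construction into the exceptions themselves, so every caller reports pyproject.toml problems consistently. A rough sketch of the shape of such classes (the real ones live in pip._internal.exceptions and carry richer diagnostic output; the message wording below is illustrative, not pip's):

class MissingPyProjectBuildRequires(Exception):
    def __init__(self, package: str) -> None:
        super().__init__(
            f"{package} has a 'build-system' table but no mandatory "
            "'build-system.requires' key in pyproject.toml."
        )

class InvalidPyProjectBuildRequires(Exception):
    def __init__(self, package: str, reason: str) -> None:
        super().__init__(
            f"{package} has an invalid 'build-system.requires' in "
            f"pyproject.toml: {reason}"
        )
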
@@ -1,6 +1,6 @@
 import collections
 import logging
-from typing import Iterator, List, Optional, Sequence, Tuple
+from typing import Generator, List, Optional, Sequence, Tuple

 from pip._internal.utils.logging import indent_log

@@ -9,44 +9,42 @@ from .req_install import InstallRequirement
 from .req_set import RequirementSet

 __all__ = [
-    "RequirementSet", "InstallRequirement",
-    "parse_requirements", "install_given_reqs",
+    "RequirementSet",
+    "InstallRequirement",
+    "parse_requirements",
+    "install_given_reqs",
 ]

 logger = logging.getLogger(__name__)


 class InstallationResult:
-    def __init__(self, name):
-        # type: (str) -> None
+    def __init__(self, name: str) -> None:
         self.name = name

-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         return f"InstallationResult(name={self.name!r})"


 def _validate_requirements(
-    requirements,  # type: List[InstallRequirement]
-):
-    # type: (...) -> Iterator[Tuple[str, InstallRequirement]]
+    requirements: List[InstallRequirement],
+) -> Generator[Tuple[str, InstallRequirement], None, None]:
     for req in requirements:
         assert req.name, f"invalid to-be-installed requirement: {req}"
         yield req.name, req


 def install_given_reqs(
-    requirements,  # type: List[InstallRequirement]
-    install_options,  # type: List[str]
-    global_options,  # type: Sequence[str]
-    root,  # type: Optional[str]
-    home,  # type: Optional[str]
-    prefix,  # type: Optional[str]
-    warn_script_location,  # type: bool
-    use_user_site,  # type: bool
-    pycompile,  # type: bool
-):
-    # type: (...) -> List[InstallationResult]
+    requirements: List[InstallRequirement],
+    install_options: List[str],
+    global_options: Sequence[str],
+    root: Optional[str],
+    home: Optional[str],
+    prefix: Optional[str],
+    warn_script_location: bool,
+    use_user_site: bool,
+    pycompile: bool,
+) -> List[InstallationResult]:
     """
     Install everything in the given list.

@@ -56,8 +54,8 @@ def install_given_reqs(

     if to_install:
         logger.info(
-            'Installing collected packages: %s',
-            ', '.join(to_install.keys()),
+            "Installing collected packages: %s",
+            ", ".join(to_install.keys()),
         )

     installed = []

@@ -65,11 +63,9 @@ def install_given_reqs(
     with indent_log():
         for req_name, requirement in to_install.items():
             if requirement.should_reinstall:
-                logger.info('Attempting uninstall: %s', req_name)
+                logger.info("Attempting uninstall: %s", req_name)
                 with indent_log():
-                    uninstalled_pathset = requirement.uninstall(
-                        auto_confirm=True
-                    )
+                    uninstalled_pathset = requirement.uninstall(auto_confirm=True)
             else:
                 uninstalled_pathset = None

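
The signature changes in this file follow a pattern repeated throughout the commit: PEP 484 type comments, kept while Python 2 still had to parse these files, become inline annotations, and generator functions gain explicit Generator[...] return types. A self-contained before/after sketch (placeholder class, hypothetical function names):

from typing import Generator, List, Tuple

class InstallRequirement:  # placeholder for pip's real class
    name: str = "example"

def validate_old(requirements):
    # type: (List[InstallRequirement]) -> Generator[Tuple[str, InstallRequirement], None, None]
    for req in requirements:
        yield req.name, req

def validate_new(
    requirements: List[InstallRequirement],
) -> Generator[Tuple[str, InstallRequirement], None, None]:
    # Same behavior; Generator[Y, S, R] spells out the send and return
    # types that a bare Iterator[Y] leaves implicit.
    for req in requirements:
        yield req.name, req
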
@@ -16,32 +16,31 @@ from typing import Any, Dict, Optional, Set, Tuple, Union
 from pip._vendor.packaging.markers import Marker
 from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
 from pip._vendor.packaging.specifiers import Specifier
-from pip._vendor.pkg_resources import RequirementParseError, parse_requirements

 from pip._internal.exceptions import InstallationError
 from pip._internal.models.index import PyPI, TestPyPI
 from pip._internal.models.link import Link
 from pip._internal.models.wheel import Wheel
-from pip._internal.pyproject import make_pyproject_path
 from pip._internal.req.req_file import ParsedRequirement
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.utils.filetypes import is_archive_file
 from pip._internal.utils.misc import is_installable_dir
+from pip._internal.utils.packaging import get_requirement
 from pip._internal.utils.urls import path_to_url
 from pip._internal.vcs import is_url, vcs

 __all__ = [
-    "install_req_from_editable", "install_req_from_line",
-    "parse_editable"
+    "install_req_from_editable",
+    "install_req_from_line",
+    "parse_editable",
 ]

 logger = logging.getLogger(__name__)
 operators = Specifier._operators.keys()


-def _strip_extras(path):
-    # type: (str) -> Tuple[str, Optional[str]]
-    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
+    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
     extras = None
     if m:
         path_no_extras = m.group(1)

@@ -52,15 +51,13 @@ def _strip_extras(path):
     return path_no_extras, extras


-def convert_extras(extras):
-    # type: (Optional[str]) -> Set[str]
+def convert_extras(extras: Optional[str]) -> Set[str]:
     if not extras:
         return set()
-    return Requirement("placeholder" + extras.lower()).extras
+    return get_requirement("placeholder" + extras.lower()).extras


-def parse_editable(editable_req):
-    # type: (str) -> Tuple[Optional[str], str, Set[str]]
+def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
     """Parses an editable requirement into:
         - a requirement name
         - an URL

@@ -77,39 +74,23 @@ def parse_editable(editable_req):
     url_no_extras, extras = _strip_extras(url)

     if os.path.isdir(url_no_extras):
-        setup_py = os.path.join(url_no_extras, 'setup.py')
-        setup_cfg = os.path.join(url_no_extras, 'setup.cfg')
-        if not os.path.exists(setup_py) and not os.path.exists(setup_cfg):
-            msg = (
-                'File "setup.py" or "setup.cfg" not found. Directory cannot be '
-                'installed in editable mode: {}'
-                .format(os.path.abspath(url_no_extras))
-            )
-            pyproject_path = make_pyproject_path(url_no_extras)
-            if os.path.isfile(pyproject_path):
-                msg += (
-                    '\n(A "pyproject.toml" file was found, but editable '
-                    'mode currently requires a setuptools-based build.)'
-                )
-            raise InstallationError(msg)
-
         # Treating it as code that has already been checked out
         url_no_extras = path_to_url(url_no_extras)

-    if url_no_extras.lower().startswith('file:'):
+    if url_no_extras.lower().startswith("file:"):
         package_name = Link(url_no_extras).egg_fragment
         if extras:
             return (
                 package_name,
                 url_no_extras,
-                Requirement("placeholder" + extras.lower()).extras,
+                get_requirement("placeholder" + extras.lower()).extras,
             )
         else:
             return package_name, url_no_extras, set()

     for version_control in vcs:
-        if url.lower().startswith(f'{version_control}:'):
-            url = f'{version_control}+{url}'
+        if url.lower().startswith(f"{version_control}:"):
+            url = f"{version_control}+{url}"
             break

     link = Link(url)

@@ -117,9 +98,9 @@ def parse_editable(editable_req):
     if not link.is_vcs:
         backends = ", ".join(vcs.all_schemes)
         raise InstallationError(
-            f'{editable_req} is not a valid editable requirement. '
-            f'It should either be a path to a local project or a VCS URL '
-            f'(beginning with {backends}).'
+            f"{editable_req} is not a valid editable requirement. "
+            f"It should either be a path to a local project or a VCS URL "
+            f"(beginning with {backends})."
         )

     package_name = link.egg_fragment
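
The VCS loop above normalizes bare scheme URLs such as git:// into pip's scheme+URL convention before building a Link. A standalone re-creation (the scheme tuple is a stand-in for pip's registered vcs backends):

VCS_SCHEMES = ("git", "hg", "svn", "bzr")

def normalize_editable_url(url: str) -> str:
    # git://example.com/repo.git -> git+git://example.com/repo.git
    for version_control in VCS_SCHEMES:
        if url.lower().startswith(f"{version_control}:"):
            return f"{version_control}+{url}"
    return url

assert normalize_editable_url("git://example.com/repo.git") == "git+git://example.com/repo.git"
assert normalize_editable_url("git+https://example.com/repo.git").startswith("git+https")
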
@@ -131,44 +112,66 @@ def parse_editable(editable_req):
     return package_name, url, set()


-def deduce_helpful_msg(req):
-    # type: (str) -> str
+def check_first_requirement_in_file(filename: str) -> None:
+    """Check if file is parsable as a requirements file.
+
+    This is heavily based on ``pkg_resources.parse_requirements``, but
+    simplified to just check the first meaningful line.
+
+    :raises InvalidRequirement: If the first meaningful line cannot be parsed
+        as an requirement.
+    """
+    with open(filename, encoding="utf-8", errors="ignore") as f:
+        # Create a steppable iterator, so we can handle \-continuations.
+        lines = (
+            line
+            for line in (line.strip() for line in f)
+            if line and not line.startswith("#")  # Skip blank lines/comments.
+        )
+
+        for line in lines:
+            # Drop comments -- a hash without a space may be in a URL.
+            if " #" in line:
+                line = line[: line.find(" #")]
+            # If there is a line continuation, drop it, and append the next line.
+            if line.endswith("\\"):
+                line = line[:-2].strip() + next(lines, "")
+            Requirement(line)
+            return


+def deduce_helpful_msg(req: str) -> str:
     """Returns helpful msg in case requirements file does not exist,
     or cannot be parsed.

     :params req: Requirements file path
     """
-    msg = ""
-    if os.path.exists(req):
-        msg = " The path does exist. "
-        # Try to parse and check if it is a requirements file.
-        try:
-            with open(req) as fp:
-                # parse first line only
-                next(parse_requirements(fp.read()))
-                msg += (
-                    "The argument you provided "
-                    "({}) appears to be a"
-                    " requirements file. If that is the"
-                    " case, use the '-r' flag to install"
-                    " the packages specified within it."
-                ).format(req)
-        except RequirementParseError:
-            logger.debug(
-                "Cannot parse '%s' as requirements file", req, exc_info=True
-            )
-    else:
-        msg += f" File '{req}' does not exist."
+    if not os.path.exists(req):
+        return f" File '{req}' does not exist."
+    msg = " The path does exist. "
+    # Try to parse and check if it is a requirements file.
+    try:
+        check_first_requirement_in_file(req)
+    except InvalidRequirement:
+        logger.debug("Cannot parse '%s' as requirements file", req)
+    else:
+        msg += (
+            f"The argument you provided "
+            f"({req}) appears to be a"
+            f" requirements file. If that is the"
+            f" case, use the '-r' flag to install"
+            f" the packages specified within it."
+        )
     return msg

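
check_first_requirement_in_file replaces the old dependence on pkg_resources.parse_requirements with a first-meaningful-line probe. A simplified cousin, runnable outside pip if the third-party packaging distribution is installed:

from packaging.requirements import InvalidRequirement, Requirement

def first_line_parses(filename: str) -> bool:
    # True if the first non-blank, non-comment line parses as a PEP 508
    # requirement; continuation handling from the real helper is omitted.
    with open(filename, encoding="utf-8", errors="ignore") as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith("#"):
                continue
            try:
                Requirement(line)
            except InvalidRequirement:
                return False
            return True
    return True  # nothing meaningful to object to in an empty file
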
 class RequirementParts:
     def __init__(
         self,
-        requirement,  # type: Optional[Requirement]
-        link,  # type: Optional[Link]
-        markers,  # type: Optional[Marker]
-        extras,  # type: Set[str]
+        requirement: Optional[Requirement],
+        link: Optional[Link],
+        markers: Optional[Marker],
+        extras: Set[str],
     ):
         self.requirement = requirement
         self.link = link

@@ -176,13 +179,12 @@ class RequirementParts:
         self.extras = extras


-def parse_req_from_editable(editable_req):
-    # type: (str) -> RequirementParts
+def parse_req_from_editable(editable_req: str) -> RequirementParts:
     name, url, extras_override = parse_editable(editable_req)

     if name is not None:
         try:
-            req = Requirement(name)  # type: Optional[Requirement]
+            req: Optional[Requirement] = Requirement(name)
         except InvalidRequirement:
             raise InstallationError(f"Invalid requirement: '{name}'")
     else:

@@ -197,15 +199,16 @@ def parse_req_from_editable(editable_req):


 def install_req_from_editable(
-    editable_req,  # type: str
-    comes_from=None,  # type: Optional[Union[InstallRequirement, str]]
-    use_pep517=None,  # type: Optional[bool]
-    isolated=False,  # type: bool
-    options=None,  # type: Optional[Dict[str, Any]]
-    constraint=False,  # type: bool
-    user_supplied=False,  # type: bool
-):
-    # type: (...) -> InstallRequirement
+    editable_req: str,
+    comes_from: Optional[Union[InstallRequirement, str]] = None,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    options: Optional[Dict[str, Any]] = None,
+    constraint: bool = False,
+    user_supplied: bool = False,
+    permit_editable_wheels: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:

     parts = parse_req_from_editable(editable_req)

@@ -214,6 +217,7 @@ def install_req_from_editable(
         comes_from=comes_from,
         user_supplied=user_supplied,
         editable=True,
+        permit_editable_wheels=permit_editable_wheels,
         link=parts.link,
         constraint=constraint,
         use_pep517=use_pep517,

@@ -221,12 +225,12 @@ def install_req_from_editable(
         install_options=options.get("install_options", []) if options else [],
         global_options=options.get("global_options", []) if options else [],
         hash_options=options.get("hashes", {}) if options else {},
+        config_settings=config_settings,
         extras=parts.extras,
     )


-def _looks_like_path(name):
-    # type: (str) -> bool
+def _looks_like_path(name: str) -> bool:
     """Checks whether the string "looks like" a path on the filesystem.

     This does not check whether the target actually exists, only judge from the

@@ -245,11 +249,10 @@ def _looks_like_path(name):
     return False


-def _get_url_from_path(path, name):
-    # type: (str, str) -> Optional[str]
+def _get_url_from_path(path: str, name: str) -> Optional[str]:
     """
-    First, it checks whether a provided path is an installable directory
-    (e.g. it has a setup.py). If it is, returns the path.
+    First, it checks whether a provided path is an installable directory. If it
+    is, returns the path.

     If false, check if the path is an archive file (such as a .whl).
     The function checks if the path is a file. If false, if the path has

@@ -258,6 +261,8 @@ def _get_url_from_path(path, name):
     if _looks_like_path(name) and os.path.isdir(path):
         if is_installable_dir(path):
             return path_to_url(path)
+        # TODO: The is_installable_dir test here might not be necessary
+        # now that it is done in load_pyproject_toml too.
         raise InstallationError(
             f"Directory {name!r} is not installable. Neither 'setup.py' "
             "nor 'pyproject.toml' found."

@@ -266,25 +271,23 @@ def _get_url_from_path(path, name):
         return None
     if os.path.isfile(path):
         return path_to_url(path)
-    urlreq_parts = name.split('@', 1)
+    urlreq_parts = name.split("@", 1)
     if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
         # If the path contains '@' and the part before it does not look
         # like a path, try to treat it as a PEP 440 URL req instead.
         return None
     logger.warning(
-        'Requirement %r looks like a filename, but the '
-        'file does not exist',
-        name
+        "Requirement %r looks like a filename, but the file does not exist",
+        name,
     )
     return path_to_url(path)

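
_get_url_from_path leans on the two small heuristics shown above; in particular, a string such as "name @ https://..." is kept as a PEP 440 URL requirement rather than treated as a missing file. A condensed sketch of that check:

import os

def looks_like_path(name: str) -> bool:
    # Condensed from _looks_like_path: a path separator or a leading "."
    # signals filesystem intent.
    if os.path.sep in name:
        return True
    if os.path.altsep is not None and os.path.altsep in name:
        return True
    return name.startswith(".")

def is_pep440_url_req(name: str) -> bool:
    urlreq_parts = name.split("@", 1)
    return len(urlreq_parts) >= 2 and not looks_like_path(urlreq_parts[0])

assert is_pep440_url_req("pip @ https://example.com/pip.zip")
assert not is_pep440_url_req("./downloads/pip@2.zip")
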
-def parse_req_from_line(name, line_source):
-    # type: (str, Optional[str]) -> RequirementParts
+def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
     if is_url(name):
-        marker_sep = '; '
+        marker_sep = "; "
     else:
-        marker_sep = ';'
+        marker_sep = ";"
     if marker_sep in name:
         name, markers_as_string = name.split(marker_sep, 1)
         markers_as_string = markers_as_string.strip()

@@ -311,9 +314,8 @@ def parse_req_from_line(name, line_source):
     # it's a local file, dir, or url
     if link:
         # Handle relative file URLs
-        if link.scheme == 'file' and re.search(r'\.\./', link.url):
-            link = Link(
-                path_to_url(os.path.normpath(os.path.abspath(link.path))))
+        if link.scheme == "file" and re.search(r"\.\./", link.url):
+            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
         # wheel file
         if link.is_wheel:
             wheel = Wheel(link.filename)  # can raise InvalidWheelFilename

@@ -329,29 +331,27 @@ def parse_req_from_line(name, line_source):

     extras = convert_extras(extras_as_string)

-    def with_source(text):
-        # type: (str) -> str
+    def with_source(text: str) -> str:
         if not line_source:
             return text
-        return f'{text} (from {line_source})'
+        return f"{text} (from {line_source})"

     def _parse_req_string(req_as_string: str) -> Requirement:
         try:
-            req = Requirement(req_as_string)
+            req = get_requirement(req_as_string)
         except InvalidRequirement:
             if os.path.sep in req_as_string:
                 add_msg = "It looks like a path."
                 add_msg += deduce_helpful_msg(req_as_string)
-            elif ('=' in req_as_string and
-                  not any(op in req_as_string for op in operators)):
+            elif "=" in req_as_string and not any(
+                op in req_as_string for op in operators
+            ):
                 add_msg = "= is not a valid operator. Did you mean == ?"
             else:
-                add_msg = ''
-            msg = with_source(
-                f'Invalid requirement: {req_as_string!r}'
-            )
+                add_msg = ""
+            msg = with_source(f"Invalid requirement: {req_as_string!r}")
             if add_msg:
-                msg += f'\nHint: {add_msg}'
+                msg += f"\nHint: {add_msg}"
             raise InstallationError(msg)
         else:
             # Deprecate extras after specifiers: "name>=1.0[extras]"

@@ -360,13 +360,13 @@ def parse_req_from_line(name, line_source):
             # RequirementParts
             for spec in req.specifier:
                 spec_str = str(spec)
-                if spec_str.endswith(']'):
+                if spec_str.endswith("]"):
                     msg = f"Extras after version '{spec_str}'."
                     raise InstallationError(msg)
             return req

     if req_as_string is not None:
-        req = _parse_req_string(req_as_string)  # type: Optional[Requirement]
+        req: Optional[Requirement] = _parse_req_string(req_as_string)
     else:
         req = None

@@ -374,16 +374,16 @@ def parse_req_from_line(name, line_source):


 def install_req_from_line(
-    name,  # type: str
-    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
-    use_pep517=None,  # type: Optional[bool]
-    isolated=False,  # type: bool
-    options=None,  # type: Optional[Dict[str, Any]]
-    constraint=False,  # type: bool
-    line_source=None,  # type: Optional[str]
-    user_supplied=False,  # type: bool
-):
-    # type: (...) -> InstallRequirement
+    name: str,
+    comes_from: Optional[Union[str, InstallRequirement]] = None,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    options: Optional[Dict[str, Any]] = None,
+    constraint: bool = False,
+    line_source: Optional[str] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
     """Creates an InstallRequirement from a name, which might be a
     requirement, directory containing 'setup.py', filename, or URL.

@@ -393,11 +393,16 @@ def install_req_from_line(
     parts = parse_req_from_line(name, line_source)

     return InstallRequirement(
-        parts.requirement, comes_from, link=parts.link, markers=parts.markers,
-        use_pep517=use_pep517, isolated=isolated,
+        parts.requirement,
+        comes_from,
+        link=parts.link,
+        markers=parts.markers,
+        use_pep517=use_pep517,
+        isolated=isolated,
         install_options=options.get("install_options", []) if options else [],
         global_options=options.get("global_options", []) if options else [],
         hash_options=options.get("hashes", {}) if options else {},
+        config_settings=config_settings,
         constraint=constraint,
         extras=parts.extras,
         user_supplied=user_supplied,

@@ -405,15 +410,15 @@ def install_req_from_line(


 def install_req_from_req_string(
-    req_string,  # type: str
-    comes_from=None,  # type: Optional[InstallRequirement]
-    isolated=False,  # type: bool
-    use_pep517=None,  # type: Optional[bool]
-    user_supplied=False,  # type: bool
-):
-    # type: (...) -> InstallRequirement
+    req_string: str,
+    comes_from: Optional[InstallRequirement] = None,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
     try:
-        req = Requirement(req_string)
+        req = get_requirement(req_string)
     except InvalidRequirement:
         raise InstallationError(f"Invalid requirement: '{req_string}'")

@@ -421,8 +426,12 @@ def install_req_from_req_string(
         PyPI.file_storage_domain,
         TestPyPI.file_storage_domain,
     ]
-    if (req.url and comes_from and comes_from.link and
-            comes_from.link.netloc in domains_not_allowed):
+    if (
+        req.url
+        and comes_from
+        and comes_from.link
+        and comes_from.link.netloc in domains_not_allowed
+    ):
         # Explicitly disallow pypi packages that depend on external urls
         raise InstallationError(
             "Packages installed from PyPI cannot depend on packages "

@@ -436,16 +445,17 @@ def install_req_from_req_string(
         isolated=isolated,
         use_pep517=use_pep517,
         user_supplied=user_supplied,
+        config_settings=config_settings,
     )


 def install_req_from_parsed_requirement(
-    parsed_req,  # type: ParsedRequirement
-    isolated=False,  # type: bool
-    use_pep517=None,  # type: Optional[bool]
-    user_supplied=False,  # type: bool
-):
-    # type: (...) -> InstallRequirement
+    parsed_req: ParsedRequirement,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
     if parsed_req.is_editable:
         req = install_req_from_editable(
             parsed_req.requirement,

@@ -454,6 +464,7 @@ def install_req_from_parsed_requirement(
             constraint=parsed_req.constraint,
             isolated=isolated,
             user_supplied=user_supplied,
+            config_settings=config_settings,
         )

     else:

@@ -466,12 +477,14 @@ def install_req_from_parsed_requirement(
             constraint=parsed_req.constraint,
             line_source=parsed_req.line_source,
             user_supplied=user_supplied,
+            config_settings=config_settings,
         )
     return req


-def install_req_from_link_and_ireq(link, ireq):
-    # type: (Link, InstallRequirement) -> InstallRequirement
+def install_req_from_link_and_ireq(
+    link: Link, ireq: InstallRequirement
+) -> InstallRequirement:
     return InstallRequirement(
         req=ireq.req,
         comes_from=ireq.comes_from,

@@ -483,4 +496,6 @@ def install_req_from_link_and_ireq(link, ireq):
         install_options=ireq.install_options,
         global_options=ireq.global_options,
         hash_options=ireq.hash_options,
+        config_settings=ireq.config_settings,
+        user_supplied=ireq.user_supplied,
     )

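
The reflowed hint logic in _parse_req_string is behavior-preserving; the probe below exercises both the parse failure and the "=" hint path, assuming the third-party packaging distribution for PEP 508 parsing (pip itself uses its vendored copy plus a cached get_requirement):

from packaging.requirements import InvalidRequirement, Requirement

OPERATORS = ("===", "==", "!=", "<=", ">=", "<", ">", "~=")

def hint_for(req_as_string: str) -> str:
    try:
        Requirement(req_as_string)
        return ""
    except InvalidRequirement:
        if "=" in req_as_string and not any(op in req_as_string for op in OPERATORS):
            return "= is not a valid operator. Did you mean == ?"
        return ""

assert hint_for("pip=21.3") == "= is not a valid operator. Did you mean == ?"
assert hint_for("pip==21.3") == ""
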
@@ -8,7 +8,17 @@ import re
 import shlex
 import urllib.parse
 from optparse import Values
-from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Optional, Tuple
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+)

 from pip._internal.cli import cmdoptions
 from pip._internal.exceptions import InstallationError, RequirementsFileParseError

@@ -16,7 +26,7 @@ from pip._internal.models.search_scope import SearchScope
 from pip._internal.network.session import PipSession
 from pip._internal.network.utils import raise_for_status
 from pip._internal.utils.encoding import auto_decode
-from pip._internal.utils.urls import get_url_scheme, url_to_path
+from pip._internal.utils.urls import get_url_scheme

 if TYPE_CHECKING:
     # NoReturn introduced in 3.6.2; imported only for type checking to maintain

@@ -25,22 +35,22 @@ if TYPE_CHECKING:

     from pip._internal.index.package_finder import PackageFinder

-__all__ = ['parse_requirements']
+__all__ = ["parse_requirements"]

-ReqFileLines = Iterator[Tuple[int, str]]
+ReqFileLines = Iterable[Tuple[int, str]]

 LineParser = Callable[[str], Tuple[str, Values]]

-SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
-COMMENT_RE = re.compile(r'(^|\s+)#.*$')
+SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
+COMMENT_RE = re.compile(r"(^|\s+)#.*$")

 # Matches environment variable-style values in '${MY_VARIABLE_1}' with the
 # variable name consisting of only uppercase letters, digits or the '_'
 # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
 # 2013 Edition.
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
+ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

-SUPPORTED_OPTIONS = [
+SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
     cmdoptions.index_url,
     cmdoptions.extra_index_url,
     cmdoptions.no_index,

@@ -55,14 +65,14 @@ SUPPORTED_OPTIONS = [
     cmdoptions.pre,
     cmdoptions.trusted_host,
     cmdoptions.use_new_feature,
-]  # type: List[Callable[..., optparse.Option]]
+]

 # options to be passed to requirements
-SUPPORTED_OPTIONS_REQ = [
+SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
     cmdoptions.install_options,
     cmdoptions.global_options,
     cmdoptions.hash,
-]  # type: List[Callable[..., optparse.Option]]
+]

 # the 'dest' string values
 SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
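
ENV_VAR_RE above only admits ${UPPERCASE_WITH_DIGITS_AND_UNDERSCORES}, per the POSIX note in the comment. A small demonstration of the expansion step (the variable name is invented for the example):

import os
import re

ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

def expand_line(line: str) -> str:
    # Unset variables are left in place, matching expand_env_variables.
    for env_var, var_name in ENV_VAR_RE.findall(line):
        value = os.environ.get(var_name)
        if value is not None:
            line = line.replace(env_var, value)
    return line

os.environ["INDEX_TOKEN"] = "s3cret"
print(expand_line("--extra-index-url https://${INDEX_TOKEN}@pypi.example.com/simple"))
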
@@ -71,14 +81,13 @@ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
 class ParsedRequirement:
     def __init__(
         self,
-        requirement,  # type:str
-        is_editable,  # type: bool
-        comes_from,  # type: str
-        constraint,  # type: bool
-        options=None,  # type: Optional[Dict[str, Any]]
-        line_source=None,  # type: Optional[str]
-    ):
-        # type: (...) -> None
+        requirement: str,
+        is_editable: bool,
+        comes_from: str,
+        constraint: bool,
+        options: Optional[Dict[str, Any]] = None,
+        line_source: Optional[str] = None,
+    ) -> None:
         self.requirement = requirement
         self.is_editable = is_editable
         self.comes_from = comes_from

@@ -90,13 +99,12 @@ class ParsedRequirement:
 class ParsedLine:
     def __init__(
         self,
-        filename,  # type: str
-        lineno,  # type: int
-        args,  # type: str
-        opts,  # type: Values
-        constraint,  # type: bool
-    ):
-        # type: (...) -> None
+        filename: str,
+        lineno: int,
+        args: str,
+        opts: Values,
+        constraint: bool,
+    ) -> None:
         self.filename = filename
         self.lineno = lineno
         self.opts = opts

@@ -116,13 +124,12 @@ class ParsedLine:


 def parse_requirements(
-    filename,  # type: str
-    session,  # type: PipSession
-    finder=None,  # type: Optional[PackageFinder]
-    options=None,  # type: Optional[optparse.Values]
-    constraint=False,  # type: bool
-):
-    # type: (...) -> Iterator[ParsedRequirement]
+    filename: str,
+    session: PipSession,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    constraint: bool = False,
+) -> Generator[ParsedRequirement, None, None]:
     """Parse a requirements file and yield ParsedRequirement instances.

     :param filename: Path or url of requirements file.

@@ -137,22 +144,18 @@ def parse_requirements(

     for parsed_line in parser.parse(filename, constraint):
         parsed_req = handle_line(
-            parsed_line,
-            options=options,
-            finder=finder,
-            session=session
+            parsed_line, options=options, finder=finder, session=session
         )
         if parsed_req is not None:
             yield parsed_req


-def preprocess(content):
-    # type: (str) -> ReqFileLines
+def preprocess(content: str) -> ReqFileLines:
     """Split, filter, and join lines, and return a line iterator

     :param content: the content of the requirements file
     """
-    lines_enum = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
+    lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
     lines_enum = join_lines(lines_enum)
     lines_enum = ignore_comments(lines_enum)
     lines_enum = expand_env_variables(lines_enum)
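
preprocess composes generators, so each stage consumes and yields (line_number, line) pairs and original line numbers survive joining and filtering. A toy stage in the same shape:

from typing import Iterable, Tuple

ReqFileLines = Iterable[Tuple[int, str]]

def drop_blank(lines_enum: ReqFileLines) -> ReqFileLines:
    # Same contract as join_lines/ignore_comments: consume and yield
    # numbered lines, never renumbering them.
    for line_number, line in lines_enum:
        if line.strip():
            yield line_number, line.strip()

content = "requests==2.26.0\n\npip\n"
lines: ReqFileLines = enumerate(content.splitlines(), start=1)
assert list(drop_blank(lines)) == [(1, "requests==2.26.0"), (3, "pip")]
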
@@ -160,14 +163,15 @@ def preprocess(content):


 def handle_requirement_line(
-    line,  # type: ParsedLine
-    options=None,  # type: Optional[optparse.Values]
-):
-    # type: (...) -> ParsedRequirement
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+) -> ParsedRequirement:

     # preserve for the nested code path
-    line_comes_from = '{} {} (line {})'.format(
-        '-c' if line.constraint else '-r', line.filename, line.lineno,
+    line_comes_from = "{} {} (line {})".format(
+        "-c" if line.constraint else "-r",
+        line.filename,
+        line.lineno,
     )

     assert line.is_requirement

@@ -192,7 +196,7 @@ def handle_requirement_line(
         if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
             req_options[dest] = line.opts.__dict__[dest]

-    line_source = f'line {line.lineno} of {line.filename}'
+    line_source = f"line {line.lineno} of {line.filename}"
     return ParsedRequirement(
         requirement=line.requirement,
         is_editable=line.is_editable,

@@ -204,14 +208,13 @@ def handle_requirement_line(


 def handle_option_line(
-    opts,  # type: Values
-    filename,  # type: str
-    lineno,  # type: int
-    finder=None,  # type: Optional[PackageFinder]
-    options=None,  # type: Optional[optparse.Values]
-    session=None,  # type: Optional[PipSession]
-):
-    # type: (...) -> None
+    opts: Values,
+    filename: str,
+    lineno: int,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    session: Optional[PipSession] = None,
+) -> None:

     if options:
         # percolate options upward

@@ -219,8 +222,7 @@ def handle_option_line(
         options.require_hashes = opts.require_hashes
         if opts.features_enabled:
             options.features_enabled.extend(
-                f for f in opts.features_enabled
-                if f not in options.features_enabled
+                f for f in opts.features_enabled if f not in options.features_enabled
             )

     # set finder options

@@ -262,17 +264,16 @@ def handle_option_line(

     if session:
         for host in opts.trusted_hosts or []:
-            source = f'line {lineno} of {filename}'
+            source = f"line {lineno} of {filename}"
             session.add_trusted_host(host, source=source)


 def handle_line(
-    line,  # type: ParsedLine
-    options=None,  # type: Optional[optparse.Values]
-    finder=None,  # type: Optional[PackageFinder]
-    session=None,  # type: Optional[PipSession]
-):
-    # type: (...) -> Optional[ParsedRequirement]
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+    finder: Optional["PackageFinder"] = None,
+    session: Optional[PipSession] = None,
+) -> Optional[ParsedRequirement]:
     """Handle a single parsed requirements line; This can result in
     creating/yielding requirements, or updating the finder.

@@ -314,25 +315,24 @@ def handle_line(
 class RequirementsFileParser:
     def __init__(
         self,
-        session,  # type: PipSession
-        line_parser,  # type: LineParser
-    ):
-        # type: (...) -> None
+        session: PipSession,
+        line_parser: LineParser,
+    ) -> None:
         self._session = session
         self._line_parser = line_parser

-    def parse(self, filename, constraint):
-        # type: (str, bool) -> Iterator[ParsedLine]
-        """Parse a given file, yielding parsed lines.
-        """
+    def parse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        """Parse a given file, yielding parsed lines."""
         yield from self._parse_and_recurse(filename, constraint)

-    def _parse_and_recurse(self, filename, constraint):
-        # type: (str, bool) -> Iterator[ParsedLine]
+    def _parse_and_recurse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
         for line in self._parse_file(filename, constraint):
-            if (
-                not line.is_requirement and
-                (line.opts.requirements or line.opts.constraints)
+            if not line.is_requirement and (
+                line.opts.requirements or line.opts.constraints
             ):
                 # parse a nested requirements file
                 if line.opts.requirements:

@@ -350,15 +350,17 @@ class RequirementsFileParser:
                 elif not SCHEME_RE.search(req_path):
                     # do a join so relative paths work
                     req_path = os.path.join(
-                        os.path.dirname(filename), req_path,
+                        os.path.dirname(filename),
+                        req_path,
                     )

                 yield from self._parse_and_recurse(req_path, nested_constraint)
             else:
                 yield line

-    def _parse_file(self, filename, constraint):
-        # type: (str, bool) -> Iterator[ParsedLine]
+    def _parse_file(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
         _, content = get_file_content(filename, self._session)

         lines_enum = preprocess(content)

@@ -368,7 +370,7 @@ class RequirementsFileParser:
                 args_str, opts = self._line_parser(line)
             except OptionParsingError as e:
                 # add offending line
-                msg = f'Invalid requirement: {line}\n{e.msg}'
+                msg = f"Invalid requirement: {line}\n{e.msg}"
                 raise RequirementsFileParseError(msg)

             yield ParsedLine(

@@ -380,10 +382,8 @@ class RequirementsFileParser:
             )


-def get_line_parser(finder):
-    # type: (Optional[PackageFinder]) -> LineParser
-    def parse_line(line):
-        # type: (str) -> Tuple[str, Values]
+def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
+    def parse_line(line: str) -> Tuple[str, Values]:
         # Build new parser for each line since it accumulates appendable
         # options.
         parser = build_parser()

@@ -401,32 +401,29 @@ def get_line_parser(finder):
     return parse_line


-def break_args_options(line):
-    # type: (str) -> Tuple[str, str]
+def break_args_options(line: str) -> Tuple[str, str]:
     """Break up the line into an args and options string. We only want to shlex
     (and then optparse) the options, not the args. args can contain markers
     which are corrupted by shlex.
     """
-    tokens = line.split(' ')
+    tokens = line.split(" ")
     args = []
     options = tokens[:]
     for token in tokens:
-        if token.startswith('-') or token.startswith('--'):
+        if token.startswith("-") or token.startswith("--"):
             break
         else:
             args.append(token)
             options.pop(0)
-    return ' '.join(args), ' '.join(options)
+    return " ".join(args), " ".join(options)

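
break_args_options deliberately avoids shlexing the requirement itself so PEP 508 markers survive intact; only the trailing option text is handed to optparse. Its splitting behavior on a typical hashed line (the hash value is invented for the example):

def break_args_options_sketch(line: str):
    # Verbatim logic from the function above.
    tokens = line.split(" ")
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith("-") or token.startswith("--"):
            break
        else:
            args.append(token)
            options.pop(0)
    return " ".join(args), " ".join(options)

args, options = break_args_options_sketch("requests==2.26.0 --hash=sha256:abc123")
assert args == "requests==2.26.0"
assert options == "--hash=sha256:abc123"
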
 class OptionParsingError(Exception):
-    def __init__(self, msg):
-        # type: (str) -> None
+    def __init__(self, msg: str) -> None:
         self.msg = msg


-def build_parser():
-    # type: () -> optparse.OptionParser
+def build_parser() -> optparse.OptionParser:
     """
     Return a parser for parsing requirement lines
     """

@@ -439,9 +436,9 @@ def build_parser():

     # By default optparse sys.exits on parsing errors. We want to wrap
     # that in our own exception.
-    def parser_exit(self, msg):
-        # type: (Any, str) -> NoReturn
+    def parser_exit(self: Any, msg: str) -> "NoReturn":
         raise OptionParsingError(msg)

     # NOTE: mypy disallows assigning to a method
     # https://github.com/python/mypy/issues/2427
     parser.exit = parser_exit  # type: ignore

@@ -449,52 +446,49 @@ def build_parser():
     return parser


-def join_lines(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
+def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
     """Joins a line ending in '\' with the previous line (except when following
     comments). The joined line takes on the index of the first line.
     """
     primary_line_number = None
-    new_line = []  # type: List[str]
+    new_line: List[str] = []
     for line_number, line in lines_enum:
-        if not line.endswith('\\') or COMMENT_RE.match(line):
+        if not line.endswith("\\") or COMMENT_RE.match(line):
             if COMMENT_RE.match(line):
                 # this ensures comments are always matched later
-                line = ' ' + line
+                line = " " + line
             if new_line:
                 new_line.append(line)
                 assert primary_line_number is not None
-                yield primary_line_number, ''.join(new_line)
+                yield primary_line_number, "".join(new_line)
                 new_line = []
             else:
                 yield line_number, line
         else:
             if not new_line:
                 primary_line_number = line_number
-            new_line.append(line.strip('\\'))
+            new_line.append(line.strip("\\"))

     # last line contains \
     if new_line:
         assert primary_line_number is not None
-        yield primary_line_number, ''.join(new_line)
+        yield primary_line_number, "".join(new_line)

     # TODO: handle space after '\'.
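
join_lines merges backslash-continued physical lines and stamps the result with the first line's number, which keeps later error messages pointing at the right place. A compact copy with a worked example:

import re

COMMENT_RE = re.compile(r"(^|\s+)#.*$")

def join_lines(lines_enum):
    # Same algorithm as above, trimmed of type annotations.
    primary_line_number = None
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith("\\") or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                line = " " + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, "".join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip("\\"))
    if new_line:
        yield primary_line_number, "".join(new_line)

pairs = enumerate(["requests==2.26.0 \\", "--no-binary :all:", "pip"], start=1)
assert list(join_lines(pairs)) == [(1, "requests==2.26.0 --no-binary :all:"), (3, "pip")]
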


-def ignore_comments(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
+def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
     """
     Strips comments and filter empty lines.
     """
     for line_number, line in lines_enum:
-        line = COMMENT_RE.sub('', line)
+        line = COMMENT_RE.sub("", line)
         line = line.strip()
         if line:
             yield line_number, line


-def expand_env_variables(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
+def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
     """Replace all environment variables that can be retrieved via `os.getenv`.

     The only allowed format for environment variables defined in the

@@ -521,8 +515,7 @@ def expand_env_variables(lines_enum):
         yield line_number, line


-def get_file_content(url, session):
-    # type: (str, PipSession) -> Tuple[str, str]
+def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
     """Gets the content of a file; it may be a filename, file: URL, or
     http: URL. Returns (location, content). Content is unicode.
     Respects # -*- coding: declarations on the retrieved files.

@@ -532,20 +525,16 @@ def get_file_content(url, session):
     """
     scheme = get_url_scheme(url)

-    if scheme in ['http', 'https']:
-        # FIXME: catch some errors
+    # Pip has special support for file:// URLs (LocalFSAdapter).
+    if scheme in ["http", "https", "file"]:
         resp = session.get(url)
         raise_for_status(resp)
         return resp.url, resp.text

-    elif scheme == 'file':
-        url = url_to_path(url)
-
+    # Assume this is a bare path.
     try:
-        with open(url, 'rb') as f:
+        with open(url, "rb") as f:
             content = auto_decode(f.read())
     except OSError as exc:
-        raise InstallationError(
-            f'Could not open requirements file: {exc}'
-        )
+        raise InstallationError(f"Could not open requirements file: {exc}")
     return url, content

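
With the branch merged above, http, https, and file URLs all go through the session (file:// being served by pip's LocalFSAdapter mount), and anything without one of those schemes is assumed to be a bare path and opened directly. The dispatch in miniature (pip's real get_url_scheme also copes with Windows drive letters, which a naive urlsplit misreads):

from urllib.parse import urlsplit

def content_source(url: str) -> str:
    scheme = urlsplit(url).scheme
    return "session" if scheme in ("http", "https", "file") else "open()"

assert content_source("https://example.com/reqs.txt") == "session"
assert content_source("file:///tmp/reqs.txt") == "session"
assert content_source("requirements/dev.txt") == "open()"
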
@ -1,15 +1,15 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

+import functools
 import logging
 import os
 import shutil
 import sys
 import uuid
 import zipfile
-from typing import Any, Dict, Iterable, List, Optional, Sequence, Union
+from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union

-from pip._vendor import pkg_resources, six
 from pip._vendor.packaging.markers import Marker
 from pip._vendor.packaging.requirements import Requirement
 from pip._vendor.packaging.specifiers import SpecifierSet

@ -17,39 +17,44 @@ from pip._vendor.packaging.utils import canonicalize_name
 from pip._vendor.packaging.version import Version
 from pip._vendor.packaging.version import parse as parse_version
 from pip._vendor.pep517.wrappers import Pep517HookCaller
-from pip._vendor.pkg_resources import Distribution

 from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError
+from pip._internal.exceptions import InstallationError, LegacyInstallFailure
 from pip._internal.locations import get_scheme
+from pip._internal.metadata import (
+    BaseDistribution,
+    get_default_environment,
+    get_directory_distribution,
+)
 from pip._internal.models.link import Link
 from pip._internal.operations.build.metadata import generate_metadata
+from pip._internal.operations.build.metadata_editable import generate_editable_metadata
 from pip._internal.operations.build.metadata_legacy import (
     generate_metadata as generate_metadata_legacy,
 )
 from pip._internal.operations.install.editable_legacy import (
     install_editable as install_editable_legacy,
 )
-from pip._internal.operations.install.legacy import LegacyInstallFailure
 from pip._internal.operations.install.legacy import install as install_legacy
 from pip._internal.operations.install.wheel import install_wheel
 from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
 from pip._internal.req.req_uninstall import UninstallPathSet
 from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.direct_url_helpers import (
+    direct_url_for_editable,
+    direct_url_from_link,
+)
 from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
 from pip._internal.utils.misc import (
+    ConfiguredPep517HookCaller,
     ask_path_exists,
     backup_dir,
     display_path,
-    dist_in_site_packages,
-    dist_in_usersite,
-    get_distribution,
     hide_url,
     redact_auth_from_url,
 )
-from pip._internal.utils.packaging import get_metadata
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.subprocess import runner_with_spinner_message
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
 from pip._internal.utils.virtualenv import running_under_virtualenv
 from pip._internal.vcs import vcs
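Most of what follows in this file is a mechanical migration from PEP 484 type comments to inline annotations (plus black-style reformatting). A minimal before/after sketch of the pattern, with hypothetical names:

    from typing import Optional

    # Old style: comment annotations, kept for Python 2 compatibility.
    def describe(req):
        # type: (Optional[str]) -> str
        return req or "<unnamed>"

    # New style: inline annotations, as used throughout the new code.
    def describe_annotated(req: Optional[str]) -> str:
        return req or "<unnamed>"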
@ -57,33 +62,6 @@ from pip._internal.vcs import vcs
 logger = logging.getLogger(__name__)


-def _get_dist(metadata_directory):
-    # type: (str) -> Distribution
-    """Return a pkg_resources.Distribution for the provided
-    metadata directory.
-    """
-    dist_dir = metadata_directory.rstrip(os.sep)
-
-    # Build a PathMetadata object, from path to metadata. :wink:
-    base_dir, dist_dir_name = os.path.split(dist_dir)
-    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
-
-    # Determine the correct Distribution object type.
-    if dist_dir.endswith(".egg-info"):
-        dist_cls = pkg_resources.Distribution
-        dist_name = os.path.splitext(dist_dir_name)[0]
-    else:
-        assert dist_dir.endswith(".dist-info")
-        dist_cls = pkg_resources.DistInfoDistribution
-        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
-
-    return dist_cls(
-        base_dir,
-        project_name=dist_name,
-        metadata=metadata,
-    )
-
-
 class InstallRequirement:
     """
     Represents something that may be installed later on, may have information

@ -93,40 +71,40 @@ class InstallRequirement:

     def __init__(
         self,
-        req,  # type: Optional[Requirement]
-        comes_from,  # type: Optional[Union[str, InstallRequirement]]
-        editable=False,  # type: bool
-        link=None,  # type: Optional[Link]
-        markers=None,  # type: Optional[Marker]
-        use_pep517=None,  # type: Optional[bool]
-        isolated=False,  # type: bool
-        install_options=None,  # type: Optional[List[str]]
-        global_options=None,  # type: Optional[List[str]]
-        hash_options=None,  # type: Optional[Dict[str, List[str]]]
-        constraint=False,  # type: bool
-        extras=(),  # type: Iterable[str]
-        user_supplied=False,  # type: bool
-    ):
-        # type: (...) -> None
+        req: Optional[Requirement],
+        comes_from: Optional[Union[str, "InstallRequirement"]],
+        editable: bool = False,
+        link: Optional[Link] = None,
+        markers: Optional[Marker] = None,
+        use_pep517: Optional[bool] = None,
+        isolated: bool = False,
+        install_options: Optional[List[str]] = None,
+        global_options: Optional[List[str]] = None,
+        hash_options: Optional[Dict[str, List[str]]] = None,
+        config_settings: Optional[Dict[str, str]] = None,
+        constraint: bool = False,
+        extras: Collection[str] = (),
+        user_supplied: bool = False,
+        permit_editable_wheels: bool = False,
+    ) -> None:
         assert req is None or isinstance(req, Requirement), req
         self.req = req
         self.comes_from = comes_from
         self.constraint = constraint
         self.editable = editable
-        self.legacy_install_reason = None  # type: Optional[int]
+        self.permit_editable_wheels = permit_editable_wheels
+        self.legacy_install_reason: Optional[int] = None

         # source_dir is the local directory where the linked requirement is
         # located, or unpacked. In case unpacking is needed, creating and
         # populating source_dir is done by the RequirementPreparer. Note this
         # is not necessarily the directory where pyproject.toml or setup.py is
         # located - that one is obtained via unpacked_source_directory.
-        self.source_dir = None  # type: Optional[str]
+        self.source_dir: Optional[str] = None
         if self.editable:
             assert link
             if link.is_file:
-                self.source_dir = os.path.normpath(
-                    os.path.abspath(link.file_path)
-                )
+                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

         if link is None and req and req.url:
             # PEP 508 URL requirement

@ -135,36 +113,34 @@ class InstallRequirement:
         self.original_link_is_in_wheel_cache = False

         # Path to any downloaded or already-existing package.
-        self.local_file_path = None  # type: Optional[str]
+        self.local_file_path: Optional[str] = None
         if self.link and self.link.is_file:
             self.local_file_path = self.link.file_path

         if extras:
             self.extras = extras
         elif req:
-            self.extras = {
-                pkg_resources.safe_extra(extra) for extra in req.extras
-            }
+            self.extras = {safe_extra(extra) for extra in req.extras}
         else:
             self.extras = set()
         if markers is None and req:
             markers = req.marker
         self.markers = markers

-        # This holds the pkg_resources.Distribution object if this requirement
-        # is already available:
-        self.satisfied_by = None  # type: Optional[Distribution]
+        # This holds the Distribution object if this requirement is already installed.
+        self.satisfied_by: Optional[BaseDistribution] = None
         # Whether the installation process should try to uninstall an existing
         # distribution before installing this requirement.
         self.should_reinstall = False
         # Temporary build location
-        self._temp_build_dir = None  # type: Optional[TempDirectory]
+        self._temp_build_dir: Optional[TempDirectory] = None
         # Set to True after successful installation
-        self.install_succeeded = None  # type: Optional[bool]
+        self.install_succeeded: Optional[bool] = None
         # Supplied options
         self.install_options = install_options if install_options else []
         self.global_options = global_options if global_options else []
         self.hash_options = hash_options if hash_options else {}
+        self.config_settings = config_settings
         # Set to True after successful preparation of this requirement
         self.prepared = False
         # User supplied requirement are explicitly requested for installation

@ -173,22 +149,22 @@ class InstallRequirement:
         self.user_supplied = user_supplied

         self.isolated = isolated
-        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment
+        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

         # For PEP 517, the directory where we request the project metadata
         # gets stored. We need this to pass to build_wheel, so the backend
         # can ensure that the wheel matches the metadata (see the PEP for
         # details).
-        self.metadata_directory = None  # type: Optional[str]
+        self.metadata_directory: Optional[str] = None

         # The static build requirements (from pyproject.toml)
-        self.pyproject_requires = None  # type: Optional[List[str]]
+        self.pyproject_requires: Optional[List[str]] = None

         # Build requirements that we will check are available
-        self.requirements_to_check = []  # type: List[str]
+        self.requirements_to_check: List[str] = []

         # The PEP 517 backend we should use to build the project
-        self.pep517_backend = None  # type: Optional[Pep517HookCaller]
+        self.pep517_backend: Optional[Pep517HookCaller] = None

         # Are we using PEP 517 for this requirement?
         # After pyproject.toml has been loaded, the only valid values are True
@ -200,87 +176,88 @@ class InstallRequirement:
         # This requirement needs more preparation before it can be built
         self.needs_more_preparation = False

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         if self.req:
             s = str(self.req)
             if self.link:
-                s += ' from {}'.format(redact_auth_from_url(self.link.url))
+                s += " from {}".format(redact_auth_from_url(self.link.url))
         elif self.link:
             s = redact_auth_from_url(self.link.url)
         else:
-            s = '<InstallRequirement>'
+            s = "<InstallRequirement>"
         if self.satisfied_by is not None:
-            s += ' in {}'.format(display_path(self.satisfied_by.location))
+            s += " in {}".format(display_path(self.satisfied_by.location))
         if self.comes_from:
             if isinstance(self.comes_from, str):
-                comes_from = self.comes_from  # type: Optional[str]
+                comes_from: Optional[str] = self.comes_from
             else:
                 comes_from = self.comes_from.from_path()
             if comes_from:
-                s += f' (from {comes_from})'
+                s += f" (from {comes_from})"
         return s

-    def __repr__(self):
-        # type: () -> str
-        return '<{} object: {} editable={!r}>'.format(
-            self.__class__.__name__, str(self), self.editable)
+    def __repr__(self) -> str:
+        return "<{} object: {} editable={!r}>".format(
+            self.__class__.__name__, str(self), self.editable
+        )

-    def format_debug(self):
-        # type: () -> str
-        """An un-tested helper for getting state, for debugging.
-        """
+    def format_debug(self) -> str:
+        """An un-tested helper for getting state, for debugging."""
         attributes = vars(self)
         names = sorted(attributes)

-        state = (
-            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
-        )
-        return '<{name} object: {{{state}}}>'.format(
+        state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
+        return "<{name} object: {{{state}}}>".format(
             name=self.__class__.__name__,
             state=", ".join(state),
         )

     # Things that are valid for all kinds of requirements?
     @property
-    def name(self):
-        # type: () -> Optional[str]
+    def name(self) -> Optional[str]:
         if self.req is None:
             return None
-        return pkg_resources.safe_name(self.req.name)
+        return self.req.name
+
+    @functools.lru_cache()  # use cached_property in python 3.8+
+    def supports_pyproject_editable(self) -> bool:
+        if not self.use_pep517:
+            return False
+        assert self.pep517_backend
+        with self.build_env:
+            runner = runner_with_spinner_message(
+                "Checking if build backend supports build_editable"
+            )
+            with self.pep517_backend.subprocess_runner(runner):
+                return "build_editable" in self.pep517_backend._supported_features()

     @property
-    def specifier(self):
-        # type: () -> SpecifierSet
+    def specifier(self) -> SpecifierSet:
         return self.req.specifier

     @property
-    def is_pinned(self):
-        # type: () -> bool
+    def is_pinned(self) -> bool:
         """Return whether I am pinned to an exact version.

         For example, some-package==1.2 is pinned; some-package>1.2 is not.
         """
         specifiers = self.specifier
-        return (len(specifiers) == 1 and
-                next(iter(specifiers)).operator in {'==', '==='})
+        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

-    def match_markers(self, extras_requested=None):
-        # type: (Optional[Iterable[str]]) -> bool
+    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
         if not extras_requested:
             # Provide an extra to safely evaluate the markers
             # without matching any extra
-            extras_requested = ('',)
+            extras_requested = ("",)
         if self.markers is not None:
             return any(
-                self.markers.evaluate({'extra': extra})
-                for extra in extras_requested)
+                self.markers.evaluate({"extra": extra}) for extra in extras_requested
+            )
         else:
             return True

     @property
-    def has_hash_options(self):
-        # type: () -> bool
+    def has_hash_options(self) -> bool:
         """Return whether any known-good hashes are specified as options.

         These activate --require-hashes mode; hashes specified as part of a
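The new supports_pyproject_editable memoizes its backend probe with functools.lru_cache(); the inline comment points at cached_property as the 3.8+ alternative. The trade-off, sketched minimally: lru_cache on a method keys the cache on self (and keeps the instance alive), while cached_property stores the result on the instance instead.

    import functools

    class ProbeLru:
        @functools.lru_cache()  # cache key includes self
        def check(self) -> bool:
            print("probing")
            return True

    class ProbeCached:
        @functools.cached_property  # Python 3.8+; result stored on the instance
        def check(self) -> bool:
            print("probing")
            return True

    p = ProbeLru()
    p.check()
    p.check()  # "probing" printed only once

    q = ProbeCached()
    q.check
    q.check    # attribute access; also printed only once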
@ -289,8 +266,7 @@ class InstallRequirement:
         """
         return bool(self.hash_options)

-    def hashes(self, trust_internet=True):
-        # type: (bool) -> Hashes
+    def hashes(self, trust_internet: bool = True) -> Hashes:
         """Return a hash-comparer that considers my option- and URL-based
         hashes to be known-good.

@ -311,10 +287,8 @@ class InstallRequirement:
             good_hashes.setdefault(link.hash_name, []).append(link.hash)
         return Hashes(good_hashes)

-    def from_path(self):
-        # type: () -> Optional[str]
-        """Format a nice indicator to show where this "comes from"
-        """
+    def from_path(self) -> Optional[str]:
+        """Format a nice indicator to show where this "comes from" """
         if self.req is None:
             return None
         s = str(self.req)

@ -324,11 +298,12 @@ class InstallRequirement:
         else:
             comes_from = self.comes_from.from_path()
         if comes_from:
-            s += '->' + comes_from
+            s += "->" + comes_from
         return s

-    def ensure_build_location(self, build_dir, autodelete, parallel_builds):
-        # type: (str, bool, bool) -> str
+    def ensure_build_location(
+        self, build_dir: str, autodelete: bool, parallel_builds: bool
+    ) -> str:
         assert build_dir is not None
         if self._temp_build_dir is not None:
             assert self._temp_build_dir.path

@ -349,14 +324,14 @@ class InstallRequirement:

         # When parallel builds are enabled, add a UUID to the build directory
         # name so multiple builds do not interfere with each other.
-        dir_name = canonicalize_name(self.name)  # type: str
+        dir_name: str = canonicalize_name(self.name)
         if parallel_builds:
             dir_name = f"{dir_name}_{uuid.uuid4().hex}"

         # FIXME: Is there a better place to create the build_dir? (hg and bzr
         # need this)
         if not os.path.exists(build_dir):
-            logger.debug('Creating directory %s', build_dir)
+            logger.debug("Creating directory %s", build_dir)
             os.makedirs(build_dir)
         actual_build_dir = os.path.join(build_dir, dir_name)
         # `None` indicates that we respect the globally-configured deletion

@ -369,10 +344,8 @@ class InstallRequirement:
             globally_managed=True,
         ).path

-    def _set_requirement(self):
-        # type: () -> None
-        """Set requirement after generating metadata.
-        """
+    def _set_requirement(self) -> None:
+        """Set requirement after generating metadata."""
         assert self.req is None
         assert self.metadata is not None
         assert self.source_dir is not None

@ -384,15 +357,16 @@ class InstallRequirement:
             op = "==="

         self.req = Requirement(
-            "".join([
-                self.metadata["Name"],
-                op,
-                self.metadata["Version"],
-            ])
+            "".join(
+                [
+                    self.metadata["Name"],
+                    op,
+                    self.metadata["Version"],
+                ]
+            )
         )

-    def warn_on_mismatching_name(self):
-        # type: () -> None
+    def warn_on_mismatching_name(self) -> None:
         metadata_name = canonicalize_name(self.metadata["Name"])
         if canonicalize_name(self.req.name) == metadata_name:
             # Everything is fine.

@ -400,45 +374,40 @@ class InstallRequirement:

         # If we're here, there's a mismatch. Log a warning about it.
         logger.warning(
-            'Generating metadata for package %s '
-            'produced metadata for project name %s. Fix your '
-            '#egg=%s fragments.',
-            self.name, metadata_name, self.name
+            "Generating metadata for package %s "
+            "produced metadata for project name %s. Fix your "
+            "#egg=%s fragments.",
+            self.name,
+            metadata_name,
+            self.name,
         )
         self.req = Requirement(metadata_name)

-    def check_if_exists(self, use_user_site):
-        # type: (bool) -> None
+    def check_if_exists(self, use_user_site: bool) -> None:
         """Find an installed distribution that satisfies or conflicts
         with this requirement, and set self.satisfied_by or
         self.should_reinstall appropriately.
         """
         if self.req is None:
             return
-        existing_dist = get_distribution(self.req.name)
+        existing_dist = get_default_environment().get_distribution(self.req.name)
         if not existing_dist:
             return

-        # pkg_resouces may contain a different copy of packaging.version from
-        # pip in if the downstream distributor does a poor job debundling pip.
-        # We avoid existing_dist.parsed_version and let SpecifierSet.contains
-        # parses the version instead.
-        existing_version = existing_dist.version
-        version_compatible = (
-            existing_version is not None and
-            self.req.specifier.contains(existing_version, prereleases=True)
+        version_compatible = self.req.specifier.contains(
+            existing_dist.version,
+            prereleases=True,
         )
         if not version_compatible:
             self.satisfied_by = None
             if use_user_site:
-                if dist_in_usersite(existing_dist):
+                if existing_dist.in_usersite:
                     self.should_reinstall = True
-                elif (running_under_virtualenv() and
-                        dist_in_site_packages(existing_dist)):
+                elif running_under_virtualenv() and existing_dist.in_site_packages:
                     raise InstallationError(
-                        "Will not install to the user site because it will "
-                        "lack sys.path precedence to {} in {}".format(
-                            existing_dist.project_name, existing_dist.location)
+                        f"Will not install to the user site because it will "
+                        f"lack sys.path precedence to {existing_dist.raw_name} "
+                        f"in {existing_dist.location}"
                     )
             else:
                 self.should_reinstall = True
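check_if_exists now hands the installed version directly to SpecifierSet.contains(..., prereleases=True). The prereleases=True matters: by default a specifier refuses pre-release candidates, so an installed pre-release would look unsatisfied. A small demonstration with the packaging library:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0")
    print(spec.contains("1.5"))                       # True
    print(spec.contains("2.0rc1"))                    # False: pre-releases excluded by default
    print(spec.contains("2.0rc1", prereleases=True))  # True: matches pip's treatment of installed versions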
@ -453,36 +422,38 @@ class InstallRequirement:

     # Things valid for wheels
     @property
-    def is_wheel(self):
-        # type: () -> bool
+    def is_wheel(self) -> bool:
         if not self.link:
             return False
         return self.link.is_wheel

     # Things valid for sdists
     @property
-    def unpacked_source_directory(self):
-        # type: () -> str
+    def unpacked_source_directory(self) -> str:
         return os.path.join(
-            self.source_dir,
-            self.link and self.link.subdirectory_fragment or '')
+            self.source_dir, self.link and self.link.subdirectory_fragment or ""
+        )

     @property
-    def setup_py_path(self):
-        # type: () -> str
+    def setup_py_path(self) -> str:
         assert self.source_dir, f"No source dir for {self}"
-        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')
+        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")

         return setup_py

     @property
-    def pyproject_toml_path(self):
-        # type: () -> str
+    def setup_cfg_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
+
+        return setup_cfg
+
+    @property
+    def pyproject_toml_path(self) -> str:
         assert self.source_dir, f"No source dir for {self}"
         return make_pyproject_path(self.unpacked_source_directory)

-    def load_pyproject_toml(self):
-        # type: () -> None
+    def load_pyproject_toml(self) -> None:
         """Load the pyproject.toml file.

         After calling this routine, all of the attributes related to PEP 517

@ -491,10 +462,7 @@ class InstallRequirement:
         follow the PEP 517 or legacy (setup.py) code path.
         """
         pyproject_toml_data = load_pyproject_toml(
-            self.use_pep517,
-            self.pyproject_toml_path,
-            self.setup_py_path,
-            str(self)
+            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
         )

         if pyproject_toml_data is None:

@ -505,63 +473,70 @@ class InstallRequirement:
         requires, backend, check, backend_path = pyproject_toml_data
         self.requirements_to_check = check
         self.pyproject_requires = requires
-        self.pep517_backend = Pep517HookCaller(
-            self.unpacked_source_directory, backend, backend_path=backend_path,
+        self.pep517_backend = ConfiguredPep517HookCaller(
+            self,
+            self.unpacked_source_directory,
+            backend,
+            backend_path=backend_path,
         )

-    def _check_setup_py_or_cfg_exists(self) -> bool:
-        """Check if the requirement actually has a setuptools build file.
-
-        If setup.py does not exist, we also check setup.cfg in the same
-        directory and allow the directory if that exists.
-        """
-        if os.path.exists(self.setup_py_path):
-            return True
-        stem, ext = os.path.splitext(self.setup_py_path)
-        if ext == ".py" and os.path.exists(f"{stem}.cfg"):
-            return True
-        return False
-
-    def _generate_metadata(self):
-        # type: () -> str
-        """Invokes metadata generator functions, with the required arguments.
-        """
-        if not self.use_pep517:
-            assert self.unpacked_source_directory
-
-            if not self._check_setup_py_or_cfg_exists():
-                raise InstallationError(
-                    f'File "setup.py" or "setup.cfg" not found for legacy '
-                    f'project {self}.'
-                )
-
-            return generate_metadata_legacy(
-                build_env=self.build_env,
-                setup_py_path=self.setup_py_path,
-                source_dir=self.unpacked_source_directory,
-                isolated=self.isolated,
-                details=self.name or f"from {self.link}"
-            )
-
-        assert self.pep517_backend is not None
-
-        return generate_metadata(
-            build_env=self.build_env,
-            backend=self.pep517_backend,
-        )
-
-    def prepare_metadata(self):
-        # type: () -> None
-        """Ensure that project metadata is available.
-
-        Under PEP 517, call the backend hook to prepare the metadata.
-        Under legacy processing, call setup.py egg-info.
-        """
-        assert self.source_dir
-
-        with indent_log():
-            self.metadata_directory = self._generate_metadata()
+    def isolated_editable_sanity_check(self) -> None:
+        """Check that an editable requirement if valid for use with PEP 517/518.
+
+        This verifies that an editable that has a pyproject.toml either supports PEP 660
+        or as a setup.py or a setup.cfg
+        """
+        if (
+            self.editable
+            and self.use_pep517
+            and not self.supports_pyproject_editable()
+            and not os.path.isfile(self.setup_py_path)
+            and not os.path.isfile(self.setup_cfg_path)
+        ):
+            raise InstallationError(
+                f"Project {self} has a 'pyproject.toml' and its build "
+                f"backend is missing the 'build_editable' hook. Since it does not "
+                f"have a 'setup.py' nor a 'setup.cfg', "
+                f"it cannot be installed in editable mode. "
+                f"Consider using a build backend that supports PEP 660."
+            )
+
+    def prepare_metadata(self) -> None:
+        """Ensure that project metadata is available.
+
+        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
+        Under legacy processing, call setup.py egg-info.
+        """
+        assert self.source_dir
+        details = self.name or f"from {self.link}"
+
+        if self.use_pep517:
+            assert self.pep517_backend is not None
+            if (
+                self.editable
+                and self.permit_editable_wheels
+                and self.supports_pyproject_editable()
+            ):
+                self.metadata_directory = generate_editable_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+            else:
+                self.metadata_directory = generate_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+        else:
+            self.metadata_directory = generate_metadata_legacy(
+                build_env=self.build_env,
+                setup_py_path=self.setup_py_path,
+                source_dir=self.unpacked_source_directory,
+                isolated=self.isolated,
+                details=details,
+            )

         # Act on the newly generated metadata, based on the name and version.
         if not self.name:
             self._set_requirement()
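The rewritten prepare_metadata branches three ways: PEP 660 editable, regular PEP 517, and legacy setup.py. The PEP 660 branch hinges on the backend advertising a build_editable hook. A rough sketch of that capability check with the backend imported in-process; pip itself calls the backend through pep517's subprocess wrappers, so this is only an approximation:

    import importlib

    def backend_supports_editable(backend_name: str) -> bool:
        # PEP 660 backends expose build_editable as a module-level hook.
        backend = importlib.import_module(backend_name)
        return hasattr(backend, "build_editable")

    # setuptools' PEP 517 backend grew this hook in setuptools 64:
    print(backend_supports_editable("setuptools.build_meta"))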
@ -571,30 +546,27 @@ class InstallRequirement:
         self.assert_source_matches_version()

     @property
-    def metadata(self):
-        # type: () -> Any
-        if not hasattr(self, '_metadata'):
-            self._metadata = get_metadata(self.get_dist())
+    def metadata(self) -> Any:
+        if not hasattr(self, "_metadata"):
+            self._metadata = self.get_dist().metadata

         return self._metadata

-    def get_dist(self):
-        # type: () -> Distribution
-        return _get_dist(self.metadata_directory)
+    def get_dist(self) -> BaseDistribution:
+        return get_directory_distribution(self.metadata_directory)

-    def assert_source_matches_version(self):
-        # type: () -> None
+    def assert_source_matches_version(self) -> None:
         assert self.source_dir
-        version = self.metadata['version']
+        version = self.metadata["version"]
         if self.req.specifier and version not in self.req.specifier:
             logger.warning(
-                'Requested %s, but installing version %s',
+                "Requested %s, but installing version %s",
                 self,
                 version,
             )
         else:
             logger.debug(
-                'Source in %s has version %s, which satisfies requirement %s',
+                "Source in %s has version %s, which satisfies requirement %s",
                 display_path(self.source_dir),
                 version,
                 self,
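metadata and get_dist now go through pip._internal.metadata instead of pkg_resources, which also explains the _get_dist helper deleted earlier in this diff. A rough sketch of the new lookup pattern; note this is pip's internal API, not a supported interface, and it changes between pip releases:

    from pip._internal.metadata import get_default_environment

    dist = get_default_environment().get_distribution("pip")
    if dist is not None:
        print(dist.raw_name, dist.version)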
@ -603,11 +575,10 @@ class InstallRequirement:
     # For both source distributions and editables
     def ensure_has_source_dir(
         self,
-        parent_dir,
-        autodelete=False,
-        parallel_builds=False,
-    ):
-        # type: (str, bool, bool) -> None
+        parent_dir: str,
+        autodelete: bool = False,
+        parallel_builds: bool = False,
+    ) -> None:
         """Ensure that a source_dir is set.

         This will create a temporary build dir if the name of the requirement

@ -625,18 +596,16 @@ class InstallRequirement:
         )

     # For editable installations
-    def update_editable(self):
-        # type: () -> None
+    def update_editable(self) -> None:
         if not self.link:
             logger.debug(
-                "Cannot update repository at %s; repository location is "
-                "unknown",
+                "Cannot update repository at %s; repository location is unknown",
                 self.source_dir,
             )
             return
         assert self.editable
         assert self.source_dir
-        if self.link.scheme == 'file':
+        if self.link.scheme == "file":
             # Static paths don't get updated
             return
         vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)

@ -644,11 +613,12 @@ class InstallRequirement:
         # So here, if it's neither a path nor a valid VCS URL, it's a bug.
         assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
         hidden_url = hide_url(self.link.url)
-        vcs_backend.obtain(self.source_dir, url=hidden_url)
+        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)

     # Top-level Actions
-    def uninstall(self, auto_confirm=False, verbose=False):
-        # type: (bool, bool) -> Optional[UninstallPathSet]
+    def uninstall(
+        self, auto_confirm: bool = False, verbose: bool = False
+    ) -> Optional[UninstallPathSet]:
         """
         Uninstall the distribution currently satisfying this requirement.

@ -662,34 +632,30 @@ class InstallRequirement:

         """
         assert self.req
-        dist = get_distribution(self.req.name)
+        dist = get_default_environment().get_distribution(self.req.name)
         if not dist:
             logger.warning("Skipping %s as it is not installed.", self.name)
             return None
-        logger.info('Found existing installation: %s', dist)
+        logger.info("Found existing installation: %s", dist)

         uninstalled_pathset = UninstallPathSet.from_dist(dist)
         uninstalled_pathset.remove(auto_confirm, verbose)
         return uninstalled_pathset

-    def _get_archive_name(self, path, parentdir, rootdir):
-        # type: (str, str, str) -> str
-
-        def _clean_zip_name(name, prefix):
-            # type: (str, str) -> str
-            assert name.startswith(prefix + os.path.sep), (
-                f"name {name!r} doesn't start with prefix {prefix!r}"
-            )
-            name = name[len(prefix) + 1:]
-            name = name.replace(os.path.sep, '/')
+    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
+        def _clean_zip_name(name: str, prefix: str) -> str:
+            assert name.startswith(
+                prefix + os.path.sep
+            ), f"name {name!r} doesn't start with prefix {prefix!r}"
+            name = name[len(prefix) + 1 :]
+            name = name.replace(os.path.sep, "/")
             return name

         path = os.path.join(parentdir, path)
         name = _clean_zip_name(path, rootdir)
-        return self.name + '/' + name
+        return self.name + "/" + name

-    def archive(self, build_dir):
-        # type: (Optional[str]) -> None
+    def archive(self, build_dir: Optional[str]) -> None:
         """Saves archive to provided build_dir.

         Used for saving downloaded VCS requirements as part of `pip download`.
@ -699,70 +665,74 @@ class InstallRequirement:
             return

         create_archive = True
-        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
+        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
         archive_path = os.path.join(build_dir, archive_name)

         if os.path.exists(archive_path):
             response = ask_path_exists(
-                'The file {} exists. (i)gnore, (w)ipe, '
-                '(b)ackup, (a)bort '.format(
-                    display_path(archive_path)),
-                ('i', 'w', 'b', 'a'))
-            if response == 'i':
+                "The file {} exists. (i)gnore, (w)ipe, "
+                "(b)ackup, (a)bort ".format(display_path(archive_path)),
+                ("i", "w", "b", "a"),
+            )
+            if response == "i":
                 create_archive = False
-            elif response == 'w':
-                logger.warning('Deleting %s', display_path(archive_path))
+            elif response == "w":
+                logger.warning("Deleting %s", display_path(archive_path))
                 os.remove(archive_path)
-            elif response == 'b':
+            elif response == "b":
                 dest_file = backup_dir(archive_path)
                 logger.warning(
-                    'Backing up %s to %s',
+                    "Backing up %s to %s",
                     display_path(archive_path),
                     display_path(dest_file),
                 )
                 shutil.move(archive_path, dest_file)
-            elif response == 'a':
+            elif response == "a":
                 sys.exit(-1)

         if not create_archive:
             return

         zip_output = zipfile.ZipFile(
-            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
+            archive_path,
+            "w",
+            zipfile.ZIP_DEFLATED,
+            allowZip64=True,
         )
         with zip_output:
-            dir = os.path.normcase(
-                os.path.abspath(self.unpacked_source_directory)
-            )
+            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
             for dirpath, dirnames, filenames in os.walk(dir):
                 for dirname in dirnames:
                     dir_arcname = self._get_archive_name(
-                        dirname, parentdir=dirpath, rootdir=dir,
+                        dirname,
+                        parentdir=dirpath,
+                        rootdir=dir,
                     )
-                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
+                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
                     zipdir.external_attr = 0x1ED << 16  # 0o755
-                    zip_output.writestr(zipdir, '')
+                    zip_output.writestr(zipdir, "")
                 for filename in filenames:
                     file_arcname = self._get_archive_name(
-                        filename, parentdir=dirpath, rootdir=dir,
+                        filename,
+                        parentdir=dirpath,
+                        rootdir=dir,
                     )
                     filename = os.path.join(dirpath, filename)
                     zip_output.write(filename, file_arcname)

-        logger.info('Saved %s', display_path(archive_path))
+        logger.info("Saved %s", display_path(archive_path))

     def install(
         self,
-        install_options,  # type: List[str]
-        global_options=None,  # type: Optional[Sequence[str]]
-        root=None,  # type: Optional[str]
-        home=None,  # type: Optional[str]
-        prefix=None,  # type: Optional[str]
-        warn_script_location=True,  # type: bool
-        use_user_site=False,  # type: bool
-        pycompile=True  # type: bool
-    ):
-        # type: (...) -> None
+        install_options: List[str],
+        global_options: Optional[Sequence[str]] = None,
+        root: Optional[str] = None,
+        home: Optional[str] = None,
+        prefix: Optional[str] = None,
+        warn_script_location: bool = True,
+        use_user_site: bool = False,
+        pycompile: bool = True,
+    ) -> None:
         scheme = get_scheme(
             self.name,
             user=use_user_site,
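A detail from the archive hunk above that survives the reformatting: directory entries in the zip get their Unix permissions through ZipInfo.external_attr, whose high 16 bits carry the st_mode. 0x1ED is simply 0o755 in hex:

    import zipfile

    assert 0x1ED == 0o755
    zipdir = zipfile.ZipInfo("pkg/")
    zipdir.external_attr = 0o755 << 16      # equivalent to 0x1ED << 16
    print(oct(zipdir.external_attr >> 16))  # 0o755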
@ -773,7 +743,7 @@ class InstallRequirement:
         )

         global_options = global_options if global_options is not None else []
-        if self.editable:
+        if self.editable and not self.is_wheel:
             install_editable_legacy(
                 install_options,
                 global_options,

@ -792,7 +762,9 @@ class InstallRequirement:
         if self.is_wheel:
             assert self.local_file_path
             direct_url = None
-            if self.original_link:
+            if self.editable:
+                direct_url = direct_url_for_editable(self.unpacked_source_directory)
+            elif self.original_link:
                 direct_url = direct_url_from_link(
                     self.original_link,
                     self.source_dir,

@ -840,7 +812,7 @@ class InstallRequirement:
             )
         except LegacyInstallFailure as exc:
             self.install_succeeded = False
-            six.reraise(*exc.parent)
+            raise exc
         except Exception:
             self.install_succeeded = True
             raise

@ -851,8 +823,9 @@ class InstallRequirement:
             deprecated(
                 reason=(
                     "{} was installed using the legacy 'setup.py install' "
-                    "method, because a wheel could not be built for it.".
-                    format(self.name)
+                    "method, because a wheel could not be built for it.".format(
+                        self.name
+                    )
                 ),
                 replacement="to fix the wheel build issue reported above",
                 gone_in=None,

@ -860,8 +833,7 @@ class InstallRequirement:
             )


-def check_invalid_constraint_type(req):
-    # type: (InstallRequirement) -> str
-
+def check_invalid_constraint_type(req: InstallRequirement) -> str:
     # Check for unsupported forms
     problem = ""

@ -881,12 +853,10 @@ def check_invalid_constraint_type(req):
                 "undocumented. The new implementation of the resolver no "
                 "longer supports these forms."
             ),
-            replacement=(
-                "replacing the constraint with a requirement."
-            ),
+            replacement="replacing the constraint with a requirement",
             # No plan yet for when the new resolver becomes default
             gone_in=None,
-            issue=8210
+            issue=8210,
         )

     return problem
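The six.reraise(*exc.parent) call disappears because on Python 3 an exception instance carries its own traceback, so a bare raise of the instance preserves it. A minimal illustration of that language-level fact:

    def fail() -> None:
        raise ValueError("boom")

    saved = None
    try:
        fail()
    except ValueError as exc:
        saved = exc

    # Re-raising the stored instance keeps the original traceback attached:
    try:
        raise saved
    except ValueError as exc:
        print(exc.__traceback__ is not None)  # True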
@ -1,191 +1,62 @@
 import logging
 from collections import OrderedDict
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, List

 from pip._vendor.packaging.utils import canonicalize_name

-from pip._internal.exceptions import InstallationError
-from pip._internal.models.wheel import Wheel
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils import compatibility_tags

 logger = logging.getLogger(__name__)


 class RequirementSet:
-
-    def __init__(self, check_supported_wheels=True):
-        # type: (bool) -> None
-        """Create a RequirementSet.
-        """
-
-        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]
+    def __init__(self, check_supported_wheels: bool = True) -> None:
+        """Create a RequirementSet."""
+
+        self.requirements: Dict[str, InstallRequirement] = OrderedDict()
         self.check_supported_wheels = check_supported_wheels

-        self.unnamed_requirements = []  # type: List[InstallRequirement]
+        self.unnamed_requirements: List[InstallRequirement] = []

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         requirements = sorted(
             (req for req in self.requirements.values() if not req.comes_from),
             key=lambda req: canonicalize_name(req.name or ""),
         )
-        return ' '.join(str(req.req) for req in requirements)
+        return " ".join(str(req.req) for req in requirements)

-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         requirements = sorted(
             self.requirements.values(),
             key=lambda req: canonicalize_name(req.name or ""),
         )

-        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
+        format_string = "<{classname} object; {count} requirement(s): {reqs}>"
         return format_string.format(
             classname=self.__class__.__name__,
             count=len(requirements),
-            reqs=', '.join(str(req.req) for req in requirements),
+            reqs=", ".join(str(req.req) for req in requirements),
         )

-    def add_unnamed_requirement(self, install_req):
-        # type: (InstallRequirement) -> None
+    def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
         assert not install_req.name
         self.unnamed_requirements.append(install_req)

-    def add_named_requirement(self, install_req):
-        # type: (InstallRequirement) -> None
+    def add_named_requirement(self, install_req: InstallRequirement) -> None:
         assert install_req.name

         project_name = canonicalize_name(install_req.name)
         self.requirements[project_name] = install_req

-    def add_requirement(
-        self,
-        install_req,  # type: InstallRequirement
-        parent_req_name=None,  # type: Optional[str]
-        extras_requested=None  # type: Optional[Iterable[str]]
-    ):
-        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]
-        """Add install_req as a requirement to install.
-
-        :param parent_req_name: The name of the requirement that needed this
-            added. The name is used because when multiple unnamed requirements
-            resolve to the same name, we could otherwise end up with dependency
-            links that point outside the Requirements set. parent_req must
-            already be added. Note that None implies that this is a user
-            supplied requirement, vs an inferred one.
-        :param extras_requested: an iterable of extras used to evaluate the
-            environment markers.
-        :return: Additional requirements to scan. That is either [] if
-            the requirement is not applicable, or [install_req] if the
-            requirement is applicable and has just been added.
-        """
-        # If the markers do not match, ignore this requirement.
-        if not install_req.match_markers(extras_requested):
-            logger.info(
-                "Ignoring %s: markers '%s' don't match your environment",
-                install_req.name, install_req.markers,
-            )
-            return [], None
-
-        # If the wheel is not supported, raise an error.
-        # Should check this after filtering out based on environment markers to
-        # allow specifying different wheels based on the environment/OS, in a
-        # single requirements file.
-        if install_req.link and install_req.link.is_wheel:
-            wheel = Wheel(install_req.link.filename)
-            tags = compatibility_tags.get_supported()
-            if (self.check_supported_wheels and not wheel.supported(tags)):
-                raise InstallationError(
-                    "{} is not a supported wheel on this platform.".format(
-                        wheel.filename)
-                )
-
-        # This next bit is really a sanity check.
-        assert not install_req.user_supplied or parent_req_name is None, (
-            "a user supplied req shouldn't have a parent"
-        )
-
-        # Unnamed requirements are scanned again and the requirement won't be
-        # added as a dependency until after scanning.
-        if not install_req.name:
-            self.add_unnamed_requirement(install_req)
-            return [install_req], None
-
-        try:
-            existing_req = self.get_requirement(
-                install_req.name)  # type: Optional[InstallRequirement]
-        except KeyError:
-            existing_req = None
-
-        has_conflicting_requirement = (
-            parent_req_name is None and
-            existing_req and
-            not existing_req.constraint and
-            existing_req.extras == install_req.extras and
-            existing_req.req and
-            install_req.req and
-            existing_req.req.specifier != install_req.req.specifier
-        )
-        if has_conflicting_requirement:
-            raise InstallationError(
-                "Double requirement given: {} (already in {}, name={!r})"
-                .format(install_req, existing_req, install_req.name)
-            )
-
-        # When no existing requirement exists, add the requirement as a
-        # dependency and it will be scanned again after.
-        if not existing_req:
-            self.add_named_requirement(install_req)
-            # We'd want to rescan this requirement later
-            return [install_req], install_req
-
-        # Assume there's no need to scan, and that we've already
-        # encountered this for scanning.
-        if install_req.constraint or not existing_req.constraint:
-            return [], existing_req
-
-        does_not_satisfy_constraint = (
-            install_req.link and
-            not (
-                existing_req.link and
-                install_req.link.path == existing_req.link.path
-            )
-        )
-        if does_not_satisfy_constraint:
-            raise InstallationError(
-                "Could not satisfy constraints for '{}': "
-                "installation from path or url cannot be "
-                "constrained to a version".format(install_req.name)
-            )
-        # If we're now installing a constraint, mark the existing
-        # object for real installation.
-        existing_req.constraint = False
-        # If we're now installing a user supplied requirement,
-        # mark the existing object as such.
-        if install_req.user_supplied:
-            existing_req.user_supplied = True
-        existing_req.extras = tuple(sorted(
-            set(existing_req.extras) | set(install_req.extras)
-        ))
-        logger.debug(
-            "Setting %s extras to: %s",
-            existing_req, existing_req.extras,
-        )
-        # Return the existing requirement for addition to the parent and
-        # scanning again.
-        return [existing_req], existing_req
-
-    def has_requirement(self, name):
-        # type: (str) -> bool
+    def has_requirement(self, name: str) -> bool:
         project_name = canonicalize_name(name)

         return (
-            project_name in self.requirements and
-            not self.requirements[project_name].constraint
+            project_name in self.requirements
+            and not self.requirements[project_name].constraint
         )

-    def get_requirement(self, name):
-        # type: (str) -> InstallRequirement
+    def get_requirement(self, name: str) -> InstallRequirement:
         project_name = canonicalize_name(name)

         if project_name in self.requirements:

@ -194,6 +65,5 @@ class RequirementSet:
         raise KeyError(f"No project with the name {name!r}")

     @property
-    def all_requirements(self):
-        # type: () -> List[InstallRequirement]
+    def all_requirements(self) -> List[InstallRequirement]:
         return self.unnamed_requirements + list(self.requirements.values())
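RequirementSet keys its mapping by canonicalized project name, which is why has_requirement and get_requirement tolerate spelling differences between requirement sources. A quick illustration with packaging's canonicalize_name:

    from packaging.utils import canonicalize_name

    print(canonicalize_name("Django"))          # django
    print(canonicalize_name("My_Package"))      # my-package
    print(canonicalize_name("zope.interface"))  # zope-interface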
Some files were not shown because too many files have changed in this diff.