parent b88a51267d
commit ff03c8d9c2
@@ -0,0 +1,3 @@
/venv
.git
__pycache__
@@ -0,0 +1,3 @@
/.vscode
__pycache__
/venv
File diff suppressed because it is too large
@@ -0,0 +1,798 @@
from PIL import Image, ImageDraw, ImageFont
from PyQt5.QtWidgets import (QMainWindow, QMenuBar, QToolBar, QTextEdit, QAction, QApplication,
                             qApp, QMessageBox, QFileDialog, QLabel, QHBoxLayout, QGroupBox,
                             QComboBox, QGridLayout, QLineEdit, QSlider, QPushButton)
from PyQt5 import QtGui
from PyQt5.QtGui import *
from PyQt5.QtGui import QPalette, QImage, QPixmap, QBrush
from PyQt5.QtCore import *

import sys
import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt

import DIP_Code as ph
import time
from pylab import *

input_path = "resources/output/input.png"

class Window(QMainWindow):
    image = 0
    path = ' '
    makeupvalue = 25
    alpha = 0.6
    beta = 80
    r = 0
    g = 0
    b = 0

    text = ' '
    p_x = 50
    p_y = 150
    fontsize = 1
    fonttype = 1
    fontbold = 1
    angle = 0
    change_path = "resources/output/change.png"

    IMG1 = ' '
    IMG2 = 'null'

    def __init__(self):
        super(Window, self).__init__()
        # UI initialization
        self.createMenu()
        self.image_show()
        self.font_GroupBox()

        self.initUI()

    # Menu bar
    def createMenu(self):
        # menubar = QMenuBar(self)
        menubar = self.menuBar()

        menu1 = menubar.addMenu("文件")
        menu1.addAction("打开")
        menu1.addAction("保存")

        menu2 = menubar.addMenu("原图")
        menu2.addAction("返回原图")

        menu3 = menubar.addMenu("基础操作")
        menu3_1 = menu3.addMenu("彩色空间转换")
        menu3_1.addAction("转HSV")
        menu3_1.addAction("转GRAY")
        menu3_1.addAction("转BGRA")
        menu3_1.addAction("转HLS")
        menu3_1.addAction("转YUV")
        menu3.addAction("FFT变换")
        menu3.addAction("DCT变换")
        menu3.addAction("仿射变换")
        menu3_2 = menu3.addMenu("缩放")
        menu3_2.addAction("放大")
        menu3_2.addAction("缩小")
        menu3_3 = menu3.addMenu("旋转")
        menu3_3.addAction("右旋转90度")
        menu3_3.addAction("左旋转90度")
        menu3_4 = menu3.addMenu("翻转")
        menu3_4.addAction("水平翻转")
        menu3_4.addAction("垂直翻转")
        menu3_4.addAction("对角翻转")

        menu4 = menubar.addMenu("直方图")
        menu4_1 = menu4.addMenu("拉伸")
        menu4_1.addAction("线性拉伸")
        menu4_1.addAction("非线性拉伸")
        menu4_2 = menu4.addMenu("均衡")
        menu4_2.addAction("自适应均衡")
        menu4_2.addAction("全局均衡")

        menu5 = menubar.addMenu("滤镜")
        menu5_1 = menu5.addMenu("平滑")
        menu5_1.addAction("均值模糊")
        menu5_1.addAction("高斯模糊")
        menu5_1.addAction("中值模糊")
        menu5_2 = menu5.addMenu("锐化")
        menu5_2.addAction("锐化")
        # menu5_2.addAction("锐化2")
        # menu5_2.addAction("锐化3")
        menu5.addAction("美颜")

        menu6 = menubar.addMenu("图像恢复")
        # menu6.addAction("投影矫正")
        # menu6.addAction("模糊消除")
        menu6_1 = menu6.addMenu("添加噪声")
        menu6_1.addAction("高斯噪声")
        menu6_1.addAction("椒盐噪声")
        menu6_2 = menu6.addMenu("滤波器")
        menu6_2_1 = menu6_2.addMenu("均值类滤波器")
        menu6_2_1.addAction("算数均值滤波器")
        menu6_2_1.addAction("几何均值滤波器")
        menu6_2_1.addAction("谐波均值滤波器")
        menu6_2_2 = menu6_2.addMenu("排序统计类滤波器")
        menu6_2_2.addAction("最大值滤波器")
        menu6_2_2.addAction("中值滤波器")
        menu6_2_2.addAction("最小值滤波器")
        menu6_2_3 = menu6_2.addMenu("选择性滤波器")
        menu6_2_3.addAction("高通滤波器")
        menu6_2_3.addAction("低通滤波器")
        menu6_2_3.addAction("带通滤波器")

        menu7 = menubar.addMenu("图像合成")
        menu7.addAction("图像拼接")
        # menu7.addAction("更换背景")
        # menu7.addAction("换头")
        menu7_1 = menu7.addMenu("算数运算")
        menu7_1.addAction("加法")
        menu7_1.addAction("减法")
        menu7_1.addAction("乘法")
        menu7_1.addAction("除法")
        menu7_2 = menu7.addMenu("逻辑运算")
        menu7_2.addAction("与运算")
        menu7_2.addAction("或运算")
        menu7_2.addAction("非运算")

        menu8 = menubar.addMenu("边缘检测")
        menu8.addAction("增强图像")
        menu8.addAction("Robert算子")
        menu8.addAction("Prewitt算子")
        menu8.addAction("Sobel算子")
        menu8.addAction("Laplacian算子")
        menu8.addAction("LoG算子")
        menu8.addAction("Canny")

        menu9 = menubar.addMenu("图像动作驱动")
        menu9.addAction("图像动作驱动")

        # menu9 = menubar.addMenu("添加噪声")
        # menu9.addAction("高斯噪声")
        # menu9.addAction("椒盐噪声")
        #
        # menu10 = menubar.addMenu("滤波器")
        # menu10_1 = menu10.addMenu("均值类滤波器")
        # menu10_1.addAction("算数均值滤波器")
        # menu10_1.addAction("几何均值滤波器")
        # menu10_1.addAction("谐波均值滤波器")
        # menu10_2 = menu10.addMenu("排序统计类滤波器")
        # menu10_2.addAction("最大值滤波器")
        # menu10_2.addAction("中值滤波器")
        # menu10_2.addAction("最小值滤波器")
        # menu10_3 = menu10.addMenu("选择性滤波器")
        # menu10_3.addAction("高通滤波器")
        # menu10_3.addAction("低通滤波器")
        # menu10_3.addAction("带通滤波器")

        # Connect each menu's triggered signal to its handler slot
        menu1.triggered[QAction].connect(self.menu1_process)
        menu2.triggered[QAction].connect(self.menu2_process)
        menu3.triggered[QAction].connect(self.menu3_process)
        menu4.triggered[QAction].connect(self.menu4_process)
        menu5.triggered[QAction].connect(self.menu5_process)
        menu6.triggered[QAction].connect(self.menu6_process)
        menu6_2.triggered[QAction].connect(self.menu6_process)
        menu7.triggered[QAction].connect(self.menu7_process)
        menu8.triggered[QAction].connect(self.menu8_process)
        menu9.triggered[QAction].connect(self.menu9_process)
        # menu9.triggered[QAction].connect(self.menu9_process)
        # menu10.triggered[QAction].connect(self.menu10_process)

    # Pixmap display area
    def image_show(self):

        self.lbl = QLabel(self)
        self.lbl.setPixmap(QPixmap('resources/img/white board.jpg'))
        self.lbl.setAlignment(Qt.AlignCenter)  # image display area, centered
        self.lbl.setGeometry(450, 35, 800, 500)
        self.lbl.setStyleSheet("border: 2px solid black")

    # Tool panel layout
    def font_GroupBox(self):
        # Text editing tools
        text_lbl = QLabel("文字编辑工具", self)
        font = QFont("Microsoft YaHei", 10, 75)
        text_lbl.setFont(font)
        text_lbl.setGeometry(25, 35, 110, 30)

        text_lbl = QLabel("输入文字:", self)
        text_lbl.setGeometry(25, 70, 85, 30)
        # text_lbl.setWordWrap(True)
        text_text = QLineEdit(self)
        text_text.setGeometry(125, 70, 250, 30)

        xy_lbl = QLabel("输入坐标:", self)
        xy_lbl.setGeometry(25, 105, 85, 30)
        x_text = QLineEdit(self)
        x_text.setGeometry(125, 105, 30, 30)
        y_text = QLineEdit(self)
        y_text.setGeometry(165, 105, 30, 30)
        xy_btn = QPushButton('坐标确认', self)
        xy_btn.setGeometry(225, 105, 85, 30)

        size_lbl = QLabel("字体大小:", self)
        size_lbl.setGeometry(25, 140, 85, 30)
        size_combo = QComboBox(self)
        size_combo.addItems([str(i) for i in range(1, 8)])
        size_combo.setGeometry(125, 140, 50, 30)

        bold_lbl = QLabel("字体厚度:", self)
        bold_lbl.setGeometry(205, 140, 85, 30)
        bold_combo = QComboBox(self)
        bold_combo.addItems([str(i) for i in range(1, 8)])
        bold_combo.setGeometry(305, 140, 50, 30)

        color_lbl = QLabel("字体颜色:", self)
        color_lbl.setGeometry(25, 175, 85, 30)
        color_combo = QComboBox(self)
        color_combo.addItems(["黑", "白", "红", "橙", "黄", "蓝", "绿", "紫"])
        color_combo.setGeometry(125, 175, 50, 30)

        type_lbl = QLabel("字体样式:", self)
        type_lbl.setGeometry(205, 175, 85, 30)
        type_combo = QComboBox(self)
        type_combo.addItems([str(i) for i in range(1, 8)])
        type_combo.setGeometry(305, 175, 50, 30)

        wordbtn = QPushButton('文字确认', self)
        wordbtn.setCheckable(True)
        wordbtn.setGeometry(25, 210, 85, 30)

        # Image editing tools
        text_lbl = QLabel("图像编辑工具", self)
        font = QFont("Microsoft YaHei", 10, 75)
        text_lbl.setFont(font)
        text_lbl.setGeometry(25, 255, 110, 30)

        angle_lbl = QLabel("图像旋转角度:", self)
        angle_lbl.setGeometry(25, 290, 120, 40)
        angle_text = QLineEdit(self)
        angle_text.setGeometry(150, 290, 65, 40)

        surebtn = QPushButton('旋转确认', self)
        surebtn.setCheckable(True)
        surebtn.setGeometry(240, 290, 85, 30)

        # makeup_lbl = QLabel("美颜:", self)
        # makeup_lbl.setGeometry(25, 300, 100, 40)
        # sld = QSlider(Qt.Horizontal, self)
        # sld.setFocusPolicy(Qt.NoFocus)
        # sld.setGeometry(50, 350, 200, 30)

        brightness_lbl = QLabel("亮度:", self)
        brightness_lbl.setGeometry(25, 325, 100, 40)
        sld_brightness = QSlider(Qt.Horizontal, self)
        sld_brightness.setFocusPolicy(Qt.NoFocus)
        sld_brightness.setMaximum(255)
        sld_brightness.setValue(128)
        sld_brightness.setGeometry(50, 360, 200, 30)

        contrast_lbl = QLabel("对比度:", self)
        contrast_lbl.setGeometry(25, 395, 100, 40)
        sld_contrast = QSlider(Qt.Horizontal, self)
        sld_contrast.setFocusPolicy(Qt.NoFocus)
        sld_contrast.setMaximum(300)
        sld_contrast.setValue(150)
        sld_contrast.setGeometry(50, 430, 200, 30)

        # Signal/slot connections for the input widgets
        text_text.textChanged[str].connect(self.input_text)
        x_text.textChanged[str].connect(self.input_px)
        y_text.textChanged[str].connect(self.input_py)
        xy_btn.clicked[bool].connect(self.xy_press)
        size_combo.activated[str].connect(self.inputsize)
        bold_combo.activated[str].connect(self.inputbold)
        type_combo.activated[str].connect(self.inputtype)
        color_combo.activated[str].connect(self.inputcolor)
        angle_text.textChanged[str].connect(self.input_angle)  # rotation angle input
        surebtn.clicked[bool].connect(self.presssure)  # rotation confirm
        wordbtn.clicked[bool].connect(self.pressure2)  # text confirm
        # sld.valueChanged[int].connect(self.makeupValue)  # beautify slider
        sld_brightness.valueChanged[int].connect(self.updateBeta)  # brightness slider
        sld_contrast.valueChanged[int].connect(self.updateAlpha)  # contrast slider

    def initUI(self):
        self.setGeometry(50, 50, 1300, 550)
        self.setWindowTitle('DIP_Course')
        self.setWindowIcon(QIcon(r"resources/img/test.jpg"))
        palette = QPalette()
        palette.setColor(self.backgroundRole(), QColor(255, 255, 255))
        self.setPalette(palette)
        self.show()

    # Menu 1 handler (file open/save)
    def menu1_process(self, q):
        if q.text() == "打开":
            file_path, file_type = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                               "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpg)")
            self.path = file_path
            self.image = cv.imread(self.path)
            self.IMG1 = self.image
            # pixmap = QPixmap(self.path)
            self.image = self.setAutoResizeImage(self.image)
            cv.imwrite(self.path, self.image)  # note: overwrites the opened file with the resized copy
            cv.imwrite(input_path, self.image)
            pixmap = QPixmap(input_path)
            self.lbl.setPixmap(pixmap)

        else:
            save_path = QFileDialog.getSaveFileName(self, '保存文件', 'E:/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpg)")
            cv.imwrite(save_path[0], self.image)

    # Menu 2 handler (restore the original image)
    def menu2_process(self, q):
        self.image = cv.imread(self.path)
        self.lbl.setPixmap(QPixmap(self.path))
        cv.imwrite(self.change_path, self.image)

    # Menu 3 handler (basic operations)
    def menu3_process(self, q):
        if q.text() == "转HSV":
            self.image = ph.color_space(self.image, 1)
        elif q.text() == "转GRAY":
            self.image = ph.color_space(self.image, 2)
        elif q.text() == "转BGRA":
            self.image = ph.color_space(self.image, 3)
        elif q.text() == "转HLS":
            self.image = ph.color_space(self.image, 4)
        elif q.text() == "转YUV":
            self.image = ph.color_space(self.image, 5)
        elif q.text() == "FFT变换":
            ph.FFT(self.image)
        elif q.text() == "DCT变换":
            ph.DCT(self.image)
        elif q.text() == "放大":
            self.image = ph.changescale(self.image, 2)
        elif q.text() == "缩小":
            self.image = ph.changescale(self.image, 0.5)
        elif q.text() == "右旋转90度":
            self.image = ph.rotate(self.image, -90)
        elif q.text() == "左旋转90度":
            self.image = ph.rotate(self.image, 90)
        elif q.text() == "水平翻转":
            self.image = ph.changeflip(self.image, 1)
        elif q.text() == "垂直翻转":
            self.image = ph.changeflip(self.image, 0)
        elif q.text() == "对角翻转":
            self.image = ph.changeflip(self.image, -1)
        elif q.text() == "仿射变换":
            self.image = ph.affinetransform(self.image)

        self.lbl.setPixmap(QPixmap(self.change_path))

    # Menu 4 handler (histogram operations)
    def menu4_process(self, q):
        if q.text() == "线性拉伸":
            self.image = ph.Linear_hist(self.image)
            self.lbl.setPixmap(QPixmap(self.change_path))
            plt.subplot(1, 1, 1)
            plt.hist(self.image.ravel(), 256, [0, 256])
            plt.title("linear")
            plt.show()

        elif q.text() == "自适应均衡":
            self.image = ph.adaptive_equalization(self.image)
            self.lbl.setPixmap(QPixmap(self.change_path))
            plt.subplot(1, 1, 1)
            plt.hist(self.image.ravel(), 256, [0, 256])
            plt.title("adaptive equalizer")
            plt.show()
        elif q.text() == "全局均衡":
            self.image = ph.global_equalization(self.image)
            self.lbl.setPixmap(QPixmap(self.change_path))
            plt.subplot(1, 1, 1)
            plt.hist(self.image.ravel(), 256, [0, 256])
            plt.title("global equalizer")
            plt.show()
        elif q.text() == "非线性拉伸":
            self.image = ph.Ninear_hist(self.image)
            self.lbl.setPixmap(QPixmap(self.change_path))
            plt.subplot(1, 1, 1)
            plt.hist(self.image.ravel(), 256, [0, 256])
            plt.title("non-linear")
            plt.show()

    # Menu 5 handler (filter effects)
    def menu5_process(self, q):
        if q.text() == "均值模糊":
            self.image = ph.ave_blur(self.image)
        elif q.text() == "高斯模糊":
            self.image = ph.gau_blur(self.image)
        elif q.text() == "中值模糊":
            self.image = ph.mid_blur(self.image)
        elif q.text() == "锐化":
            self.image = ph.l_sharpen3(self.image)
        elif q.text() == "美颜":
            ph.makeup(self.image, self.makeupvalue)

        self.lbl.setPixmap(QPixmap(self.change_path))

    # Menu 6 handler (image restoration)
    def menu6_process(self, q):
        if q.text() == "高斯噪声":
            self.image = ph.gaussNoise(self.image)
        elif q.text() == "椒盐噪声":
            self.image = ph.saltNoise(self.image)
        elif q.text() == "算数均值滤波器":
            self.image = ph.arithFilter(self.image)
        elif q.text() == "几何均值滤波器":
            self.image = ph.geometryFilter(self.image)
        elif q.text() == "谐波均值滤波器":
            self.image = ph.harmonyFilter(self.image)
        elif q.text() == "最大值滤波器":
            self.image = ph.maxSortFilter(self.image)
        elif q.text() == "中值滤波器":
            self.image = ph.mediumSortFilter(self.image)
        elif q.text() == "最小值滤波器":
            self.image = ph.minSortFilter(self.image)
        elif q.text() == "高通滤波器":
            self.image = ph.HighPassFilter(self.image)
        elif q.text() == "低通滤波器":
            self.image = ph.LowPassFilter(self.image)
        elif q.text() == "带通滤波器":
            self.image = ph.BrandPassFilter(self.image)
        # elif q.text() == "投影矫正":
        #     self.image = ph.correct(self.image)
        # elif q.text() == "模糊消除":
        #     self.image = ph.removefuzzy(self.image)

        self.lbl.setPixmap(QPixmap(self.change_path))

    # Menu 7 handler (image composition)
    def menu7_process(self, q):

        if q.text() == "图像拼接":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            self.image = ph.conectImage(self.image, self.IMG2)
        # elif q.text() == "更换背景":
        #     self.image = ph.back_vary(self.image)
        # elif q.text() == "换头":
        #     img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
        #                                             "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
        #     self.IMG2 = cv.imread(img2_path[0])
        #     self.image = ph.huantou(self.image, self.IMG2)
        elif q.text() == "加法":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = cv.add(self.image, self.IMG2)
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "减法":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = cv.subtract(self.image, self.IMG2)
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "乘法":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = cv.multiply(self.image, self.IMG2)
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "除法":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = cv.divide(self.image, self.IMG2)
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "与运算":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = self.image & self.IMG2
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "或运算":
            img2_path = QFileDialog.getOpenFileName(self, '打开文件', 'E:/DIPcourse/images/',
                                                    "All Files (*);;(*.bmp);;(*.tif);;(*.png);;(*.jpeg);;(*.jpg)")
            self.IMG2 = cv.imread(img2_path[0])
            height = self.image.shape[0]
            width = self.image.shape[1]
            size = (width, height)
            self.IMG2 = cv.resize(self.IMG2, size, interpolation=cv.INTER_NEAREST)
            self.image = self.image | self.IMG2
            cv.imwrite(self.change_path, self.image)
        elif q.text() == "非运算":
            self.image = ~self.image
            cv.imwrite(self.change_path, self.image)
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Menu 8 handler (edge detection)
    def menu8_process(self, q):

        if q.text() == "增强图像":
            self.image = ph.sharpImage(self.image)
        elif q.text() == "Robert算子":
            self.image = ph.robs(self.image)
        elif q.text() == "Prewitt算子":
            self.image = ph.prewitt(self.image)
        elif q.text() == "Sobel算子":
            self.image = ph.sob(self.image)
        elif q.text() == "Laplacian算子":
            self.image = ph.lap(self.image)
        elif q.text() == "LoG算子":
            self.image = ph._log(self.image)
        elif q.text() == "Canny":
            self.image = ph.cny(self.image)

        self.lbl.setPixmap(QPixmap(self.change_path))

    # Menu 9 handler (motion-driven animation)
    def menu9_process(self, q):
        if q.text() == "图像动作驱动":
            ph.actiondrive()

    # def menu9_process(self, q):
    #     if q.text() == "高斯噪声":
    #         self.image = ph.gaussNoise(self.image)
    #     elif q.text() == "椒盐噪声":
    #         self.image = ph.saltNoise(self.image)
    #
    #     self.lbl.setPixmap(QPixmap(self.change_path))
    #
    #
    # def menu10_process(self, q):
    #     if q.text() == "算数均值滤波器":
    #         self.image = ph.arithFilter(self.image)
    #     elif q.text() == "几何均值滤波器":
    #         self.image = ph.geometryFilter(self.image)
    #     elif q.text() == "谐波均值滤波器":
    #         self.image = None
    #     elif q.text() == "最大值滤波器":
    #         self.image = None
    #     elif q.text() == "中值滤波器":
    #         self.image = None
    #     elif q.text() == "最小值滤波器":
    #         self.image = None
    #     elif q.text() == "高通滤波器":
    #         self.image = None
    #     elif q.text() == "低通滤波器":
    #         self.image = None
    #     elif q.text() == "带通滤波器":
    #         self.image = None
    #     self.lbl.setPixmap(QPixmap(self.change_path))

    # Text input handler
    def input_text(self, text):
        self.text = text
        ph.Drawworld(self.image.copy(), text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # X coordinate input handler
    def input_px(self, text):
        if text:  # guard: int('') would raise when the field is cleared
            self.p_x = int(text)

    # Y coordinate input handler
    def input_py(self, text):
        if text:
            self.p_y = int(text)

    def xy_press(self):
        ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Font size
    def inputsize(self, text):
        # the combo box entries are the strings "1".."7", so the
        # original if/elif ladder collapses to a single conversion
        self.fontsize = int(text)

        ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Font thickness
    def inputbold(self, text):
        self.fontbold = int(text)

        ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Font style
    def inputtype(self, text):
        self.fonttype = int(text)

        ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Font color (the triples below are in OpenCV's BGR channel order,
    # despite the r/g/b attribute names)
    def inputcolor(self, text):
        if text == "黑":
            self.r, self.g, self.b = 0, 0, 0
        elif text == "白":
            self.r, self.g, self.b = 255, 255, 255
        elif text == "红":
            self.r, self.g, self.b = 0, 0, 255
        elif text == "橙":
            self.r, self.g, self.b = 0, 140, 255
        elif text == "黄":
            self.r, self.g, self.b = 0, 255, 255
        elif text == "蓝":
            self.r, self.g, self.b = 255, 0, 0
        elif text == "绿":
            self.r, self.g, self.b = 0, 255, 0
        elif text == "紫":
            self.r, self.g, self.b = 21, 0, 148

        ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize, self.fontbold,
                     (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Rotation angle input handler
    def input_angle(self, text):
        if text:  # guard against an empty field
            self.angle = int(text)

    # Rotation confirm button
    def presssure(self):
        self.image = ph.rotate(self.image, self.angle)
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Text confirm button
    def pressure2(self):
        self.image = ph.Drawworld(self.image.copy(), self.text, self.p_x, self.p_y, self.fonttype, self.fontsize,
                                  self.fontbold, (self.r, self.g, self.b))
        self.lbl.setPixmap(QPixmap(self.change_path))

    # # Beautify level slider
    # def makeupValue(self, value):
    #
    #     self.makeupvalue = int(value)
    #     ph.makeup(self.image, self.makeupvalue)
    #     self.lbl.setPixmap(QPixmap(self.change_path))

    # Brightness slider
    def updateBeta(self, value):
        self.beta = int(value)
        ph.updateBrightness(self.image, self.alpha, self.beta)
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Contrast slider (maps the 0-300 slider range to an alpha of 0.0-3.0)
    def updateAlpha(self, value):
        self.alpha = float(value * 0.01)
        ph.updateContrast(self.image, self.alpha, self.beta)
        self.lbl.setPixmap(QPixmap(self.change_path))

    # Scale the loaded image to fit the display label while preserving aspect ratio
    def setAutoResizeImage(self, image):
        height, width = image.shape[0], image.shape[1]
        label_height = self.lbl.height()
        label_width = self.lbl.width()
        if width / height >= label_width / label_height:
            scaled_image = cv.resize(image, (label_width, int(height * label_width / width)))
        else:
            scaled_image = cv.resize(image, (int(width * label_height / height), label_height))
        # scaled_pixmap = pixmap.scaled(label_width, label_height, aspectRatioMode=Qt.KeepAspectRatio)
        # self.lbl.setPixmap(scaled_pixmap)
        return scaled_image


if __name__ == '__main__':
    app = QApplication(sys.argv)
    ex = Window()
    ex.show()
    sys.exit(app.exec_())
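
A note on the fit rule in setAutoResizeImage: the image is scaled to the label by comparing aspect ratios, and whichever side is relatively larger becomes the binding constraint. A minimal standalone sketch of the same rule (the function name and box parameters here are illustrative, not part of the file above):

import cv2 as cv

def fit_to_box(image, box_w, box_h):
    """Resize `image` to fit inside box_w x box_h, preserving aspect ratio."""
    h, w = image.shape[:2]
    if w / h >= box_w / box_h:  # image relatively wider: width binds
        return cv.resize(image, (box_w, int(h * box_w / w)))
    return cv.resize(image, (int(w * box_h / h), box_h))  # height binds
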
@@ -0,0 +1,14 @@
FROM nvcr.io/nvidia/cuda:10.0-cudnn7-runtime-ubuntu18.04

RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update \
    && DEBIAN_FRONTEND=noninteractive apt-get -qqy install python3-pip ffmpeg git less nano libsm6 libxext6 libxrender-dev \
    && rm -rf /var/lib/apt/lists/*

COPY . /app/
WORKDIR /app

RUN pip3 install --upgrade pip
RUN pip3 install \
    https://download.pytorch.org/whl/cu100/torch-1.0.0-cp36-cp36m-linux_x86_64.whl \
    git+https://github.com/1adrianb/face-alignment \
    -r requirements.txt
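
The image pins CUDA 10.0/cuDNN 7 and the matching cu100 build of torch 1.0.0. A quick sanity check one might run with python3 inside the container (a sketch; it assumes the container was started with GPU access, e.g. via nvidia-docker):

import torch

# Should print 1.0.0 and, with a GPU attached, True.
print(torch.__version__)
print(torch.cuda.is_available())
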
@@ -0,0 +1,101 @@
import os
from tqdm import tqdm

import torch
from torch.utils.data import DataLoader

from frames_dataset import PairedDataset
from logger import Logger, Visualizer
import imageio
from scipy.spatial import ConvexHull
import numpy as np

from sync_batchnorm import DataParallelWithCallback


def normalize_kp(kp_source, kp_driving, kp_driving_initial, adapt_movement_scale=False,
                 use_relative_movement=False, use_relative_jacobian=False):
    if adapt_movement_scale:
        source_area = ConvexHull(kp_source['value'][0].data.cpu().numpy()).volume
        driving_area = ConvexHull(kp_driving_initial['value'][0].data.cpu().numpy()).volume
        adapt_movement_scale = np.sqrt(source_area) / np.sqrt(driving_area)
    else:
        adapt_movement_scale = 1

    kp_new = {k: v for k, v in kp_driving.items()}

    if use_relative_movement:
        kp_value_diff = (kp_driving['value'] - kp_driving_initial['value'])
        kp_value_diff *= adapt_movement_scale
        kp_new['value'] = kp_value_diff + kp_source['value']

        if use_relative_jacobian:
            jacobian_diff = torch.matmul(kp_driving['jacobian'], torch.inverse(kp_driving_initial['jacobian']))
            kp_new['jacobian'] = torch.matmul(jacobian_diff, kp_source['jacobian'])

    return kp_new


def animate(config, generator, kp_detector, checkpoint, log_dir, dataset):
    log_dir = os.path.join(log_dir, 'animation')
    png_dir = os.path.join(log_dir, 'png')
    animate_params = config['animate_params']

    dataset = PairedDataset(initial_dataset=dataset, number_of_pairs=animate_params['num_pairs'])
    dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1)

    if checkpoint is not None:
        Logger.load_cpk(checkpoint, generator=generator, kp_detector=kp_detector)
    else:
        raise AttributeError("Checkpoint should be specified for mode='animate'.")

    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    if not os.path.exists(png_dir):
        os.makedirs(png_dir)

    if torch.cuda.is_available():
        generator = DataParallelWithCallback(generator)
        kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    for it, x in tqdm(enumerate(dataloader)):
        with torch.no_grad():
            predictions = []
            visualizations = []

            driving_video = x['driving_video']
            source_frame = x['source_video'][:, :, 0, :, :]

            kp_source = kp_detector(source_frame)
            kp_driving_initial = kp_detector(driving_video[:, :, 0])

            for frame_idx in range(driving_video.shape[2]):
                driving_frame = driving_video[:, :, frame_idx]
                kp_driving = kp_detector(driving_frame)
                kp_norm = normalize_kp(kp_source=kp_source, kp_driving=kp_driving,
                                       kp_driving_initial=kp_driving_initial, **animate_params['normalization_params'])
                out = generator(source_frame, kp_source=kp_source, kp_driving=kp_norm)

                out['kp_driving'] = kp_driving
                out['kp_source'] = kp_source
                out['kp_norm'] = kp_norm

                del out['sparse_deformed']

                predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0])

                visualization = Visualizer(**config['visualizer_params']).visualize(source=source_frame,
                                                                                    driving=driving_frame, out=out)
                visualizations.append(visualization)

            predictions = np.concatenate(predictions, axis=1)
            result_name = "-".join([x['driving_name'][0], x['source_name'][0]])
            imageio.imsave(os.path.join(png_dir, result_name + '.png'), (255 * predictions).astype(np.uint8))

            image_name = result_name + animate_params['format']
            imageio.mimsave(os.path.join(log_dir, image_name), visualizations)
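
With use_relative_movement, normalize_kp transfers motion rather than absolute pose: each new keypoint is the source keypoint plus the driving keypoint's displacement from the first driving frame, optionally rescaled by the square-root ratio of the source and driving convex-hull areas. A small numeric sketch of that update (a standalone illustration, not code from this commit):

import torch

kp_source = {'value': torch.tensor([[[0.10, 0.20]]])}          # 1 batch, 1 keypoint
kp_driving_initial = {'value': torch.tensor([[[0.50, 0.50]]])}
kp_driving = {'value': torch.tensor([[[0.60, 0.45]]])}         # moved since frame 0

scale = 1.0  # adapt_movement_scale would set this to sqrt(source_area / driving_area)
diff = (kp_driving['value'] - kp_driving_initial['value']) * scale
kp_new_value = kp_source['value'] + diff
print(kp_new_value)  # -> tensor([[[0.2000, 0.1500]]]): source pose plus driving motion
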
@@ -0,0 +1,345 @@
"""
Code from https://github.com/hassony2/torch_videovision
"""

import numbers

import random
import numpy as np
import PIL

from skimage.transform import resize, rotate
from skimage.util import pad
import torchvision

import warnings

from skimage import img_as_ubyte, img_as_float


def crop_clip(clip, min_h, min_w, h, w):
    if isinstance(clip[0], np.ndarray):
        cropped = [img[min_h:min_h + h, min_w:min_w + w, :] for img in clip]

    elif isinstance(clip[0], PIL.Image.Image):
        cropped = [
            img.crop((min_w, min_h, min_w + w, min_h + h)) for img in clip
        ]
    else:
        raise TypeError('Expected numpy.ndarray or PIL.Image '
                        'but got list of {0}'.format(type(clip[0])))
    return cropped


def pad_clip(clip, h, w):
    im_h, im_w = clip[0].shape[:2]
    pad_h = (0, 0) if h < im_h else ((h - im_h) // 2, (h - im_h + 1) // 2)
    pad_w = (0, 0) if w < im_w else ((w - im_w) // 2, (w - im_w + 1) // 2)

    return pad(clip, ((0, 0), pad_h, pad_w, (0, 0)), mode='edge')


def resize_clip(clip, size, interpolation='bilinear'):
    if isinstance(clip[0], np.ndarray):
        if isinstance(size, numbers.Number):
            im_h, im_w, im_c = clip[0].shape
            # Min spatial dim already matches minimal size
            if (im_w <= im_h and im_w == size) or (im_h <= im_w
                                                   and im_h == size):
                return clip
            new_h, new_w = get_resize_sizes(im_h, im_w, size)
            size = (new_w, new_h)
        else:
            size = size[1], size[0]

        scaled = [
            resize(img, size, order=1 if interpolation == 'bilinear' else 0, preserve_range=True,
                   mode='constant', anti_aliasing=True) for img in clip
        ]
    elif isinstance(clip[0], PIL.Image.Image):
        if isinstance(size, numbers.Number):
            im_w, im_h = clip[0].size
            # Min spatial dim already matches minimal size
            if (im_w <= im_h and im_w == size) or (im_h <= im_w
                                                   and im_h == size):
                return clip
            new_h, new_w = get_resize_sizes(im_h, im_w, size)
            size = (new_w, new_h)
        else:
            size = size[1], size[0]
        if interpolation == 'bilinear':
            pil_inter = PIL.Image.BILINEAR  # fixed: the two modes were swapped
        else:
            pil_inter = PIL.Image.NEAREST
        scaled = [img.resize(size, pil_inter) for img in clip]
    else:
        raise TypeError('Expected numpy.ndarray or PIL.Image '
                        'but got list of {0}'.format(type(clip[0])))
    return scaled


def get_resize_sizes(im_h, im_w, size):
    if im_w < im_h:
        ow = size
        oh = int(size * im_h / im_w)
    else:
        oh = size
        ow = int(size * im_w / im_h)
    return oh, ow


class RandomFlip(object):
    def __init__(self, time_flip=False, horizontal_flip=False):
        self.time_flip = time_flip
        self.horizontal_flip = horizontal_flip

    def __call__(self, clip):
        if random.random() < 0.5 and self.time_flip:
            return clip[::-1]
        if random.random() < 0.5 and self.horizontal_flip:
            return [np.fliplr(img) for img in clip]

        return clip


class RandomResize(object):
    """Resizes a list of (H x W x C) numpy.ndarray to the final size
    The larger the original image is, the more times it takes to
    interpolate
    Args:
        interpolation (str): Can be one of 'nearest', 'bilinear';
        defaults to nearest
        size (tuple): (width, height)
    """

    def __init__(self, ratio=(3. / 4., 4. / 3.), interpolation='nearest'):
        self.ratio = ratio
        self.interpolation = interpolation

    def __call__(self, clip):
        scaling_factor = random.uniform(self.ratio[0], self.ratio[1])

        if isinstance(clip[0], np.ndarray):
            im_h, im_w, im_c = clip[0].shape
        elif isinstance(clip[0], PIL.Image.Image):
            im_w, im_h = clip[0].size

        new_w = int(im_w * scaling_factor)
        new_h = int(im_h * scaling_factor)
        new_size = (new_w, new_h)
        resized = resize_clip(
            clip, new_size, interpolation=self.interpolation)

        return resized


class RandomCrop(object):
    """Extract random crop at the same location for a list of videos
    Args:
        size (sequence or int): Desired output size for the
        crop in format (h, w)
    """

    def __init__(self, size):
        if isinstance(size, numbers.Number):
            size = (size, size)

        self.size = size

    def __call__(self, clip):
        """
        Args:
            img (PIL.Image or numpy.ndarray): List of videos to be cropped
            in format (h, w, c) in numpy.ndarray
        Returns:
            PIL.Image or numpy.ndarray: Cropped list of videos
        """
        h, w = self.size
        if isinstance(clip[0], np.ndarray):
            im_h, im_w, im_c = clip[0].shape
        elif isinstance(clip[0], PIL.Image.Image):
            im_w, im_h = clip[0].size
        else:
            raise TypeError('Expected numpy.ndarray or PIL.Image '
                            'but got list of {0}'.format(type(clip[0])))

        clip = pad_clip(clip, h, w)
        im_h, im_w = clip.shape[1:3]
        x1 = 0 if w == im_w else random.randint(0, im_w - w)  # fixed: x guarded by the width check
        y1 = 0 if h == im_h else random.randint(0, im_h - h)  # fixed: y guarded by the height check
        cropped = crop_clip(clip, y1, x1, h, w)

        return cropped


class RandomRotation(object):
    """Rotate entire clip randomly by a random angle within
    given bounds
    Args:
        degrees (sequence or int): Range of degrees to select from
        If degrees is a number instead of sequence like (min, max),
        the range of degrees will be (-degrees, +degrees).
    """

    def __init__(self, degrees):
        if isinstance(degrees, numbers.Number):
            if degrees < 0:
                raise ValueError('If degrees is a single number, '
                                 'it must be positive.')
            degrees = (-degrees, degrees)
        else:
            if len(degrees) != 2:
                raise ValueError('If degrees is a sequence, '
                                 'it must be of len 2.')

        self.degrees = degrees

    def __call__(self, clip):
        """
        Args:
            img (PIL.Image or numpy.ndarray): List of videos to be rotated
            in format (h, w, c) in numpy.ndarray
        Returns:
            PIL.Image or numpy.ndarray: Rotated list of videos
        """
        angle = random.uniform(self.degrees[0], self.degrees[1])
        if isinstance(clip[0], np.ndarray):
            rotated = [rotate(image=img, angle=angle, preserve_range=True) for img in clip]
        elif isinstance(clip[0], PIL.Image.Image):
            rotated = [img.rotate(angle) for img in clip]
        else:
            raise TypeError('Expected numpy.ndarray or PIL.Image '
                            'but got list of {0}'.format(type(clip[0])))

        return rotated


class ColorJitter(object):
    """Randomly change the brightness, contrast, saturation and hue of the clip
    Args:
        brightness (float): How much to jitter brightness. brightness_factor
        is chosen uniformly from [max(0, 1 - brightness), 1 + brightness].
        contrast (float): How much to jitter contrast. contrast_factor
        is chosen uniformly from [max(0, 1 - contrast), 1 + contrast].
        saturation (float): How much to jitter saturation. saturation_factor
        is chosen uniformly from [max(0, 1 - saturation), 1 + saturation].
        hue (float): How much to jitter hue. hue_factor is chosen uniformly from
        [-hue, hue]. Should be >= 0 and <= 0.5.
    """

    def __init__(self, brightness=0, contrast=0, saturation=0, hue=0):
        self.brightness = brightness
        self.contrast = contrast
        self.saturation = saturation
        self.hue = hue

    def get_params(self, brightness, contrast, saturation, hue):
        if brightness > 0:
            brightness_factor = random.uniform(
                max(0, 1 - brightness), 1 + brightness)
        else:
            brightness_factor = None

        if contrast > 0:
            contrast_factor = random.uniform(
                max(0, 1 - contrast), 1 + contrast)
        else:
            contrast_factor = None

        if saturation > 0:
            saturation_factor = random.uniform(
                max(0, 1 - saturation), 1 + saturation)
        else:
            saturation_factor = None

        if hue > 0:
            hue_factor = random.uniform(-hue, hue)
        else:
            hue_factor = None
        return brightness_factor, contrast_factor, saturation_factor, hue_factor

    def __call__(self, clip):
        """
        Args:
            clip (list): list of PIL.Image
        Returns:
            list PIL.Image: list of transformed PIL.Image
        """
        if isinstance(clip[0], np.ndarray):
            brightness, contrast, saturation, hue = self.get_params(
                self.brightness, self.contrast, self.saturation, self.hue)

            # Create img transform function sequence
            img_transforms = []
            if brightness is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_brightness(img, brightness))
            if saturation is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_saturation(img, saturation))
            if hue is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_hue(img, hue))
            if contrast is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_contrast(img, contrast))
            random.shuffle(img_transforms)
            img_transforms = [img_as_ubyte, torchvision.transforms.ToPILImage()] + img_transforms + [np.array,
                                                                                                     img_as_float]

            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                jittered_clip = []
                for img in clip:
                    jittered_img = img
                    for func in img_transforms:
                        jittered_img = func(jittered_img)
                    jittered_clip.append(jittered_img.astype('float32'))
        elif isinstance(clip[0], PIL.Image.Image):
            brightness, contrast, saturation, hue = self.get_params(
                self.brightness, self.contrast, self.saturation, self.hue)

            # Create img transform function sequence
            img_transforms = []
            if brightness is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_brightness(img, brightness))
            if saturation is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_saturation(img, saturation))
            if hue is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_hue(img, hue))
            if contrast is not None:
                img_transforms.append(lambda img: torchvision.transforms.functional.adjust_contrast(img, contrast))
            random.shuffle(img_transforms)

            # Apply to all frames, chaining the transforms (fixed: each func
            # previously received the original frame, so only the last one took effect)
            jittered_clip = []
            for img in clip:
                jittered_img = img
                for func in img_transforms:
                    jittered_img = func(jittered_img)
                jittered_clip.append(jittered_img)

        else:
            raise TypeError('Expected numpy.ndarray or PIL.Image '
                            'but got list of {0}'.format(type(clip[0])))
        return jittered_clip


class AllAugmentationTransform:
    def __init__(self, resize_param=None, rotation_param=None, flip_param=None, crop_param=None, jitter_param=None):
        self.transforms = []

        if flip_param is not None:
            self.transforms.append(RandomFlip(**flip_param))

        if rotation_param is not None:
            self.transforms.append(RandomRotation(**rotation_param))

        if resize_param is not None:
            self.transforms.append(RandomResize(**resize_param))

        if crop_param is not None:
            self.transforms.append(RandomCrop(**crop_param))

        if jitter_param is not None:
            self.transforms.append(ColorJitter(**jitter_param))

    def __call__(self, clip):
        for t in self.transforms:
            clip = t(clip)
        return clip
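
AllAugmentationTransform simply chains whichever transforms are configured; each __call__ maps a clip (a list of H x W x C frames) to an augmented clip of the same length. A usage sketch to run alongside the module above (the parameter values are arbitrary examples):

import numpy as np

transform = AllAugmentationTransform(
    flip_param={'time_flip': True, 'horizontal_flip': True},
    jitter_param={'brightness': 0.1, 'contrast': 0.1, 'saturation': 0.1, 'hue': 0.1})

clip = [np.random.rand(64, 64, 3).astype('float32') for _ in range(8)]
augmented = transform(clip)
print(len(augmented), augmented[0].shape)  # 8 (64, 64, 3)
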
@@ -0,0 +1,158 @@
import face_alignment
import skimage.io
from argparse import ArgumentParser
from skimage import img_as_ubyte
from skimage.transform import resize
from tqdm import tqdm
import os
import imageio
import numpy as np
import warnings
warnings.filterwarnings("ignore")

def extract_bbox(frame, fa):
    if max(frame.shape[0], frame.shape[1]) > 640:
        scale_factor = max(frame.shape[0], frame.shape[1]) / 640.0
        frame = resize(frame, (int(frame.shape[0] / scale_factor), int(frame.shape[1] / scale_factor)))
        frame = img_as_ubyte(frame)
    else:
        scale_factor = 1
    frame = frame[..., :3]
    bboxes = fa.face_detector.detect_from_image(frame[..., ::-1])
    if len(bboxes) == 0:
        return []
    return np.array(bboxes)[:, :-1] * scale_factor


def bb_intersection_over_union(boxA, boxB):
    xA = max(boxA[0], boxB[0])
    yA = max(boxA[1], boxB[1])
    xB = min(boxA[2], boxB[2])
    yB = min(boxA[3], boxB[3])
    interArea = max(0, xB - xA + 1) * max(0, yB - yA + 1)
    boxAArea = (boxA[2] - boxA[0] + 1) * (boxA[3] - boxA[1] + 1)
    boxBArea = (boxB[2] - boxB[0] + 1) * (boxB[3] - boxB[1] + 1)
    iou = interArea / float(boxAArea + boxBArea - interArea)
    return iou


def join(tube_bbox, bbox):
    xA = min(tube_bbox[0], bbox[0])
    yA = min(tube_bbox[1], bbox[1])
    xB = max(tube_bbox[2], bbox[2])
    yB = max(tube_bbox[3], bbox[3])
    return (xA, yA, xB, yB)


def compute_bbox(start, end, fps, tube_bbox, frame_shape, inp, image_shape, increase_area=0.1):
    left, top, right, bot = tube_bbox
    width = right - left
    height = bot - top

    # Computing aspect preserving bbox
    width_increase = max(increase_area, ((1 + 2 * increase_area) * height - width) / (2 * width))
    height_increase = max(increase_area, ((1 + 2 * increase_area) * width - height) / (2 * height))

    left = int(left - width_increase * width)
    top = int(top - height_increase * height)
    right = int(right + width_increase * width)
    bot = int(bot + height_increase * height)

    top, bot, left, right = max(0, top), min(bot, frame_shape[0]), max(0, left), min(right, frame_shape[1])
    h, w = bot - top, right - left

    start = start / fps
    end = end / fps
    time = end - start

    scale = f'{image_shape[0]}:{image_shape[1]}'

    return f'ffmpeg -i {inp} -ss {start} -t {time} -filter:v "crop={w}:{h}:{left}:{top}, scale={scale}" crop.mp4'


def compute_bbox_trajectories(trajectories, fps, frame_shape, args):
    commands = []
    for i, (bbox, tube_bbox, start, end) in enumerate(trajectories):
        if (end - start) > args.min_frames:
            command = compute_bbox(start, end, fps, tube_bbox, frame_shape, inp=args.inp, image_shape=args.image_shape, increase_area=args.increase)
            commands.append(command)
    return commands


def process_video(args):
    device = 'cpu' if args.cpu else 'cuda'
    fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False, device=device)
    video = imageio.get_reader(args.inp)

    trajectories = []
    previous_frame = None
    fps = video.get_meta_data()['fps']
    commands = []
    try:
        for i, frame in tqdm(enumerate(video)):
            frame_shape = frame.shape
            bboxes = extract_bbox(frame, fa)
            ## For each trajectory check the criterion
            not_valid_trajectories = []
            valid_trajectories = []

            for trajectory in trajectories:
                tube_bbox = trajectory[0]
                intersection = 0
                for bbox in bboxes:
                    intersection = max(intersection, bb_intersection_over_union(tube_bbox, bbox))
                if intersection > args.iou_with_initial:
                    valid_trajectories.append(trajectory)
                else:
                    not_valid_trajectories.append(trajectory)

            commands += compute_bbox_trajectories(not_valid_trajectories, fps, frame_shape, args)
            trajectories = valid_trajectories

            ## Assign bbox to trajectories, create new trajectories
            for bbox in bboxes:
                intersection = 0
                current_trajectory = None
                for trajectory in trajectories:
                    tube_bbox = trajectory[0]
                    current_intersection = bb_intersection_over_union(tube_bbox, bbox)
                    if intersection < current_intersection and current_intersection > args.iou_with_initial:
                        intersection = current_intersection
                        current_trajectory = trajectory

                ## Create new trajectory
                if current_trajectory is None:
                    trajectories.append([bbox, bbox, i, i])
                else:
                    current_trajectory[3] = i
                    current_trajectory[1] = join(current_trajectory[1], bbox)

    except IndexError:
        raise

    commands += compute_bbox_trajectories(trajectories, fps, frame_shape, args)
    return commands


if __name__ == "__main__":
    parser = ArgumentParser()

    parser.add_argument("--image_shape", default=(256, 256), type=lambda x: tuple(map(int, x.split(','))),
                        help="Image shape")
    parser.add_argument("--increase", default=0.1, type=float, help='Increase bbox by this amount')
    parser.add_argument("--iou_with_initial", type=float, default=0.25, help="The minimal allowed iou with initial bbox")
    parser.add_argument("--inp", required=True, help='Input image or video')
    parser.add_argument("--min_frames", type=int, default=150, help='Minimum number of frames')
    parser.add_argument("--cpu", dest="cpu", action="store_true", help="cpu mode.")

    args = parser.parse_args()

    commands = process_video(args)
    for command in commands:
        print(command)
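
bb_intersection_over_union uses the pixel-inclusive convention (+1 on each side length). A worked example, with the functions above in scope: for boxA = (0, 0, 9, 9) and boxB = (5, 5, 14, 14) the overlap is a 5 x 5 patch and each box covers 100 pixels, so IoU = 25 / (100 + 100 - 25) ≈ 0.143.

boxA = (0, 0, 9, 9)
boxB = (5, 5, 14, 14)
print(bb_intersection_over_union(boxA, boxB))  # 0.14285714285714285
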
@ -0,0 +1,197 @@
|
||||
import os
|
||||
from skimage import io, img_as_float32
|
||||
from skimage.color import gray2rgb
|
||||
from sklearn.model_selection import train_test_split
|
||||
from imageio import mimread
|
||||
|
||||
import numpy as np
|
||||
from torch.utils.data import Dataset
|
||||
import pandas as pd
|
||||
from augmentation import AllAugmentationTransform
|
||||
import glob
|
||||
|
||||
|
||||
def read_video(name, frame_shape):
|
||||
"""
|
||||
Read video which can be:
|
||||
- an image of concatenated frames
|
||||
- '.mp4' and'.gif'
|
||||
- folder with videos
|
||||
"""
|
||||
|
||||
if os.path.isdir(name):
|
||||
frames = sorted(os.listdir(name))
|
||||
num_frames = len(frames)
|
||||
video_array = np.array(
|
||||
[img_as_float32(io.imread(os.path.join(name, frames[idx]))) for idx in range(num_frames)])
|
||||
elif name.lower().endswith('.png') or name.lower().endswith('.jpg'):
|
||||
image = io.imread(name)
|
||||
|
||||
if len(image.shape) == 2 or image.shape[2] == 1:
|
||||
image = gray2rgb(image)
|
||||
|
||||
if image.shape[2] == 4:
|
||||
image = image[..., :3]
|
||||
|
||||
image = img_as_float32(image)
|
||||
|
||||
video_array = np.moveaxis(image, 1, 0)
|
||||
|
||||
video_array = video_array.reshape((-1,) + frame_shape)
|
||||
video_array = np.moveaxis(video_array, 1, 2)
|
||||
elif name.lower().endswith('.gif') or name.lower().endswith('.mp4') or name.lower().endswith('.mov'):
|
||||
video = np.array(mimread(name))
|
||||
if len(video.shape) == 3:
|
||||
video = np.array([gray2rgb(frame) for frame in video])
|
||||
if video.shape[-1] == 4:
|
||||
video = video[..., :3]
|
||||
video_array = img_as_float32(video)
|
||||
else:
|
||||
raise Exception("Unknown file extensions %s" % name)
|
||||
|
||||
return video_array
|
||||
|
||||
|
||||
class FramesDataset(Dataset):
|
||||
"""
|
||||
Dataset of videos, each video can be represented as:
|
||||
- an image of concatenated frames
|
||||
- '.mp4' or '.gif'
|
||||
- folder with all frames
|
||||
"""
|
||||
|
||||
def __init__(self, root_dir, frame_shape=(256, 256, 3), id_sampling=False, is_train=True,
|
||||
random_seed=0, pairs_list=None, augmentation_params=None):
|
||||
self.root_dir = root_dir
|
||||
self.videos = os.listdir(root_dir)
|
||||
self.frame_shape = tuple(frame_shape)
|
||||
self.pairs_list = pairs_list
|
||||
self.id_sampling = id_sampling
|
||||
if os.path.exists(os.path.join(root_dir, 'train')):
|
||||
assert os.path.exists(os.path.join(root_dir, 'test'))
|
||||
print("Use predefined train-test split.")
|
||||
if id_sampling:
|
||||
train_videos = {os.path.basename(video).split('#')[0] for video in
|
||||
os.listdir(os.path.join(root_dir, 'train'))}
|
||||
train_videos = list(train_videos)
|
||||
else:
|
||||
train_videos = os.listdir(os.path.join(root_dir, 'train'))
|
||||
test_videos = os.listdir(os.path.join(root_dir, 'test'))
|
||||
self.root_dir = os.path.join(self.root_dir, 'train' if is_train else 'test')
|
||||
else:
|
||||
print("Use random train-test split.")
|
||||
train_videos, test_videos = train_test_split(self.videos, random_state=random_seed, test_size=0.2)
|
||||
|
||||
if is_train:
|
||||
self.videos = train_videos
|
||||
else:
|
||||
self.videos = test_videos
|
||||
|
||||
self.is_train = is_train
|
||||
|
||||
if self.is_train:
|
||||
self.transform = AllAugmentationTransform(**augmentation_params)
|
||||
else:
|
||||
self.transform = None
|
||||
|
||||
def __len__(self):
|
||||
return len(self.videos)
|
||||
|
||||
def __getitem__(self, idx):
|
||||
if self.is_train and self.id_sampling:
|
||||
name = self.videos[idx]
|
||||
path = np.random.choice(glob.glob(os.path.join(self.root_dir, name + '*.mp4')))
|
||||
else:
|
||||
name = self.videos[idx]
|
||||
path = os.path.join(self.root_dir, name)
|
||||
|
||||
video_name = os.path.basename(path)
|
||||
|
||||
if self.is_train and os.path.isdir(path):
|
||||
frames = os.listdir(path)
|
||||
num_frames = len(frames)
|
||||
frame_idx = np.sort(np.random.choice(num_frames, replace=True, size=2))
|
||||
video_array = [img_as_float32(io.imread(os.path.join(path, frames[idx]))) for idx in frame_idx]
|
||||
else:
|
||||
video_array = read_video(path, frame_shape=self.frame_shape)
|
||||
num_frames = len(video_array)
|
||||
frame_idx = np.sort(np.random.choice(num_frames, replace=True, size=2)) if self.is_train else range(
|
||||
num_frames)
|
||||
video_array = video_array[frame_idx]
|
||||
|
||||
if self.transform is not None:
|
||||
video_array = self.transform(video_array)
|
||||
|
||||
out = {}
|
||||
if self.is_train:
|
||||
source = np.array(video_array[0], dtype='float32')
|
||||
driving = np.array(video_array[1], dtype='float32')
|
||||
|
||||
out['driving'] = driving.transpose((2, 0, 1))
|
||||
out['source'] = source.transpose((2, 0, 1))
|
||||
else:
|
||||
video = np.array(video_array, dtype='float32')
|
||||
out['video'] = video.transpose((3, 0, 1, 2))
|
||||
|
||||
out['name'] = video_name
|
||||
|
||||
return out
|
||||
|
||||
|
||||
class DatasetRepeater(Dataset):
|
||||
"""
|
||||
Pass several times over the same dataset for better i/o performance
|
||||
"""
|
||||
|
||||
def __init__(self, dataset, num_repeats=100):
|
||||
self.dataset = dataset
|
||||
self.num_repeats = num_repeats
|
||||
|
||||
def __len__(self):
|
||||
return self.num_repeats * self.dataset.__len__()
|
||||
|
||||
def __getitem__(self, idx):
|
||||
return self.dataset[idx % self.dataset.__len__()]
|
||||
|
||||
|
||||
class PairedDataset(Dataset):
    """
    Dataset of pairs for animation.
    """

    def __init__(self, initial_dataset, number_of_pairs, seed=0):
        self.initial_dataset = initial_dataset
        pairs_list = self.initial_dataset.pairs_list

        np.random.seed(seed)

        if pairs_list is None:
            max_idx = min(number_of_pairs, len(initial_dataset))
            nx, ny = max_idx, max_idx
            xy = np.mgrid[:nx, :ny].reshape(2, -1).T
            number_of_pairs = min(xy.shape[0], number_of_pairs)
            self.pairs = xy.take(np.random.choice(xy.shape[0], number_of_pairs, replace=False), axis=0)
        else:
            videos = self.initial_dataset.videos
            name_to_index = {name: index for index, name in enumerate(videos)}
            pairs = pd.read_csv(pairs_list)
            pairs = pairs[np.logical_and(pairs['source'].isin(videos), pairs['driving'].isin(videos))]

            number_of_pairs = min(pairs.shape[0], number_of_pairs)
            self.pairs = []
            self.start_frames = []
            for ind in range(number_of_pairs):
                self.pairs.append(
                    (name_to_index[pairs['driving'].iloc[ind]], name_to_index[pairs['source'].iloc[ind]]))

    def __len__(self):
        return len(self.pairs)

    def __getitem__(self, idx):
        pair = self.pairs[idx]
        first = self.initial_dataset[pair[0]]
        second = self.initial_dataset[pair[1]]
        first = {'driving_' + key: value for key, value in first.items()}
        second = {'source_' + key: value for key, value in second.items()}

        return {**first, **second}

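# Illustrative sketch: when pairs_list is given it is read with pandas and must
# contain 'source' and 'driving' columns holding video names known to the
# underlying dataset (file name and entries below are hypothetical):
#
#   pairs_list.csv
#     source,driving
#     id10001#video1.mp4,id10002#video7.mp4
#     id10003#video2.mp4,id10001#video1.mp4
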
@ -0,0 +1,208 @@
import numpy as np
import torch
import torch.nn.functional as F
import imageio

import os
from skimage.draw import circle

import matplotlib.pyplot as plt
import collections


class Logger:
    def __init__(self, log_dir, checkpoint_freq=100, visualizer_params=None, zfill_num=8, log_file_name='log.txt'):

        self.loss_list = []
        self.cpk_dir = log_dir
        self.visualizations_dir = os.path.join(log_dir, 'train-vis')
        if not os.path.exists(self.visualizations_dir):
            os.makedirs(self.visualizations_dir)
        self.log_file = open(os.path.join(log_dir, log_file_name), 'a')
        self.zfill_num = zfill_num
        self.visualizer = Visualizer(**visualizer_params)
        self.checkpoint_freq = checkpoint_freq
        self.epoch = 0
        self.best_loss = float('inf')
        self.names = None

    def log_scores(self, loss_names):
        loss_mean = np.array(self.loss_list).mean(axis=0)

        loss_string = "; ".join(["%s - %.5f" % (name, value) for name, value in zip(loss_names, loss_mean)])
        loss_string = str(self.epoch).zfill(self.zfill_num) + ") " + loss_string

        print(loss_string, file=self.log_file)
        self.loss_list = []
        self.log_file.flush()

    def visualize_rec(self, inp, out):
        image = self.visualizer.visualize(inp['driving'], inp['source'], out)
        imageio.imsave(os.path.join(self.visualizations_dir, "%s-rec.png" % str(self.epoch).zfill(self.zfill_num)), image)

    def save_cpk(self, emergent=False):
        cpk = {k: v.state_dict() for k, v in self.models.items()}
        cpk['epoch'] = self.epoch
        cpk_path = os.path.join(self.cpk_dir, '%s-checkpoint.pth.tar' % str(self.epoch).zfill(self.zfill_num))
        if not (os.path.exists(cpk_path) and emergent):
            torch.save(cpk, cpk_path)

    @staticmethod
    def load_cpk(checkpoint_path, generator=None, discriminator=None, kp_detector=None,
                 optimizer_generator=None, optimizer_discriminator=None, optimizer_kp_detector=None):
        checkpoint = torch.load(checkpoint_path)
        if generator is not None:
            generator.load_state_dict(checkpoint['generator'])
        if kp_detector is not None:
            kp_detector.load_state_dict(checkpoint['kp_detector'])
        if discriminator is not None:
            try:
                discriminator.load_state_dict(checkpoint['discriminator'])
            except KeyError:
                print('No discriminator in the state dict. Discriminator will be randomly initialized.')
        if optimizer_generator is not None:
            optimizer_generator.load_state_dict(checkpoint['optimizer_generator'])
        if optimizer_discriminator is not None:
            try:
                optimizer_discriminator.load_state_dict(checkpoint['optimizer_discriminator'])
            except RuntimeError:
                print('No discriminator optimizer in the state dict. Optimizer will not be initialized.')
        if optimizer_kp_detector is not None:
            optimizer_kp_detector.load_state_dict(checkpoint['optimizer_kp_detector'])

        return checkpoint['epoch']

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if 'models' in self.__dict__:
            self.save_cpk()
        self.log_file.close()

    def log_iter(self, losses):
        losses = collections.OrderedDict(losses.items())
        if self.names is None:
            self.names = list(losses.keys())
        self.loss_list.append(list(losses.values()))

    def log_epoch(self, epoch, models, inp, out):
        self.epoch = epoch
        self.models = models
        if (self.epoch + 1) % self.checkpoint_freq == 0:
            self.save_cpk()
            self.log_scores(self.names)
            self.visualize_rec(inp, out)

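# Illustrative sketch: Logger is meant to be driven as a context manager (see
# train.py later in this diff), so __exit__ writes a final checkpoint even if
# training aborts; the parameter values below are hypothetical.
#
#   with Logger(log_dir='log/run1', visualizer_params={}, checkpoint_freq=50) as logger:
#       for epoch in range(num_epochs):
#           for x in dataloader:
#               logger.log_iter(losses={'perceptual': 0.42})
#           logger.log_epoch(epoch, models, inp=x, out=generated)
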
class Visualizer:
    def __init__(self, kp_size=5, draw_border=False, colormap='gist_rainbow'):
        self.kp_size = kp_size
        self.draw_border = draw_border
        self.colormap = plt.get_cmap(colormap)

    def draw_image_with_kp(self, image, kp_array):
        image = np.copy(image)
        spatial_size = np.array(image.shape[:2][::-1])[np.newaxis]
        kp_array = spatial_size * (kp_array + 1) / 2
        num_kp = kp_array.shape[0]
        for kp_ind, kp in enumerate(kp_array):
            rr, cc = circle(kp[1], kp[0], self.kp_size, shape=image.shape[:2])
            image[rr, cc] = np.array(self.colormap(kp_ind / num_kp))[:3]
        return image

    def create_image_column_with_kp(self, images, kp):
        image_array = np.array([self.draw_image_with_kp(v, k) for v, k in zip(images, kp)])
        return self.create_image_column(image_array)

    def create_image_column(self, images):
        if self.draw_border:
            images = np.copy(images)
            images[:, :, [0, -1]] = (1, 1, 1)
        return np.concatenate(list(images), axis=0)

    def create_image_grid(self, *args):
        out = []
        for arg in args:
            if type(arg) == tuple:
                out.append(self.create_image_column_with_kp(arg[0], arg[1]))
            else:
                out.append(self.create_image_column(arg))
        return np.concatenate(out, axis=1)

    def visualize(self, driving, source, out):
        images = []

        # Source image with keypoints
        source = source.data.cpu()
        kp_source = out['kp_source']['value'].data.cpu().numpy()
        source = np.transpose(source, [0, 2, 3, 1])
        images.append((source, kp_source))

        # Equivariance visualization
        if 'transformed_frame' in out:
            transformed = out['transformed_frame'].data.cpu().numpy()
            transformed = np.transpose(transformed, [0, 2, 3, 1])
            transformed_kp = out['transformed_kp']['value'].data.cpu().numpy()
            images.append((transformed, transformed_kp))

        # Driving image with keypoints
        kp_driving = out['kp_driving']['value'].data.cpu().numpy()
        driving = driving.data.cpu().numpy()
        driving = np.transpose(driving, [0, 2, 3, 1])
        images.append((driving, kp_driving))

        # Deformed image
        if 'deformed' in out:
            deformed = out['deformed'].data.cpu().numpy()
            deformed = np.transpose(deformed, [0, 2, 3, 1])
            images.append(deformed)

        # Result with and without keypoints
        prediction = out['prediction'].data.cpu().numpy()
        prediction = np.transpose(prediction, [0, 2, 3, 1])
        if 'kp_norm' in out:
            kp_norm = out['kp_norm']['value'].data.cpu().numpy()
            images.append((prediction, kp_norm))
        images.append(prediction)

        # Occlusion map
        if 'occlusion_map' in out:
            occlusion_map = out['occlusion_map'].data.cpu().repeat(1, 3, 1, 1)
            occlusion_map = F.interpolate(occlusion_map, size=source.shape[1:3]).numpy()
            occlusion_map = np.transpose(occlusion_map, [0, 2, 3, 1])
            images.append(occlusion_map)

        # Deformed images according to each individual transform
        if 'sparse_deformed' in out:
            full_mask = []
            for i in range(out['sparse_deformed'].shape[1]):
                image = out['sparse_deformed'][:, i].data.cpu()
                image = F.interpolate(image, size=source.shape[1:3])
                mask = out['mask'][:, i:(i + 1)].data.cpu().repeat(1, 3, 1, 1)
                mask = F.interpolate(mask, size=source.shape[1:3])
                image = np.transpose(image.numpy(), (0, 2, 3, 1))
                mask = np.transpose(mask.numpy(), (0, 2, 3, 1))

                if i != 0:
                    color = np.array(self.colormap((i - 1) / (out['sparse_deformed'].shape[1] - 1)))[:3]
                else:
                    color = np.array((0, 0, 0))

                color = color.reshape((1, 1, 1, 3))

                images.append(image)
                if i != 0:
                    images.append(mask * color)
                else:
                    images.append(mask)

                full_mask.append(mask * color)

            images.append(sum(full_mask))

        image = self.create_image_grid(*images)
        image = (255 * image).astype(np.uint8)
        return image

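# Illustrative note: visualize() appends one batch-sized column per entry in
# `images` and create_image_grid() concatenates them left to right, so a row
# of the saved visualization reads: source+kp | [transformed+kp] | driving+kp
# | [deformed] | prediction(+kp) | [occlusion map] | [per-transform
# deformations and masks]; bracketed columns appear only when present in `out`.
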
File diff suppressed because it is too large
@ -0,0 +1,67 @@
import os
from tqdm import tqdm
import torch
from torch.utils.data import DataLoader
from logger import Logger, Visualizer
import numpy as np
import imageio
from sync_batchnorm import DataParallelWithCallback


def reconstruction(config, generator, kp_detector, checkpoint, log_dir, dataset):
    png_dir = os.path.join(log_dir, 'reconstruction/png')
    log_dir = os.path.join(log_dir, 'reconstruction')

    if checkpoint is not None:
        Logger.load_cpk(checkpoint, generator=generator, kp_detector=kp_detector)
    else:
        raise AttributeError("Checkpoint should be specified for mode='reconstruction'.")
    dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1)

    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    if not os.path.exists(png_dir):
        os.makedirs(png_dir)

    loss_list = []
    if torch.cuda.is_available():
        generator = DataParallelWithCallback(generator)
        kp_detector = DataParallelWithCallback(kp_detector)

    generator.eval()
    kp_detector.eval()

    for it, x in tqdm(enumerate(dataloader)):
        if config['reconstruction_params']['num_videos'] is not None:
            if it > config['reconstruction_params']['num_videos']:
                break
        with torch.no_grad():
            predictions = []
            visualizations = []
            if torch.cuda.is_available():
                x['video'] = x['video'].cuda()
            kp_source = kp_detector(x['video'][:, :, 0])
            for frame_idx in range(x['video'].shape[2]):
                source = x['video'][:, :, 0]
                driving = x['video'][:, :, frame_idx]
                kp_driving = kp_detector(driving)
                out = generator(source, kp_source=kp_source, kp_driving=kp_driving)
                out['kp_source'] = kp_source
                out['kp_driving'] = kp_driving
                del out['sparse_deformed']
                predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0])

                visualization = Visualizer(**config['visualizer_params']).visualize(source=source,
                                                                                    driving=driving, out=out)
                visualizations.append(visualization)

                loss_list.append(torch.abs(out['prediction'] - driving).mean().cpu().numpy())

            predictions = np.concatenate(predictions, axis=1)
            imageio.imsave(os.path.join(png_dir, x['name'][0] + '.png'), (255 * predictions).astype(np.uint8))

            image_name = x['name'][0] + config['reconstruction_params']['format']
            imageio.mimsave(os.path.join(log_dir, image_name), visualizations)

    print("Reconstruction loss: %s" % np.mean(loss_list))
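# Illustrative usage (paths hypothetical): reconstruction mode requires a
# trained checkpoint, matching the AttributeError raised above; checkpoint
# names follow Logger.save_cpk's '%s-checkpoint.pth.tar' pattern.
#
#   python run.py --config config/dataset.yaml --mode reconstruction \
#          --checkpoint 'log/dataset/00000099-checkpoint.pth.tar'
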
@ -0,0 +1,13 @@
imageio==2.19.3
matplotlib==3.5.2
numpy==1.22.2
pandas==1.3.4
python-dateutil==2.8.2
pytz==2021.3
PyYAML==5.4.1
scikit-image==0.18.3
scikit-learn==1.1.1
scipy==1.8.1
torch==1.10.0
torchvision==0.11.1
tqdm==4.64.0
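# Illustrative setup (venv directory name hypothetical): install the pinned
# versions above before running the entry points below.
#
#   python -m venv venv && . venv/bin/activate
#   pip install -r requirements.txt
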
@ -0,0 +1,87 @@
import matplotlib

matplotlib.use('Agg')

import os, sys
import yaml
from argparse import ArgumentParser
from time import gmtime, strftime
from shutil import copy

from frames_dataset import FramesDataset

from modules.generator import OcclusionAwareGenerator
from modules.discriminator import MultiScaleDiscriminator
from modules.keypoint_detector import KPDetector

import torch

from train import train
from reconstruction import reconstruction
from animate import animate

if __name__ == "__main__":

    if sys.version_info[0] < 3:
        raise Exception("You must use Python 3 or higher. Recommended version is Python 3.7.")

    parser = ArgumentParser()
    parser.add_argument("--config", required=True, help="path to config")
    parser.add_argument("--mode", default="train", choices=["train", "reconstruction", "animate"])
    parser.add_argument("--log_dir", default='log', help="path to log into")
    parser.add_argument("--checkpoint", default=None, help="path to checkpoint to restore")
    parser.add_argument("--device_ids", default="0", type=lambda x: list(map(int, x.split(','))),
                        help="Names of the devices comma separated.")
    parser.add_argument("--verbose", dest="verbose", action="store_true", help="Print model architecture")
    parser.set_defaults(verbose=False)

    opt = parser.parse_args()
    with open(opt.config) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    if opt.checkpoint is not None:
        log_dir = os.path.join(*os.path.split(opt.checkpoint)[:-1])
    else:
        log_dir = os.path.join(opt.log_dir, os.path.basename(opt.config).split('.')[0])
        log_dir += ' ' + strftime("%d_%m_%y_%H.%M.%S", gmtime())

    generator = OcclusionAwareGenerator(**config['model_params']['generator_params'],
                                        **config['model_params']['common_params'])

    if torch.cuda.is_available():
        generator.to(opt.device_ids[0])
    if opt.verbose:
        print(generator)

    discriminator = MultiScaleDiscriminator(**config['model_params']['discriminator_params'],
                                            **config['model_params']['common_params'])
    if torch.cuda.is_available():
        discriminator.to(opt.device_ids[0])
    if opt.verbose:
        print(discriminator)

    kp_detector = KPDetector(**config['model_params']['kp_detector_params'],
                             **config['model_params']['common_params'])

    if torch.cuda.is_available():
        kp_detector.to(opt.device_ids[0])

    if opt.verbose:
        print(kp_detector)

    dataset = FramesDataset(is_train=(opt.mode == 'train'), **config['dataset_params'])

    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if not os.path.exists(os.path.join(log_dir, os.path.basename(opt.config))):
        copy(opt.config, log_dir)

    if opt.mode == 'train':
        print("Training...")
        train(config, generator, discriminator, kp_detector, opt.checkpoint, log_dir, dataset, opt.device_ids)
    elif opt.mode == 'reconstruction':
        print("Reconstruction...")
        reconstruction(config, generator, kp_detector, opt.checkpoint, log_dir, dataset)
    elif opt.mode == 'animate':
        print("Animate...")
        animate(config, generator, kp_detector, opt.checkpoint, log_dir, dataset)
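# Illustrative invocation (config path hypothetical): --device_ids is parsed
# by the comma-splitting lambda above, so multi-GPU training is e.g.:
#
#   python run.py --config config/dataset.yaml --mode train --device_ids 0,1
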
@ -0,0 +1,87 @@
from tqdm import trange
import torch

from torch.utils.data import DataLoader

from logger import Logger
from modules.model import GeneratorFullModel, DiscriminatorFullModel

from torch.optim.lr_scheduler import MultiStepLR

from sync_batchnorm import DataParallelWithCallback

from frames_dataset import DatasetRepeater


def train(config, generator, discriminator, kp_detector, checkpoint, log_dir, dataset, device_ids):
    train_params = config['train_params']

    optimizer_generator = torch.optim.Adam(generator.parameters(), lr=train_params['lr_generator'], betas=(0.5, 0.999))
    optimizer_discriminator = torch.optim.Adam(discriminator.parameters(), lr=train_params['lr_discriminator'], betas=(0.5, 0.999))
    optimizer_kp_detector = torch.optim.Adam(kp_detector.parameters(), lr=train_params['lr_kp_detector'], betas=(0.5, 0.999))

    if checkpoint is not None:
        start_epoch = Logger.load_cpk(checkpoint, generator, discriminator, kp_detector,
                                      optimizer_generator, optimizer_discriminator,
                                      None if train_params['lr_kp_detector'] == 0 else optimizer_kp_detector)
    else:
        start_epoch = 0

    scheduler_generator = MultiStepLR(optimizer_generator, train_params['epoch_milestones'], gamma=0.1,
                                      last_epoch=start_epoch - 1)
    scheduler_discriminator = MultiStepLR(optimizer_discriminator, train_params['epoch_milestones'], gamma=0.1,
                                          last_epoch=start_epoch - 1)
    scheduler_kp_detector = MultiStepLR(optimizer_kp_detector, train_params['epoch_milestones'], gamma=0.1,
                                        last_epoch=-1 + start_epoch * (train_params['lr_kp_detector'] != 0))

    if 'num_repeats' in train_params and train_params['num_repeats'] != 1:
        dataset = DatasetRepeater(dataset, train_params['num_repeats'])
    dataloader = DataLoader(dataset, batch_size=train_params['batch_size'], shuffle=True, num_workers=6, drop_last=True)

    generator_full = GeneratorFullModel(kp_detector, generator, discriminator, train_params)
    discriminator_full = DiscriminatorFullModel(kp_detector, generator, discriminator, train_params)

    if torch.cuda.is_available():
        generator_full = DataParallelWithCallback(generator_full, device_ids=device_ids)
        discriminator_full = DataParallelWithCallback(discriminator_full, device_ids=device_ids)

    with Logger(log_dir=log_dir, visualizer_params=config['visualizer_params'], checkpoint_freq=train_params['checkpoint_freq']) as logger:
        for epoch in trange(start_epoch, train_params['num_epochs']):
            for x in dataloader:
                losses_generator, generated = generator_full(x)

                loss_values = [val.mean() for val in losses_generator.values()]
                loss = sum(loss_values)

                loss.backward()
                optimizer_generator.step()
                optimizer_generator.zero_grad()
                optimizer_kp_detector.step()
                optimizer_kp_detector.zero_grad()

                if train_params['loss_weights']['generator_gan'] != 0:
                    optimizer_discriminator.zero_grad()
                    losses_discriminator = discriminator_full(x, generated)
                    loss_values = [val.mean() for val in losses_discriminator.values()]
                    loss = sum(loss_values)

                    loss.backward()
                    optimizer_discriminator.step()
                    optimizer_discriminator.zero_grad()
                else:
                    losses_discriminator = {}

                losses_generator.update(losses_discriminator)
                losses = {key: value.mean().detach().data.cpu().numpy() for key, value in losses_generator.items()}
                logger.log_iter(losses=losses)

            scheduler_generator.step()
            scheduler_discriminator.step()
            scheduler_kp_detector.step()

            logger.log_epoch(epoch, {'generator': generator,
                                     'discriminator': discriminator,
                                     'kp_detector': kp_detector,
                                     'optimizer_generator': optimizer_generator,
                                     'optimizer_discriminator': optimizer_discriminator,
                                     'optimizer_kp_detector': optimizer_kp_detector}, inp=x, out=generated)
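# Illustrative sketch of the train_params keys read above (values hypothetical;
# they normally come from the YAML config passed to run.py):
#
#   train_params = {
#       'lr_generator': 2.0e-4, 'lr_discriminator': 2.0e-4, 'lr_kp_detector': 2.0e-4,
#       'epoch_milestones': [60, 90], 'num_epochs': 100, 'num_repeats': 75,
#       'batch_size': 4, 'checkpoint_freq': 50,
#       'loss_weights': {'generator_gan': 1},  # 0 disables the discriminator step
#   }
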
@ -0,0 +1,14 @@
from typing import List, Optional

__version__ = "21.1.2"


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is an internal API only meant for use by pip's own console scripts.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
@ -0,0 +1,31 @@
import os
import sys
import warnings

# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory
# in pip commands check, freeze, install, list and show,
# when invoked as python -m pip <command>
if sys.path[0] in ("", os.getcwd()):
    sys.path.pop(0)

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == "":
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

if __name__ == "__main__":
    # Work around the error reported in #9540, pending a proper fix.
    # Note: It is essential the warning filter is set *before* importing
    # pip, as the deprecation happens at import time, not runtime.
    warnings.filterwarnings(
        "ignore", category=DeprecationWarning, module=".*packaging\\.version"
    )
    from pip._internal.cli.main import main as _main

    sys.exit(_main())
@ -0,0 +1,15 @@
from typing import List, Optional

import pip._internal.utils.inject_securetransport  # noqa


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
@ -0,0 +1,286 @@
"""Build Environment used for isolation during sdist building
"""

import contextlib
import logging
import os
import pathlib
import sys
import textwrap
import zipfile
from collections import OrderedDict
from sysconfig import get_paths
from types import TracebackType
from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type

from pip._vendor.certifi import where
from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet

from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds

if TYPE_CHECKING:
    from pip._internal.index.package_finder import PackageFinder

logger = logging.getLogger(__name__)


class _Prefix:

    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        self.lib_dirs = get_prefixed_libs(path)


@contextlib.contextmanager
def _create_standalone_pip() -> Iterator[str]:
    """Create a "standalone pip" zip file.

    The zip file's content is identical to the currently-running pip.
    It will be used to install requirements into the build environment.
    """
    source = pathlib.Path(pip_location).resolve().parent

    # Return the current instance if `source` is not a directory. We can't build
    # a zip from this, and it likely means the instance is already standalone.
    if not source.is_dir():
        yield str(source)
        return

    with TempDirectory(kind="standalone-pip") as tmp_dir:
        pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
        kwargs = {}
        if sys.version_info >= (3, 8):
            kwargs["strict_timestamps"] = False
        with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
            for child in source.rglob("*"):
                zf.write(child, child.relative_to(source.parent).as_posix())
        yield os.path.join(pip_zip, "pip")


class BuildEnvironment:
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        temp_dir = TempDirectory(
            kind=tempdir_kinds.BUILD_ENV, globally_managed=True
        )

        self._prefixes = OrderedDict(
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ('normal', 'overlay')
        )

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (get_purelib(), get_platlib())
        }
        self._site_dir = os.path.join(temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories, ensuring .pth files are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        # type: () -> None
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: str
    ):
        # type: (...) -> None
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        with contextlib.ExitStack() as ctx:
            # TODO: Remove this block when dropping 3.6 support. Python 3.6
            # lacks importlib.resources and pep517 has issues loading files in
            # a zip, so we fall back to the "old" method by adding the current
            # pip directory to the child process's sys.path.
            if sys.version_info < (3, 7):
                pip_runnable = os.path.dirname(pip_location)
            else:
                pip_runnable = ctx.enter_context(_create_standalone_pip())
            self._install_requirements(
                pip_runnable,
                finder,
                requirements,
                prefix,
                message,
            )

    @staticmethod
    def _install_requirements(
        pip_runnable: str,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix: _Prefix,
        message: str,
    ) -> None:
        args = [
            sys.executable, pip_runnable, 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))

        index_urls = finder.index_urls
        if index_urls:
            args.extend(['-i', index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])

        for host in finder.trusted_hosts:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        if finder.prefer_binary:
            args.append('--prefer-binary')
        args.append('--')
        args.extend(requirements)
        extra_environ = {"_PIP_STANDALONE_CERT": where()}
        with open_spinner(message) as spinner:
            call_subprocess(args, spinner=spinner, extra_environ=extra_environ)


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self):
        # type: () -> None
        pass

    def __enter__(self):
        # type: () -> None
        pass

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        pass

    def cleanup(self):
        # type: () -> None
        pass

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: str
    ):
        # type: (...) -> None
        raise NotImplementedError()
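# Illustrative sketch of how callers drive BuildEnvironment: entering it
# patches PATH/PYTHONNOUSERSITE/PYTHONPATH, and install_requirements() shells
# out to the standalone pip (the finder and requirement list are hypothetical).
#
#   env = BuildEnvironment()
#   env.install_requirements(finder, ['setuptools>=40.8.0', 'wheel'],
#                            'normal', "Installing build dependencies")
#   with env:
#       ...  # run the build backend with the isolated environment active
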
@ -0,0 +1,287 @@
"""Cache Management
"""

import hashlib
import json
import logging
import os
from typing import Any, Dict, List, Optional, Set

from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.urls import path_to_url

logger = logging.getLogger(__name__)


def _hash_dict(d):
    # type: (Dict[str, str]) -> str
    """Return a stable sha224 of a dictionary."""
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()


class Cache:
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super().__init__()
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of path that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, canonical_package_name):
        # type: (Link, str) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not canonical_package_name or
            not link
        )
        if can_not_cache:
            return []

        formats = self.format_control.get_allowed_formats(
            canonical_package_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            for candidate in os.listdir(path):
                candidates.append((candidate, path))
        return candidates

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super().__init__(cache_dir, format_control, {"binary"})

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)
        assert self.cache_dir
        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(
            link, canonical_package_name
        ):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if canonicalize_name(wheel.name) != canonical_package_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    wheel_name, link, package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            candidates.append(
                (
                    wheel.support_index_min(supported_tags),
                    wheel_name,
                    wheel_dir,
                )
            )

        if not candidates:
            return link

        _, wheel_name, wheel_dir = min(candidates)
        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super().__init__(self._temp_dir.path, format_control)


class CacheEntry:
    def __init__(
        self,
        link,  # type: Link
        persistent,  # type: bool
    ):
        self.link = link
        self.persistent = persistent


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super().__init__(cache_dir, format_control, {'binary'})
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
        return cache_entry.link

    def get_cache_entry(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Optional[CacheEntry]
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
        """
        retval = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=True)

        retval = self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=False)

        return None
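# Illustrative note: _hash_dict is stable because json.dumps(sort_keys=True)
# canonicalizes key order before hashing, so logically equal dicts map to the
# same nested cache path.
#
#   _hash_dict({'b': '2', 'a': '1'}) == _hash_dict({'a': '1', 'b': '2'})  # True
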
@ -0,0 +1,4 @@
"""Subpackage containing all of pip's command line interface related code
"""

# This file intentionally does not import submodules
@ -0,0 +1,162 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""

import optparse
import os
import sys
from itertools import chain
from typing import Any, Iterable, List, Optional

from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, create_command
from pip._internal.utils.misc import get_installed_distributions


def autocomplete():
    # type: () -> None
    """Entry Point for completion of main and subcommand options."""
    # Don't complete if user hasn't sourced bash_completion file.
    if "PIP_AUTO_COMPLETE" not in os.environ:
        return
    cwords = os.environ["COMP_WORDS"].split()[1:]
    cword = int(os.environ["COMP_CWORD"])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ""

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand
    subcommand_name = None  # type: Optional[str]
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == "help":
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = not current.startswith("-") and subcommand_name in [
            "show",
            "uninstall",
        ]
        if should_list_installed:
            lc = current.lower()
            installed = [
                dist.key
                for dist in get_installed_distributions(local_only=True)
                if dist.key.startswith(lc) and dist.key not in cwords[1:]
            ]
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = create_command(subcommand_name)

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords,
            cword,
            subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += "="
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith("-"):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current, completion_type))

        print(" ".join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def get_path_completion_type(cwords, cword, opts):
    # type: (List[str], int, Iterable[Any]) -> Optional[str]
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environment variable ``COMP_WORDS``
    :param cword: same as the environment variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    if cword < 2 or not cwords[cword - 2].startswith("-"):
        return None
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        for o in str(opt).split("/"):
            if cwords[cword - 2].split("=")[0] == o:
                if not opt.metavar or any(
                    x in ("path", "file", "dir") for x in opt.metavar.split("/")
                ):
                    return opt.metavar
    return None


def auto_complete_paths(current, completion_type):
    # type: (str, str) -> Iterable[str]
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    # list all files that start with ``filename``
    file_list = (
        x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
    )
    for f in file_list:
        opt = os.path.join(current_path, f)
        comp_file = os.path.normcase(os.path.join(directory, f))
        # complete regular files when there is not ``<dir>`` after option
        # complete directories when there is ``<file>``, ``<path>`` or
        # ``<dir>`` after option
        if completion_type != "dir" and os.path.isfile(opt):
            yield comp_file
        elif os.path.isdir(opt):
            yield os.path.join(comp_file, "")
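# Illustrative trace: pip's completion script exports these variables before
# re-invoking pip, e.g. when completing "pip ins<TAB>":
#
#   PIP_AUTO_COMPLETE=1 COMP_WORDS='pip ins' COMP_CWORD=1 pip
#   # autocomplete() prints "install" and exits with status 1
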
@ -0,0 +1,221 @@
"""Base Command class, and related routines"""

import logging
import logging.config
import optparse
import os
import sys
import traceback
from optparse import Values
from typing import Any, List, Optional, Tuple

from pip._internal.cli import cmdoptions
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip._internal.cli.status_codes import (
    ERROR,
    PREVIOUS_BUILD_DIR_ERROR,
    UNKNOWN_ERROR,
    VIRTUALENV_NOT_FOUND,
)
from pip._internal.exceptions import (
    BadCommand,
    CommandError,
    InstallationError,
    NetworkConnectionError,
    PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
from pip._internal.utils.virtualenv import running_under_virtualenv

__all__ = ["Command"]

logger = logging.getLogger(__name__)


class Command(CommandContextMixIn):
    usage = None  # type: str
    ignore_require_venv = False  # type: bool

    def __init__(self, name, summary, isolated=False):
        # type: (str, str, bool) -> None
        super().__init__()

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog=f"{get_prog()} {name}",
            formatter=UpdatingDefaultsHelpFormatter(),
            add_help_option=False,
            name=name,
            description=self.__doc__,
            isolated=isolated,
        )

        self.tempdir_registry = None  # type: Optional[TempDirRegistry]

        # Commands should add options to this option group
        optgroup_name = f"{self.name.capitalize()} Options"
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

        self.add_options()

    def add_options(self):
        # type: () -> None
        pass

    def handle_pip_version_check(self, options):
        # type: (Values) -> None
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        raise NotImplementedError

    def parse_args(self, args):
        # type: (List[str]) -> Tuple[Any, Any]
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        # type: (List[str]) -> int
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args):
        # type: (List[str]) -> int
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        # TODO: Try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        # This also affects isolated builds and it should.

        if options.no_input:
            os.environ["PIP_NO_INPUT"] = "1"

        if options.exists_action:
            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical("Could not find an activated virtualenv (required).")
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you should "
                    "use sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        if getattr(options, "build_dir", None):
            deprecated(
                reason=(
                    "The -b/--build/--build-dir/--build-directory "
                    "option is deprecated and has no effect anymore."
                ),
                replacement=(
                    "use the TMPDIR/TEMP/TMP environment variable, "
                    "possibly combined with --no-clean"
                ),
                gone_in="21.3",
                issue=8333,
            )

        if "2020-resolver" in options.features_enabled:
            logger.warning(
                "--use-feature=2020-resolver no longer has any effect, "
                "since it is now the default dependency resolver in pip. "
                "This will become an error in pip 21.0."
            )

        try:
            status = self.run(options, args)
            assert isinstance(status, int)
            return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (
            InstallationError,
            UninstallationError,
            BadCommand,
            NetworkConnectionError,
        ) as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical("%s", exc)
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print("ERROR: Pipe to stdout was broken", file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical("Operation cancelled by user")
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BaseException:
            logger.critical("Exception:", exc_info=True)

            return UNKNOWN_ERROR
        finally:
            self.handle_pip_version_check(options)
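# Illustrative sketch (hypothetical command, not part of pip): concrete
# commands subclass Command, register options in add_options(), and return a
# status code from run().
#
#   class GreetCommand(Command):
#       """Print a greeting."""
#       def add_options(self):
#           self.cmd_opts.add_option('--name', default='world')
#           self.parser.insert_option_group(0, self.cmd_opts)
#       def run(self, options, args):
#           print(f"hello {options.name}")
#           return 0
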
File diff suppressed because it is too large
@ -0,0 +1,30 @@
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Iterator, TypeVar

_T = TypeVar("_T", covariant=True)


class CommandContextMixIn:
    def __init__(self):
        # type: () -> None
        super().__init__()
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self):
        # type: () -> Iterator[None]
        assert not self._in_main_context

        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider):
        # type: (ContextManager[_T]) -> _T
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
@ -0,0 +1,71 @@
"""Primary application entrypoint.
"""
import locale
import logging
import os
import sys
from typing import List, Optional

from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import create_command
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation

logger = logging.getLogger(__name__)


# Do not import and use main() directly! Using it directly is actively
# discouraged by pip's maintainers. The name, location and behavior of
# this function is subject to change, so calling it directly is not
# portable across different pip versions.

# In addition, running pip in-process is unsupported and unsafe. This is
# elaborated in detail at
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
# That document also provides suggestions that should work for nearly
# all users that are considering importing and using main() directly.

# However, we know that certain users will still want to invoke pip
# in-process. If you understand and accept the implications of using pip
# in an unsupported manner, the best approach is to use runpy to avoid
# depending on the exact location of this entry point.

# The following example shows how to use runpy to invoke pip in that
# case:
#
#     sys.argv = ["pip", your, args, here]
#     runpy.run_module("pip", run_name="__main__")
#
# Note that this will exit the process after running, unlike a direct
# call to main. As it is not safe to do any processing after calling
# main, this should not be an issue in practice.


def main(args=None):
    # type: (Optional[List[str]]) -> int
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locales are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
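
Spelled out as a runnable script, the runpy approach the module comments above recommend looks like this (the package name "requests" is only an example):

import runpy
import sys

sys.argv = ["pip", "install", "--quiet", "requests"]
# Runs pip as if invoked from the command line; note this exits the
# process when pip finishes, unlike calling main() directly.
runpy.run_module("pip", run_name="__main__")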
@ -0,0 +1,89 @@
"""A single place for constructing and exposing the main parser
"""

import os
import sys
from typing import List, Tuple

from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip._internal.commands import commands_dict, get_similar_commands
from pip._internal.exceptions import CommandError
from pip._internal.utils.misc import get_pip_version, get_prog

__all__ = ["create_main_parser", "parse_command"]


def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI"""

    parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    # so the help formatter knows
    parser.main = True  # type: ignore

    # create command listing for description
    description = [""] + [
        f"{name:27} {command_info.summary}"
        for name, command_info in commands_dict.items()
    ]
    parser.description = "\n".join(description)

    return parser


def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
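
A quick sketch of the split parse_command() performs, mirroring the worked example in its comments (the subcommand is extracted, everything else is passed through in order):

from pip._internal.cli.main_parser import parse_command

cmd_name, cmd_args = parse_command(
    ["--timeout=5", "install", "--user", "INITools"]
)
assert cmd_name == "install"
assert cmd_args == ["--timeout=5", "--user", "INITools"]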
@ -0,0 +1,305 @@
"""Base option parser setup"""

import logging
import optparse
import shutil
import sys
import textwrap
from contextlib import suppress
from typing import Any, Dict, Iterator, List, Tuple

from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.misc import redact_auth_from_url, strtobool

logger = logging.getLogger(__name__)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        # help position must be aligned with __init__.parseopts.description
        kwargs["max_help_position"] = 30
        kwargs["indent_increment"] = 1
        kwargs["width"] = shutil.get_terminal_size()[0] - 2
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option):
        # type: (optparse.Option) -> str
        return self._format_option_strings(option)

    def _format_option_strings(self, option, mvarfmt=" <{}>", optsep=", "):
        # type: (optparse.Option, str, str) -> str
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep: separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt.format(metavar.lower()))

        return "".join(opts)

    def format_heading(self, heading):
        # type: (str) -> str
        if heading == "Options":
            return ""
        return heading + ":\n"

    def format_usage(self, usage):
        # type: (str) -> str
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
        return msg

    def format_description(self, description):
        # type: (str) -> str
        # leave full control over description to us
        if description:
            if hasattr(self.parser, "main"):
                label = "Commands"
            else:
                label = "Description"
            # some doc strings have initial newlines, some don't
            description = description.lstrip("\n")
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = f"{label}:\n{description}\n"
            return description
        else:
            return ""

    def format_epilog(self, epilog):
        # type: (str) -> str
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ""

    def indent_lines(self, text, indent):
        # type: (str, str) -> str
        new_lines = [indent + line for line in text.split("\n")]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.

    Also redacts auth from URL-type options.
    """

    def expand_default(self, option):
        # type: (optparse.Option) -> str
        default_values = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            default_values = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        if default_values and option.metavar == "URL":
            if isinstance(default_values, str):
                default_values = [default_values]

            # If it's not a list, we should abort and just return the help text
            if not isinstance(default_values, list):
                default_values = []

            for val in default_values:
                help_text = help_text.replace(val, redact_auth_from_url(val))

        return help_text


class CustomOptionParser(optparse.OptionParser):
    def insert_option_group(self, idx, *args, **kwargs):
        # type: (int, Any, Any) -> optparse.OptionGroup
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        # type: () -> List[optparse.Option]
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args,  # type: Any
        name,  # type: str
        isolated=False,  # type: bool
        **kwargs,  # type: Any
    ):
        # type: (...) -> None
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option, key, val):
        # type: (optparse.Option, str, Any) -> Any
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # type: () -> Iterator[Tuple[str, Any]]
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {
            name: [] for name in override_order
        }  # type: Dict[str, List[Tuple[str, Any]]]
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as its value is empty.",
                    section_key,
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        # type: (Dict[str, Any]) -> Dict[str, Any]
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead.".format(val, key)
                    )
            elif option.action == "count":
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0.".format(val, key)
                    )
            elif option.action == "append":
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        # type: () -> optparse.Values
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            if isinstance(default, str):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        # type: (str) -> None
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")
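
A hedged sketch of the precedence _get_ordered_configuration_items() implements: values are yielded for the "global" section first, then the per-command section, then ":env:", so a later section overwrites an earlier one when the defaults are applied. The config dict below is illustrative, not a real pip configuration:

override_order = ["global", "install", ":env:"]
config = {
    "global.timeout": "15",
    "install.timeout": "30",
    ":env:.timeout": "60",
}

merged = {}
for section in override_order:
    for section_key, val in config.items():
        name, key = section_key.split(".", 1)
        if name == section:
            merged[key] = val  # later sections win

print(merged)  # {'timeout': '60'} -- the environment value prevails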
@ -0,0 +1,261 @@
import itertools
import sys
from signal import SIGINT, default_int_handler, signal
from typing import Any, Dict, List

from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
from pip._vendor.progress.spinner import Spinner

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


def _select_progress_class(preferred, fallback):
    # type: (Bar, Bar) -> Bar
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", ""),
        getattr(preferred, "fill", ""),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file, if this works then we'll assume that we can use the
    # fancier bar and if not we'll fall back to the plaintext bar.
    try:
        "".join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any


class InterruptibleMixin:
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        """
        Save the original SIGINT handler for later.
        """
        # https://github.com/python/mypy/issues/5887
        super().__init__(*args, **kwargs)  # type: ignore

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        # type: () -> None
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super().finish()  # type: ignore
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):  # type: ignore
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class SilentBar(Bar):
    def update(self):
        # type: () -> None
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = ("\U0001F539", "\U0001F537", "\U0001F535")


class DownloadProgressMixin:
    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        # https://github.com/python/mypy/issues/5887
        super().__init__(*args, **kwargs)  # type: ignore
        self.message = (" " * (get_indentation() + 2)) + self.message  # type: str

    @property
    def downloaded(self):
        # type: () -> str
        return format_size(self.index)  # type: ignore

    @property
    def download_speed(self):
        # type: () -> str
        # Avoid zero division errors...
        if self.avg == 0.0:  # type: ignore
            return "..."
        return format_size(1 / self.avg) + "/s"  # type: ignore

    @property
    def pretty_eta(self):
        # type: () -> str
        if self.eta:  # type: ignore
            return f"eta {self.eta_td}"  # type: ignore
        return ""

    def iter(self, it):  # type: ignore
        for x in it:
            yield x
            # B305 is incorrectly raised here
            # https://github.com/PyCQA/flake8-bugbear/issues/59
            self.next(len(x))  # noqa: B305
        self.finish()


class WindowsMixin:
    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:  # type: ignore
            self.hide_cursor = False

        # https://github.com/python/mypy/issues/5887
        super().__init__(*args, **kwargs)  # type: ignore

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)  # type: ignore
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"


class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
    pass


class DownloadBar(BaseDownloadProgressBar, Bar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
    pass


class DownloadProgressSpinner(
    WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # type: () -> str
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        # type: () -> None
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = "".join(
            [
                message,
                " " if message else "",
                phase,
                " " if suffix else "",
                suffix,
            ]
        )

        self.writeln(line)


BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
}


def DownloadProgressProvider(progress_bar, max=None):  # type: ignore
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter
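
A hedged usage sketch: DownloadProgressProvider picks a spinner when the total size is unknown and a bar otherwise, then returns the bound iter() method, which yields chunks while advancing the display. The byte chunks below are made up for illustration:

from pip._internal.cli.progress_bars import DownloadProgressProvider

chunks = [b"x" * 1024] * 8  # illustrative download chunks

progress = DownloadProgressProvider("on", max=8 * 1024)
for chunk in progress(chunks):
    pass  # real code would write each chunk to disk here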
@ -0,0 +1,461 @@
"""Contains the Command base classes that depend on PipSession.

The classes in this module are in a separate module so the commands not
needing download / PackageFinder capability don't unnecessarily import the
PackageFinder machinery and all its vendored dependencies, etc.
"""

import logging
import os
import sys
from functools import partial
from optparse import Values
from typing import Any, List, Optional, Tuple

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.network.session import PipSession
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
    install_req_from_parsed_requirement,
    install_req_from_req_string,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolution.base import BaseResolver
from pip._internal.self_outdated_check import pip_self_version_check
from pip._internal.utils.temp_dir import (
    TempDirectory,
    TempDirectoryTypeRegistry,
    tempdir_kinds,
)
from pip._internal.utils.virtualenv import running_under_virtualenv

logger = logging.getLogger(__name__)


class SessionCommandMixin(CommandContextMixIn):

    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self):
        # type: () -> None
        super().__init__()
        self._session = None  # Optional[PipSession]

    @classmethod
    def _get_index_urls(cls, options):
        # type: (Values) -> Optional[List[str]]
        """Return a list of index urls from user-provided options."""
        index_urls = []
        if not getattr(options, "no_index", False):
            url = getattr(options, "index_url", None)
            if url:
                index_urls.append(url)
        urls = getattr(options, "extra_index_urls", None)
        if urls:
            index_urls.extend(urls)
        # Return None rather than an empty list
        return index_urls or None

    def get_default_session(self, options):
        # type: (Values) -> PipSession
        """Get a default-managed session."""
        if self._session is None:
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(self, options, retries=None, timeout=None):
        # type: (Values, Optional[int], Optional[int]) -> PipSession
        assert not options.cache_dir or os.path.isabs(options.cache_dir)
        session = PipSession(
            cache=(
                os.path.join(options.cache_dir, "http") if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = timeout if timeout is not None else options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session


class IndexGroupCommand(Command, SessionCommandMixin):

    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options):
        # type: (Values) -> None
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        if options.disable_pip_version_check or options.no_index:
            return

        # Otherwise, check if we're using the latest version of pip available.
        session = self._build_session(
            options, retries=0, timeout=min(5, options.timeout)
        )
        with session:
            pip_self_version_check(session, options)


KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]


def warn_if_run_as_root():
    # type: () -> None
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    if running_under_virtualenv():
        return
    if not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return
    if sys.platform == "darwin" or sys.platform == "linux":
        if os.getuid() != 0:
            return
    logger.warning(
        "Running pip as root will break packages and permissions. "
        "You should install packages reliably by using venv: "
        "https://pip.pypa.io/warnings/venv"
    )


def with_cleanup(func):
    # type: (Any) -> Any
    """Decorator for common logic related to managing temporary
    directories.
    """

    def configure_tempdir_registry(registry):
        # type: (TempDirectoryTypeRegistry) -> None
        for t in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(t, False)

    def wrapper(self, options, args):
        # type: (RequirementCommand, Values, List[Any]) -> Optional[int]
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper


class RequirementCommand(IndexGroupCommand):
    def __init__(self, *args, **kw):
        # type: (Any, Any) -> None
        super().__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())

    @staticmethod
    def determine_resolver_variant(options):
        # type: (Values) -> str
        """Determines which resolver should be used, based on the given options."""
        if "legacy-resolver" in options.deprecated_features_enabled:
            return "legacy"

        return "2020-resolver"

    @classmethod
    def make_requirement_preparer(
        cls,
        temp_build_dir,  # type: TempDirectory
        options,  # type: Values
        req_tracker,  # type: RequirementTracker
        session,  # type: PipSession
        finder,  # type: PackageFinder
        use_user_site,  # type: bool
        download_dir=None,  # type: str
    ):
        # type: (...) -> RequirementPreparer
        """
        Create a RequirementPreparer instance for the given parameters.
        """
        temp_build_dir_path = temp_build_dir.path
        assert temp_build_dir_path is not None

        resolver_variant = cls.determine_resolver_variant(options)
        if resolver_variant == "2020-resolver":
            lazy_wheel = "fast-deps" in options.features_enabled
            if lazy_wheel:
                logger.warning(
                    "pip is using lazily downloaded wheels using HTTP "
                    "range requests to obtain dependency information. "
                    "This experimental feature is enabled through "
                    "--use-feature=fast-deps and it is not ready for "
                    "production."
                )
        else:
            lazy_wheel = False
            if "fast-deps" in options.features_enabled:
                logger.warning(
                    "fast-deps has no effect when used with the legacy resolver."
                )

        return RequirementPreparer(
            build_dir=temp_build_dir_path,
            src_dir=options.src_dir,
            download_dir=download_dir,
            build_isolation=options.build_isolation,
            req_tracker=req_tracker,
            session=session,
            progress_bar=options.progress_bar,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
            lazy_wheel=lazy_wheel,
            in_tree_build="in-tree-build" in options.features_enabled,
        )

    @classmethod
    def make_resolver(
        cls,
        preparer,  # type: RequirementPreparer
        finder,  # type: PackageFinder
        options,  # type: Values
        wheel_cache=None,  # type: Optional[WheelCache]
        use_user_site=False,  # type: bool
        ignore_installed=True,  # type: bool
        ignore_requires_python=False,  # type: bool
        force_reinstall=False,  # type: bool
        upgrade_strategy="to-satisfy-only",  # type: str
        use_pep517=None,  # type: Optional[bool]
        py_version_info=None,  # type: Optional[Tuple[int, ...]]
    ):
        # type: (...) -> BaseResolver
        """
        Create a Resolver instance for the given parameters.
        """
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation are needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain about
        # the "Resolver" class being redefined.
        if resolver_variant == "2020-resolver":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )

    def get_requirements(
        self,
        args,  # type: List[str]
        options,  # type: Values
        finder,  # type: PackageFinder
        session,  # type: PipSession
    ):
        # type: (...) -> List[InstallRequirement]
        """
        Parse command-line arguments into the corresponding requirements.
        """
        requirements = []  # type: List[InstallRequirement]
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                filename,
                constraint=True,
                finder=finder,
                options=options,
                session=session,
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    user_supplied=False,
                )
                requirements.append(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req,
                None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
            )
            requirements.append(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
            )
            requirements.append(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, finder=finder, options=options, session=session
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517,
                    user_supplied=True,
                )
                requirements.append(req_to_add)

        # If any requirement has hash options, enable hash checking.
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        if not (args or options.editables or options.requirements):
            opts = {"name": self.name}
            if options.find_links:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=" ".join(options.find_links))
                    )
                )
            else:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(see "pip help {name}")'.format(**opts)
                )

        return requirements

    @staticmethod
    def trace_basic_info(finder):
        # type: (PackageFinder) -> None
        """
        Trace basic information about the provided objects.
        """
        # Display where finder is looking for packages
        search_scope = finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

    def _build_package_finder(
        self,
        options,  # type: Values
        session,  # type: PipSession
        target_python=None,  # type: Optional[TargetPython]
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> PackageFinder
        """
        Create a package finder appropriate to this requirement command.

        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        link_collector = LinkCollector.create(session, options=options)
        selection_prefs = SelectionPreferences(
            allow_yanked=True,
            format_control=options.format_control,
            allow_all_prereleases=options.pre,
            prefer_binary=options.prefer_binary,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )
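
A hedged sketch of the resolver selection above: only the deprecated "legacy-resolver" feature flag switches away from the default 2020 resolver. optparse.Values is built from a plain dict here just for illustration:

from optparse import Values

from pip._internal.cli.req_command import RequirementCommand

opts = Values({"deprecated_features_enabled": []})
assert RequirementCommand.determine_resolver_variant(opts) == "2020-resolver"

opts = Values({"deprecated_features_enabled": ["legacy-resolver"]})
assert RequirementCommand.determine_resolver_variant(opts) == "legacy"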
@ -0,0 +1,172 @@
import contextlib
import itertools
import logging
import sys
import time
from typing import IO, Iterator

from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation

logger = logging.getLogger(__name__)


class SpinnerInterface:
    def spin(self):
        # type: () -> None
        raise NotImplementedError()

    def finish(self, final_status):
        # type: (str) -> None
        raise NotImplementedError()


class InteractiveSpinner(SpinnerInterface):
    def __init__(
        self,
        message,
        file=None,
        spin_chars="-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds=0.125,
    ):
        # type: (str, IO[str], str, float) -> None
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        # type: (str) -> None
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    def __init__(self, message, min_update_interval_seconds=60):
        # type: (str, float) -> None
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        # type: (str) -> None
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True


class RateLimiter:
    def __init__(self, min_update_interval_seconds):
        # type: (float) -> None
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0  # type: float

    def ready(self):
        # type: () -> bool
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        # type: () -> None
        self._last_update = time.time()


@contextlib.contextmanager
def open_spinner(message):
    # type: (str) -> Iterator[SpinnerInterface]
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")


@contextlib.contextmanager
def hidden_cursor(file):
    # type: (IO[str]) -> Iterator[None]
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
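
A hedged usage sketch for open_spinner() (assuming this module lands at pip._internal.cli.spinners): the context manager picks an interactive or logging-based spinner and settles on "done", "error", or "canceled" depending on how the block exits:

import time

from pip._internal.cli.spinners import open_spinner

with open_spinner("Doing something slow") as spinner:
    for _ in range(20):
        time.sleep(0.05)  # stand-in for real work
        spinner.spin()    # rate-limited; cheap to call often
# on normal exit the spinner prints "... done"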
@ -0,0 +1,6 @@
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
@ -0,0 +1,110 @@
"""
Package containing all pip commands
"""

import importlib
from collections import OrderedDict, namedtuple
from typing import Any, Optional

from pip._internal.cli.base_command import Command

CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')

# The ordering matters for help display.
# Also, even though the module path starts with the same
# "pip._internal.commands" prefix in each case, we include the full path
# because it makes testing easier (specifically when modifying commands_dict
# in test setup / teardown by adding info for a FakeCommand class defined
# in a test-related module).
# Finally, we need to pass an iterable of pairs here rather than a dict
# so that the ordering won't be lost when using Python 2.7.
commands_dict = OrderedDict([
    ('install', CommandInfo(
        'pip._internal.commands.install', 'InstallCommand',
        'Install packages.',
    )),
    ('download', CommandInfo(
        'pip._internal.commands.download', 'DownloadCommand',
        'Download packages.',
    )),
    ('uninstall', CommandInfo(
        'pip._internal.commands.uninstall', 'UninstallCommand',
        'Uninstall packages.',
    )),
    ('freeze', CommandInfo(
        'pip._internal.commands.freeze', 'FreezeCommand',
        'Output installed packages in requirements format.',
    )),
    ('list', CommandInfo(
        'pip._internal.commands.list', 'ListCommand',
        'List installed packages.',
    )),
    ('show', CommandInfo(
        'pip._internal.commands.show', 'ShowCommand',
        'Show information about installed packages.',
    )),
    ('check', CommandInfo(
        'pip._internal.commands.check', 'CheckCommand',
        'Verify installed packages have compatible dependencies.',
    )),
    ('config', CommandInfo(
        'pip._internal.commands.configuration', 'ConfigurationCommand',
        'Manage local and global configuration.',
    )),
    ('search', CommandInfo(
        'pip._internal.commands.search', 'SearchCommand',
        'Search PyPI for packages.',
    )),
    ('cache', CommandInfo(
        'pip._internal.commands.cache', 'CacheCommand',
        "Inspect and manage pip's wheel cache.",
    )),
    ('wheel', CommandInfo(
        'pip._internal.commands.wheel', 'WheelCommand',
        'Build wheels from your requirements.',
    )),
    ('hash', CommandInfo(
        'pip._internal.commands.hash', 'HashCommand',
        'Compute hashes of package archives.',
    )),
    ('completion', CommandInfo(
        'pip._internal.commands.completion', 'CompletionCommand',
        'A helper command used for command completion.',
    )),
    ('debug', CommandInfo(
        'pip._internal.commands.debug', 'DebugCommand',
        'Show information useful for debugging.',
    )),
    ('help', CommandInfo(
        'pip._internal.commands.help', 'HelpCommand',
        'Show help for commands.',
    )),
])  # type: OrderedDict[str, CommandInfo]


def create_command(name, **kwargs):
    # type: (str, **Any) -> Command
    """
    Create an instance of the Command class with the given name.
    """
    module_path, class_name, summary = commands_dict[name]
    module = importlib.import_module(module_path)
    command_class = getattr(module, class_name)
    command = command_class(name=name, summary=summary, **kwargs)

    return command


def get_similar_commands(name):
    # type: (str) -> Optional[str]
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return None
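
Usage sketch: create_command() imports the command module lazily and instantiates the class, while get_similar_commands() powers the 'maybe you meant "install"' hint raised by parse_command():

from pip._internal.commands import create_command, get_similar_commands

cmd = create_command("install", isolated=False)
print(cmd.name, "-", cmd.summary)      # install - Install packages.

print(get_similar_commands("instal"))  # install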
@ -0,0 +1,228 @@
|
||||
import logging
|
||||
import os
|
||||
import textwrap
|
||||
from optparse import Values
|
||||
from typing import Any, List
|
||||
|
||||
import pip._internal.utils.filesystem as filesystem
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.exceptions import CommandError, PipError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CacheCommand(Command):
|
||||
"""
|
||||
Inspect and manage pip's wheel cache.
|
||||
|
||||
Subcommands:
|
||||
|
||||
- dir: Show the cache directory.
|
||||
- info: Show information about the cache.
|
||||
- list: List filenames of packages stored in the cache.
|
||||
- remove: Remove one or more package from the cache.
|
||||
- purge: Remove all items from the cache.
|
||||
|
||||
``<pattern>`` can be a glob expression or a package name.
|
||||
"""
|
||||
|
||||
ignore_require_venv = True
|
||||
usage = """
|
||||
%prog dir
|
||||
%prog info
|
||||
%prog list [<pattern>] [--format=[human, abspath]]
|
||||
%prog remove <pattern>
|
||||
%prog purge
|
||||
"""
|
||||
|
||||
def add_options(self):
|
||||
# type: () -> None
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'--format',
|
||||
action='store',
|
||||
dest='list_format',
|
||||
default="human",
|
||||
choices=('human', 'abspath'),
|
||||
help="Select the output format among: human (default) or abspath"
|
||||
)
|
||||
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
# type: (Values, List[Any]) -> int
|
||||
handlers = {
|
||||
"dir": self.get_cache_dir,
|
||||
"info": self.get_cache_info,
|
||||
"list": self.list_cache_items,
|
||||
"remove": self.remove_cache_items,
|
||||
"purge": self.purge_cache,
|
||||
}
|
||||
|
||||
if not options.cache_dir:
|
||||
logger.error("pip cache commands can not "
|
||||
"function since cache is disabled.")
|
||||
return ERROR
|
||||
|
||||
# Determine action
|
||||
if not args or args[0] not in handlers:
|
||||
logger.error(
|
||||
"Need an action (%s) to perform.",
|
||||
", ".join(sorted(handlers)),
|
||||
)
|
||||
return ERROR
|
||||
|
||||
action = args[0]
|
||||
|
||||
# Error handling happens here, not in the action-handlers.
|
||||
try:
|
||||
handlers[action](options, args[1:])
|
||||
except PipError as e:
|
||||
logger.error(e.args[0])
|
||||
return ERROR
|
||||
|
||||
return SUCCESS
|
||||
|
||||
def get_cache_dir(self, options, args):
|
||||
# type: (Values, List[Any]) -> None
|
||||
if args:
|
||||
raise CommandError('Too many arguments')
|
||||
|
||||
logger.info(options.cache_dir)
|
||||
|
||||
def get_cache_info(self, options, args):
|
||||
# type: (Values, List[Any]) -> None
|
||||
if args:
|
||||
raise CommandError('Too many arguments')
|
||||
|
||||
num_http_files = len(self._find_http_files(options))
|
||||
num_packages = len(self._find_wheels(options, '*'))
|
||||
|
||||
http_cache_location = self._cache_dir(options, 'http')
|
||||
wheels_cache_location = self._cache_dir(options, 'wheels')
|
||||
http_cache_size = filesystem.format_directory_size(http_cache_location)
|
||||
wheels_cache_size = filesystem.format_directory_size(
|
||||
wheels_cache_location
|
||||
)
|
||||
|
||||
message = textwrap.dedent("""
|
||||
Package index page cache location: {http_cache_location}
|
||||
Package index page cache size: {http_cache_size}
|
||||
Number of HTTP files: {num_http_files}
|
||||
Wheels location: {wheels_cache_location}
|
||||
Wheels size: {wheels_cache_size}
|
||||
Number of wheels: {package_count}
|
||||
""").format(
|
||||
http_cache_location=http_cache_location,
|
||||
http_cache_size=http_cache_size,
|
||||
num_http_files=num_http_files,
|
||||
wheels_cache_location=wheels_cache_location,
|
||||
package_count=num_packages,
|
||||
wheels_cache_size=wheels_cache_size,
|
||||
).strip()
|
||||
|
||||
logger.info(message)
|
||||
|
||||
def list_cache_items(self, options, args):
|
||||
# type: (Values, List[Any]) -> None
|
||||
if len(args) > 1:
|
||||
raise CommandError('Too many arguments')
|
||||
|
||||
if args:
|
||||
pattern = args[0]
|
||||
else:
|
||||
pattern = '*'
|
||||
|
||||
files = self._find_wheels(options, pattern)
|
||||
if options.list_format == 'human':
|
||||
self.format_for_human(files)
|
||||
else:
|
||||
self.format_for_abspath(files)
|
||||
|
||||
def format_for_human(self, files):
|
||||
# type: (List[str]) -> None
|
||||
if not files:
|
||||
logger.info('Nothing cached.')
|
||||
return
|
||||
|
||||
results = []
|
||||
for filename in files:
|
||||
wheel = os.path.basename(filename)
|
||||
size = filesystem.format_file_size(filename)
|
||||
results.append(f' - {wheel} ({size})')
|
||||
logger.info('Cache contents:\n')
|
||||
logger.info('\n'.join(sorted(results)))
|
||||
|
||||
def format_for_abspath(self, files):
|
||||
# type: (List[str]) -> None
|
||||
if not files:
|
||||
return
|
||||
|
||||
results = []
|
||||
for filename in files:
|
||||
results.append(filename)
|
||||
|
||||
logger.info('\n'.join(sorted(results)))
|
||||
|
||||
def remove_cache_items(self, options, args):
|
||||
# type: (Values, List[Any]) -> None
|
||||
if len(args) > 1:
|
||||
raise CommandError('Too many arguments')
|
||||
|
||||
if not args:
|
||||
raise CommandError('Please provide a pattern')
|
||||
|
||||
files = self._find_wheels(options, args[0])
|
||||
|
||||
# Only fetch http files if no specific pattern given
|
||||
if args[0] == '*':
|
||||
files += self._find_http_files(options)
|
||||
|
||||
if not files:
|
||||
raise CommandError('No matching packages')
|
||||
|
||||
for filename in files:
|
||||
os.unlink(filename)
|
||||
logger.debug('Removed %s', filename)
|
||||
logger.info('Files removed: %s', len(files))
|
||||
|
||||
def purge_cache(self, options, args):
|
||||
# type: (Values, List[Any]) -> None
|
||||
if args:
|
||||
raise CommandError('Too many arguments')
|
||||
|
||||
return self.remove_cache_items(options, ['*'])
|
||||
|
||||
def _cache_dir(self, options, subdir):
|
||||
# type: (Values, str) -> str
|
||||
return os.path.join(options.cache_dir, subdir)
|
||||
|
||||
def _find_http_files(self, options):
|
||||
# type: (Values) -> List[str]
|
||||
http_dir = self._cache_dir(options, 'http')
|
||||
return filesystem.find_files(http_dir, '*')
|
||||
|
||||
def _find_wheels(self, options, pattern):
|
||||
# type: (Values, str) -> List[str]
|
||||
wheel_dir = self._cache_dir(options, 'wheels')
|
||||
|
||||
# The wheel filename format, as specified in PEP 427, is:
|
||||
# {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
|
||||
#
|
||||
# Additionally, non-alphanumeric values in the distribution are
|
||||
# normalized to underscores (_), meaning hyphens can never occur
|
||||
# before `-{version}`.
|
||||
#
|
||||
# Given that information:
|
||||
# - If the pattern we're given contains a hyphen (-), the user is
|
||||
# providing at least the version. Thus, we can just append `*.whl`
|
||||
# to match the rest of it.
|
||||
# - If the pattern we're given doesn't contain a hyphen (-), the
|
||||
# user is only providing the name. Thus, we append `-*.whl` to
|
||||
# match the hyphen before the version, followed by anything else.
|
||||
#
|
||||
# PEP 427: https://www.python.org/dev/peps/pep-0427/
|
||||
pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
|
||||
|
||||
return filesystem.find_files(wheel_dir, pattern)
|
||||
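
Note: a quick standalone sketch of the pattern expansion performed at the end of _find_wheels above; the package names here are made up for illustration and this snippet is not part of the diff:

    # Mirrors the one-liner above: name-only patterns get '-*.whl' appended,
    # while patterns that already contain a hyphen (i.e. include at least a
    # version prefix) get '*.whl'.
    def expand(pattern):
        return pattern + ("*.whl" if "-" in pattern else "-*.whl")

    assert expand("requests") == "requests-*.whl"            # name only
    assert expand("requests-2.25") == "requests-2.25*.whl"   # name + version
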
@@ -0,0 +1,48 @@
import logging
from optparse import Values
from typing import Any, List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
    check_package_set,
    create_package_set_from_installed,
)
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options, args):
        # type: (Values, List[Any]) -> int

        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting or parsing_probs:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
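
Note: a minimal sketch of driving the same check machinery programmatically, assuming the pip-internal imports above; these are internal APIs (not a supported public interface), so treat this as illustrative only:

    # Build the set of installed packages, then report anything missing,
    # mirroring the loop in CheckCommand.run() above.
    package_set, _parsing_probs = create_package_set_from_installed()
    missing, conflicting = check_package_set(package_set)
    for name, deps in missing.items():
        print(name, "is missing:", [d[0] for d in deps])
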
@@ -0,0 +1,93 @@
import sys
import textwrap
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.utils.misc import get_prog

BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    'zsh': """
        function _pip_completion {{
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
        }}
        compctl -K _pip_completion {prog}
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\ -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        self.cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        self.cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '').format(
                    prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
            return SUCCESS
        else:
            sys.stderr.write(
                'ERROR: You must pass {}\n'.format(' or '.join(shell_options))
            )
            return SUCCESS
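
Note: a minimal sketch of how the completion text above is assembled for one shell, assuming the module-level names defined in this file and a program name of 'pip' (illustrative, not part of the diff):

    # Format the per-shell template, strip its indentation, then wrap it in
    # the BASE_COMPLETION start/end markers, as run() does above.
    script = textwrap.dedent(COMPLETION_SCRIPTS['bash'].format(prog='pip'))
    print(BASE_COMPLETION.format(script=script, shell='bash'))
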
@@ -0,0 +1,280 @@
import logging
import os
import subprocess
from optparse import Values
from typing import Any, List, Optional

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import (
    Configuration,
    Kind,
    get_configuration_files,
    kinds,
)
from pip._internal.exceptions import PipError
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import get_prog, write_output

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with name
    - set: Set the name=value
    - unset: Unset the value associated with name
    - debug: List the configuration files and values defined under them

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
        %prog [<file-option>] debug
    """

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--site',
            dest='site_file',
            action='store_true',
            default=False,
            help='Use the current environment configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        # type: (Values, bool) -> Optional[Kind]
        file_options = [key for key, value in (
            (kinds.USER, options.user_file),
            (kinds.GLOBAL, options.global_file),
            (kinds.SITE, options.site_file),
        ) if value]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global)."
        )

    def list_values(self, options, args):
        # type: (Values, List[str]) -> None
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options, args):
        # type: (Values, List[str]) -> None
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options, args):
        # type: (Values, List[str]) -> None
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        # type: (Values, List[str]) -> None
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options, args):
        # type: (Values, List[str]) -> None
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r",
                                 fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant):
        # type: (Kind) -> None
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.\
                get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self):
        # type: () -> None
        """Get key-values pairs present as environment variables"""
        write_output("%s:", 'env_var')
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f'PIP_{key.upper()}'
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options, args):
        # type: (Values, List[str]) -> None
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        # type: (List[str], str, int) -> Any
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # type: () -> None
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        # type: (Values) -> str
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
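
Note: a condensed restatement of the file-selection precedence in _determine_file above, as a standalone sketch with hypothetical string kinds instead of pip's Kind values (illustrative only):

    def pick(file_options, need_value, site_file_exists):
        # No --user/--global/--site flag passed:
        if not file_options:
            if not need_value:
                return None               # read-only actions load every file
            # modifying actions prefer an existing site file, else the user file
            return "site" if site_file_exists else "user"
        # Exactly one flag: that file wins.
        if len(file_options) == 1:
            return file_options[0]
        # More than one flag is an error.
        raise ValueError("need exactly one of --user/--site/--global")
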
@@ -0,0 +1,215 @@
import locale
import logging
import os
import sys
from optparse import Values
from types import ModuleType
from typing import Any, Dict, List, Optional

import pip._vendor
from pip._vendor.certifi import where
from pip._vendor.packaging.version import parse as parse_version

from pip import __file__ as pip_location
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.configuration import Configuration
from pip._internal.metadata import get_environment
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import get_pip_version

logger = logging.getLogger(__name__)


def show_value(name, value):
    # type: (str, Any) -> None
    logger.info('%s: %s', name, value)


def show_sys_implementation():
    # type: () -> None
    logger.info('sys.implementation:')
    implementation_name = sys.implementation.name
    with indent_log():
        show_value('name', implementation_name)


def create_vendor_txt_map():
    # type: () -> Dict[str, str]
    vendor_txt_path = os.path.join(
        os.path.dirname(pip_location),
        '_vendor',
        'vendor.txt'
    )

    with open(vendor_txt_path) as f:
        # Purge non version specifying lines.
        # Also, remove any space prefix or suffixes (including comments).
        lines = [line.strip().split(' ', 1)[0]
                 for line in f.readlines() if '==' in line]

    # Transform into "module" -> version dict.
    return dict(line.split('==', 1) for line in lines)  # type: ignore


def get_module_from_module_name(module_name):
    # type: (str) -> ModuleType
    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower()
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == 'setuptools':
        module_name = 'pkg_resources'

    __import__(
        f'pip._vendor.{module_name}',
        globals(),
        locals(),
        level=0
    )
    return getattr(pip._vendor, module_name)


def get_vendor_version_from_module(module_name):
    # type: (str) -> Optional[str]
    module = get_module_from_module_name(module_name)
    version = getattr(module, '__version__', None)

    if not version:
        # Try to find version in debundled module info.
        env = get_environment([os.path.dirname(module.__file__)])
        dist = env.get_distribution(module_name)
        if dist:
            version = str(dist.version)

    return version


def show_actual_vendor_versions(vendor_txt_versions):
    # type: (Dict[str, str]) -> None
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ''
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            extra_message = ' (Unable to locate actual module version, using'\
                            ' vendor.txt specified version)'
            actual_version = expected_version
        elif parse_version(actual_version) != parse_version(expected_version):
            extra_message = ' (CONFLICT: vendor.txt suggests version should'\
                            ' be {})'.format(expected_version)
        logger.info('%s==%s%s', module_name, actual_version, extra_message)


def show_vendor_versions():
    # type: () -> None
    logger.info('vendored library versions:')

    vendor_txt_versions = create_vendor_txt_map()
    with indent_log():
        show_actual_vendor_versions(vendor_txt_versions)


def show_tags(options):
    # type: (Values) -> None
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = ''
    if formatted_target:
        suffix = f' (target: {formatted_target})'

    msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
    logger.info(msg)

    if options.verbose < 1 and len(tags) > tag_limit:
        tags_limited = True
        tags = tags[:tag_limit]
    else:
        tags_limited = False

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            msg = (
                '...\n'
                '[First {tag_limit} tags shown. Pass --verbose to show all.]'
            ).format(tag_limit=tag_limit)
            logger.info(msg)


def ca_bundle_info(config):
    # type: (Configuration) -> str
    levels = set()
    for key, _ in config.items():
        levels.add(key.split('.')[0])

    if not levels:
        return "Not specified"

    levels_that_override_global = ['install', 'wheel', 'download']
    global_overriding_level = [
        level for level in levels if level in levels_that_override_global
    ]
    if not global_overriding_level:
        return 'global'

    if 'global' in levels:
        levels.remove('global')
    return ", ".join(levels)


class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    ignore_require_venv = True

    def add_options(self):
        # type: () -> None
        cmdoptions.add_target_python_options(self.cmd_opts)
        self.parser.insert_option_group(0, self.cmd_opts)
        self.parser.config.load()

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        show_value('pip version', get_pip_version())
        show_value('sys.version', sys.version)
        show_value('sys.executable', sys.executable)
        show_value('sys.getdefaultencoding', sys.getdefaultencoding())
        show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
        show_value(
            'locale.getpreferredencoding', locale.getpreferredencoding(),
        )
        show_value('sys.platform', sys.platform)
        show_sys_implementation()

        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
        show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        show_vendor_versions()

        show_tags(options)

        return SUCCESS
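
Note: a toy illustration of the vendor.txt parsing done by create_vendor_txt_map above, using a made-up file body instead of pip's real vendor.txt (standalone, illustrative only):

    sample = """
    appdirs==1.4.4
    CacheControl==0.12.6  # trailing comment gets stripped
    not-a-pin
    """
    # Keep only '==' pins, take the text before the first space.
    lines = [line.strip().split(' ', 1)[0]
             for line in sample.splitlines() if '==' in line]
    print(dict(line.split('==', 1) for line in lines))
    # -> {'appdirs': '1.4.4', 'CacheControl': '0.12.6'}
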
@@ -0,0 +1,141 @@
import logging
import os
from optparse import Values
from typing import List

from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.build_dir())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        self.cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options, args):
        # type: (Values, List[str]) -> int

        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(
            reqs, check_supported_wheels=True
        )

        downloaded = []  # type: List[str]
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)
        if downloaded:
            write_output('Successfully downloaded %s', ' '.join(downloaded))

        return SUCCESS
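
Note: example invocation of the command defined above (the package name is illustrative):

    pip download -d ./wheelhouse requests

This resolves requests plus its dependencies and saves the distribution files into ./wheelhouse without installing anything.
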
@@ -0,0 +1,104 @@
import sys
from optparse import Values
from typing import List

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs
from pip._internal.utils.deprecation import deprecated

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    Packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' {}'.format(', '.join(DEV_PKGS)))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable packages from output.')
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        if options.excludes:
            skip.update(options.excludes)

        cmdoptions.check_list_path_option(options)

        if options.find_links:
            deprecated(
                "--find-links option in pip freeze is deprecated.",
                replacement=None,
                gone_in="21.2",
                issue=9069,
            )

        for line in freeze(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + '\n')
        return SUCCESS
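
Note: typical invocations of the command defined above (illustrative):

    pip freeze > requirements.txt    # pin the current environment
    pip freeze -r requirements.txt   # keep that file's ordering and comments
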
@@ -0,0 +1,58 @@
import hashlib
import logging
import sys
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks, write_output

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = '%prog [options] <file> ...'
    ignore_require_venv = True

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of {}'.format(
                ', '.join(STRONG_HASHES)))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            write_output('%s:\n--hash=%s:%s',
                         path, algorithm, _hash_of_file(path, algorithm))
        return SUCCESS


def _hash_of_file(path, algorithm):
    # type: (str, str) -> str
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
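
Note: example session for the command above; the file name is illustrative and the digest shown is a placeholder, not a real hash:

    $ pip hash ./example_pkg-1.0-py3-none-any.whl
    ./example_pkg-1.0-py3-none-any.whl:
    --hash=sha256:<64-hex-digit-digest>

The emitted line can be appended to a requirement in a requirements file to enable hash-checking mode.
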
@@ -0,0 +1,42 @@
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = [f'unknown command "{cmd_name}"']
            if guess:
                msg.append(f'maybe you meant "{guess}"')

            raise CommandError(' - '.join(msg))

        command = create_command(cmd_name)
        command.parser.print_help()

        return SUCCESS
@ -0,0 +1,740 @@
|
||||
import errno
|
||||
import logging
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import site
|
||||
from optparse import SUPPRESS_HELP, Values
|
||||
from typing import Iterable, List, Optional
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.cmdoptions import make_target_python
|
||||
from pip._internal.cli.req_command import (
|
||||
RequirementCommand,
|
||||
warn_if_run_as_root,
|
||||
with_cleanup,
|
||||
)
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.exceptions import CommandError, InstallationError
|
||||
from pip._internal.locations import get_scheme
|
||||
from pip._internal.metadata import get_environment
|
||||
from pip._internal.models.format_control import FormatControl
|
||||
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
|
||||
from pip._internal.req import install_given_reqs
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.req.req_tracker import get_requirement_tracker
|
||||
from pip._internal.utils.distutils_args import parse_distutils_args
|
||||
from pip._internal.utils.filesystem import test_writable_dir
|
||||
from pip._internal.utils.misc import (
|
||||
ensure_dir,
|
||||
get_pip_version,
|
||||
protect_pip_from_modification_on_windows,
|
||||
write_output,
|
||||
)
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.virtualenv import (
|
||||
running_under_virtualenv,
|
||||
virtualenv_no_global,
|
||||
)
|
||||
from pip._internal.wheel_builder import (
|
||||
BinaryAllowedPredicate,
|
||||
build,
|
||||
should_build_for_install_command,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_check_binary_allowed(format_control):
|
||||
# type: (FormatControl) -> BinaryAllowedPredicate
|
||||
def check_binary_allowed(req):
|
||||
# type: (InstallRequirement) -> bool
|
||||
canonical_name = canonicalize_name(req.name or "")
|
||||
allowed_formats = format_control.get_allowed_formats(canonical_name)
|
||||
return "binary" in allowed_formats
|
||||
|
||||
return check_binary_allowed
|
||||
|
||||
|
||||
class InstallCommand(RequirementCommand):
|
||||
"""
|
||||
Install packages from:
|
||||
|
||||
- PyPI (and other indexes) using requirement specifiers.
|
||||
- VCS project urls.
|
||||
- Local project directories.
|
||||
- Local or remote source archives.
|
||||
|
||||
pip also supports installing from "requirements files", which provide
|
||||
an easy way to specify a whole environment to be installed.
|
||||
"""
|
||||
|
||||
usage = """
|
||||
%prog [options] <requirement specifier> [package-index-options] ...
|
||||
%prog [options] -r <requirements file> [package-index-options] ...
|
||||
%prog [options] [-e] <vcs project url> ...
|
||||
%prog [options] [-e] <local project path> ...
|
||||
%prog [options] <archive url/path> ..."""
|
||||
|
||||
def add_options(self):
|
||||
# type: () -> None
|
||||
self.cmd_opts.add_option(cmdoptions.requirements())
|
||||
self.cmd_opts.add_option(cmdoptions.constraints())
|
||||
self.cmd_opts.add_option(cmdoptions.no_deps())
|
||||
self.cmd_opts.add_option(cmdoptions.pre())
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.editable())
|
||||
self.cmd_opts.add_option(
|
||||
'-t', '--target',
|
||||
dest='target_dir',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help='Install packages into <dir>. '
|
||||
'By default this will not replace existing files/folders in '
|
||||
'<dir>. Use --upgrade to replace existing packages in <dir> '
|
||||
'with new versions.'
|
||||
)
|
||||
cmdoptions.add_target_python_options(self.cmd_opts)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'--user',
|
||||
dest='use_user_site',
|
||||
action='store_true',
|
||||
help="Install to the Python user install directory for your "
|
||||
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
|
||||
"Windows. (See the Python documentation for site.USER_BASE "
|
||||
"for full details.)")
|
||||
self.cmd_opts.add_option(
|
||||
'--no-user',
|
||||
dest='use_user_site',
|
||||
action='store_false',
|
||||
help=SUPPRESS_HELP)
|
||||
self.cmd_opts.add_option(
|
||||
'--root',
|
||||
dest='root_path',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help="Install everything relative to this alternate root "
|
||||
"directory.")
|
||||
self.cmd_opts.add_option(
|
||||
'--prefix',
|
||||
dest='prefix_path',
|
||||
metavar='dir',
|
||||
default=None,
|
||||
help="Installation prefix where lib, bin and other top-level "
|
||||
"folders are placed")
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.build_dir())
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.src())
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'-U', '--upgrade',
|
||||
dest='upgrade',
|
||||
action='store_true',
|
||||
help='Upgrade all specified packages to the newest available '
|
||||
'version. The handling of dependencies depends on the '
|
||||
'upgrade-strategy used.'
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'--upgrade-strategy',
|
||||
dest='upgrade_strategy',
|
||||
default='only-if-needed',
|
||||
choices=['only-if-needed', 'eager'],
|
||||
help='Determines how dependency upgrading should be handled '
|
||||
'[default: %default]. '
|
||||
'"eager" - dependencies are upgraded regardless of '
|
||||
'whether the currently installed version satisfies the '
|
||||
'requirements of the upgraded package(s). '
|
||||
'"only-if-needed" - are upgraded only when they do not '
|
||||
'satisfy the requirements of the upgraded package(s).'
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'--force-reinstall',
|
||||
dest='force_reinstall',
|
||||
action='store_true',
|
||||
help='Reinstall all packages even if they are already '
|
||||
'up-to-date.')
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
'-I', '--ignore-installed',
|
||||
dest='ignore_installed',
|
||||
action='store_true',
|
||||
help='Ignore the installed packages, overwriting them. '
|
||||
'This can break your system if the existing package '
|
||||
'is of a different version or was installed '
|
||||
'with a different package manager!'
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
||||
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
|
||||
self.cmd_opts.add_option(cmdoptions.use_pep517())
|
||||
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.install_options())
|
||||
self.cmd_opts.add_option(cmdoptions.global_options())
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--compile",
|
||||
action="store_true",
|
||||
dest="compile",
|
||||
default=True,
|
||||
help="Compile Python source files to bytecode",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--no-compile",
|
||||
action="store_false",
|
||||
dest="compile",
|
||||
help="Do not compile Python source files to bytecode",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(
|
||||
"--no-warn-script-location",
|
||||
action="store_false",
|
||||
dest="warn_script_location",
|
||||
default=True,
|
||||
help="Do not warn when installing scripts outside PATH",
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
"--no-warn-conflicts",
|
||||
action="store_false",
|
||||
dest="warn_about_conflicts",
|
||||
default=True,
|
||||
help="Do not warn about broken dependencies",
|
||||
)
|
||||
|
||||
self.cmd_opts.add_option(cmdoptions.no_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.only_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.prefer_binary())
|
||||
self.cmd_opts.add_option(cmdoptions.require_hashes())
|
||||
self.cmd_opts.add_option(cmdoptions.progress_bar())
|
||||
|
||||
index_opts = cmdoptions.make_option_group(
|
||||
cmdoptions.index_group,
|
||||
self.parser,
|
||||
)
|
||||
|
||||
self.parser.insert_option_group(0, index_opts)
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
@with_cleanup
|
||||
def run(self, options, args):
|
||||
# type: (Values, List[str]) -> int
|
||||
if options.use_user_site and options.target_dir is not None:
|
||||
raise CommandError("Can not combine '--user' and '--target'")
|
||||
|
||||
cmdoptions.check_install_build_global(options)
|
||||
upgrade_strategy = "to-satisfy-only"
|
||||
if options.upgrade:
|
||||
upgrade_strategy = options.upgrade_strategy
|
||||
|
||||
cmdoptions.check_dist_restriction(options, check_target=True)
|
||||
|
||||
install_options = options.install_options or []
|
||||
|
||||
logger.debug("Using %s", get_pip_version())
|
||||
options.use_user_site = decide_user_install(
|
||||
options.use_user_site,
|
||||
prefix_path=options.prefix_path,
|
||||
target_dir=options.target_dir,
|
||||
root_path=options.root_path,
|
||||
isolated_mode=options.isolated_mode,
|
||||
)
|
||||
|
||||
target_temp_dir = None # type: Optional[TempDirectory]
|
||||
target_temp_dir_path = None # type: Optional[str]
|
||||
if options.target_dir:
|
||||
options.ignore_installed = True
|
||||
options.target_dir = os.path.abspath(options.target_dir)
|
||||
if (os.path.exists(options.target_dir) and not
|
||||
os.path.isdir(options.target_dir)):
|
||||
raise CommandError(
|
||||
"Target path exists but is not a directory, will not "
|
||||
"continue."
|
||||
)
|
||||
|
||||
# Create a target directory for using with the target option
|
||||
target_temp_dir = TempDirectory(kind="target")
|
||||
target_temp_dir_path = target_temp_dir.path
|
||||
self.enter_context(target_temp_dir)
|
||||
|
||||
global_options = options.global_options or []
|
||||
|
||||
session = self.get_default_session(options)
|
||||
|
||||
target_python = make_target_python(options)
|
||||
finder = self._build_package_finder(
|
||||
options=options,
|
||||
session=session,
|
||||
target_python=target_python,
|
||||
ignore_requires_python=options.ignore_requires_python,
|
||||
)
|
||||
wheel_cache = WheelCache(options.cache_dir, options.format_control)
|
||||
|
||||
req_tracker = self.enter_context(get_requirement_tracker())
|
||||
|
||||
directory = TempDirectory(
|
||||
delete=not options.no_clean,
|
||||
kind="install",
|
||||
globally_managed=True,
|
||||
)
|
||||
|
||||
try:
|
||||
reqs = self.get_requirements(args, options, finder, session)
|
||||
|
||||
reject_location_related_install_options(
|
||||
reqs, options.install_options
|
||||
)
|
||||
|
||||
preparer = self.make_requirement_preparer(
|
||||
temp_build_dir=directory,
|
||||
options=options,
|
||||
req_tracker=req_tracker,
|
||||
session=session,
|
||||
finder=finder,
|
||||
use_user_site=options.use_user_site,
|
||||
)
|
||||
resolver = self.make_resolver(
|
||||
preparer=preparer,
|
||||
finder=finder,
|
||||
options=options,
|
||||
wheel_cache=wheel_cache,
|
||||
use_user_site=options.use_user_site,
|
||||
ignore_installed=options.ignore_installed,
|
||||
ignore_requires_python=options.ignore_requires_python,
|
||||
force_reinstall=options.force_reinstall,
|
||||
upgrade_strategy=upgrade_strategy,
|
||||
use_pep517=options.use_pep517,
|
||||
)
|
||||
|
||||
self.trace_basic_info(finder)
|
||||
|
||||
requirement_set = resolver.resolve(
|
||||
reqs, check_supported_wheels=not options.target_dir
|
||||
)
|
||||
|
||||
try:
|
||||
pip_req = requirement_set.get_requirement("pip")
|
||||
except KeyError:
|
||||
modifying_pip = False
|
||||
else:
|
||||
# If we're not replacing an already installed pip,
|
||||
# we're not modifying it.
|
||||
modifying_pip = pip_req.satisfied_by is None
|
||||
protect_pip_from_modification_on_windows(
|
||||
modifying_pip=modifying_pip
|
||||
)
|
||||
|
||||
check_binary_allowed = get_check_binary_allowed(
|
||||
finder.format_control
|
||||
)
|
||||
|
||||
reqs_to_build = [
|
||||
r for r in requirement_set.requirements.values()
|
||||
if should_build_for_install_command(
|
||||
r, check_binary_allowed
|
||||
)
|
||||
]
|
||||
|
||||
_, build_failures = build(
|
||||
reqs_to_build,
|
||||
wheel_cache=wheel_cache,
|
||||
verify=True,
|
||||
build_options=[],
|
||||
global_options=[],
|
||||
)
|
||||
|
||||
# If we're using PEP 517, we cannot do a direct install
|
||||
# so we fail here.
|
||||
pep517_build_failure_names = [
|
||||
r.name # type: ignore
|
||||
for r in build_failures if r.use_pep517
|
||||
] # type: List[str]
|
||||
if pep517_build_failure_names:
|
||||
raise InstallationError(
|
||||
"Could not build wheels for {} which use"
|
||||
" PEP 517 and cannot be installed directly".format(
|
||||
", ".join(pep517_build_failure_names)
|
||||
)
|
||||
)
|
||||
|
||||
# For now, we just warn about failures building legacy
|
||||
# requirements, as we'll fall through to a direct
|
||||
# install for those.
|
||||
for r in build_failures:
|
||||
if not r.use_pep517:
|
||||
r.legacy_install_reason = 8368
|
||||
|
||||
to_install = resolver.get_installation_order(
|
||||
requirement_set
|
||||
)
|
||||
|
||||
# Check for conflicts in the package set we're installing.
|
||||
conflicts = None # type: Optional[ConflictDetails]
|
||||
should_warn_about_conflicts = (
|
||||
not options.ignore_dependencies and
|
||||
options.warn_about_conflicts
|
||||
)
|
||||
if should_warn_about_conflicts:
|
||||
conflicts = self._determine_conflicts(to_install)
|
||||
|
||||
# Don't warn about script install locations if
|
||||
# --target has been specified
|
||||
warn_script_location = options.warn_script_location
|
||||
if options.target_dir:
|
||||
warn_script_location = False
|
||||
|
||||
installed = install_given_reqs(
|
||||
to_install,
|
||||
install_options,
|
||||
global_options,
|
||||
root=options.root_path,
|
||||
home=target_temp_dir_path,
|
||||
prefix=options.prefix_path,
|
||||
warn_script_location=warn_script_location,
|
||||
use_user_site=options.use_user_site,
|
||||
pycompile=options.compile,
|
||||
)
|
||||
|
||||
lib_locations = get_lib_location_guesses(
|
||||
user=options.use_user_site,
|
||||
home=target_temp_dir_path,
|
||||
root=options.root_path,
|
||||
prefix=options.prefix_path,
|
||||
isolated=options.isolated_mode,
|
||||
)
|
||||
env = get_environment(lib_locations)
|
||||
|
||||
installed.sort(key=operator.attrgetter('name'))
|
||||
items = []
|
||||
for result in installed:
|
||||
item = result.name
|
||||
try:
|
||||
installed_dist = env.get_distribution(item)
|
||||
if installed_dist is not None:
|
||||
item = f"{item}-{installed_dist.version}"
|
||||
except Exception:
|
||||
pass
|
||||
items.append(item)
|
||||
|
||||
if conflicts is not None:
|
||||
self._warn_about_conflicts(
|
||||
conflicts,
|
||||
resolver_variant=self.determine_resolver_variant(options),
|
||||
)
|
||||
|
||||
installed_desc = ' '.join(items)
|
||||
if installed_desc:
|
||||
write_output(
|
||||
'Successfully installed %s', installed_desc,
|
||||
)
|
||||
except OSError as error:
|
||||
show_traceback = (self.verbosity >= 1)
|
||||
|
||||
message = create_os_error_message(
|
||||
error, show_traceback, options.use_user_site,
|
||||
)
|
||||
logger.error(message, exc_info=show_traceback) # noqa
|
||||
|
||||
return ERROR
|
||||
|
||||
if options.target_dir:
|
||||
assert target_temp_dir
|
||||
self._handle_target_dir(
|
||||
options.target_dir, target_temp_dir, options.upgrade
|
||||
)
|
||||
|
||||
warn_if_run_as_root()
|
||||
return SUCCESS
|
||||
|
||||
def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
|
||||
# type: (str, TempDirectory, bool) -> None
|
||||
ensure_dir(target_dir)
|
||||
|
||||
# Checking both purelib and platlib directories for installed
|
||||
# packages to be moved to target directory
|
||||
lib_dir_list = []
|
||||
|
||||
# Checking both purelib and platlib directories for installed
|
||||
# packages to be moved to target directory
|
||||
scheme = get_scheme('', home=target_temp_dir.path)
|
||||
purelib_dir = scheme.purelib
|
||||
platlib_dir = scheme.platlib
|
||||
data_dir = scheme.data
|
||||
|
||||
if os.path.exists(purelib_dir):
|
||||
lib_dir_list.append(purelib_dir)
|
||||
if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
|
||||
lib_dir_list.append(platlib_dir)
|
||||
if os.path.exists(data_dir):
|
||||
lib_dir_list.append(data_dir)
|
||||
|
||||
for lib_dir in lib_dir_list:
|
||||
for item in os.listdir(lib_dir):
|
||||
if lib_dir == data_dir:
|
||||
ddir = os.path.join(data_dir, item)
|
||||
if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
|
||||
continue
|
||||
target_item_dir = os.path.join(target_dir, item)
|
||||
if os.path.exists(target_item_dir):
|
||||
if not upgrade:
|
||||
logger.warning(
|
||||
'Target directory %s already exists. Specify '
|
||||
'--upgrade to force replacement.',
|
||||
target_item_dir
|
||||
)
|
||||
continue
|
||||
if os.path.islink(target_item_dir):
|
||||
logger.warning(
|
||||
'Target directory %s already exists and is '
|
||||
'a link. pip will not automatically replace '
|
||||
'links, please remove if replacement is '
|
||||
'desired.',
|
||||
target_item_dir
|
||||
)
|
||||
continue
|
||||
if os.path.isdir(target_item_dir):
|
||||
shutil.rmtree(target_item_dir)
|
||||
else:
|
||||
os.remove(target_item_dir)
|
||||
|
||||
shutil.move(
|
||||
os.path.join(lib_dir, item),
|
||||
target_item_dir
|
||||
)
|
||||
|
||||
def _determine_conflicts(self, to_install):
|
||||
# type: (List[InstallRequirement]) -> Optional[ConflictDetails]
|
||||
try:
|
||||
return check_install_conflicts(to_install)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Error while checking for conflicts. Please file an issue on "
|
||||
"pip's issue tracker: https://github.com/pypa/pip/issues/new"
|
||||
)
|
||||
return None
|
||||
|
||||
def _warn_about_conflicts(self, conflict_details, resolver_variant):
|
||||
# type: (ConflictDetails, str) -> None
|
||||
package_set, (missing, conflicting) = conflict_details
|
||||
if not missing and not conflicting:
|
||||
return
|
||||
|
||||
parts = [] # type: List[str]
|
||||
if resolver_variant == "legacy":
|
||||
parts.append(
|
||||
"pip's legacy dependency resolver does not consider dependency "
|
||||
"conflicts when selecting packages. This behaviour is the "
|
||||
"source of the following dependency conflicts."
|
||||
)
|
||||
else:
|
||||
assert resolver_variant == "2020-resolver"
|
||||
parts.append(
|
||||
"pip's dependency resolver does not currently take into account "
|
||||
"all the packages that are installed. This behaviour is the "
|
||||
"source of the following dependency conflicts."
|
||||
)
|
||||
|
||||
# NOTE: There is some duplication here, with commands/check.py
|
||||
for project_name in missing:
|
||||
version = package_set[project_name][0]
|
||||
for dependency in missing[project_name]:
|
||||
message = (
|
||||
"{name} {version} requires {requirement}, "
|
||||
"which is not installed."
|
||||
).format(
|
||||
name=project_name,
|
||||
version=version,
|
||||
requirement=dependency[1],
|
||||
)
|
||||
parts.append(message)
|
||||
|
||||
for project_name in conflicting:
|
||||
version = package_set[project_name][0]
|
||||
for dep_name, dep_version, req in conflicting[project_name]:
|
||||
message = (
|
||||
"{name} {version} requires {requirement}, but {you} have "
|
||||
"{dep_name} {dep_version} which is incompatible."
|
||||
).format(
|
||||
name=project_name,
|
||||
version=version,
|
||||
requirement=req,
|
||||
dep_name=dep_name,
|
||||
dep_version=dep_version,
|
||||
you=("you" if resolver_variant == "2020-resolver" else "you'll")
|
||||
)
|
||||
parts.append(message)
|
||||
|
||||
logger.critical("\n".join(parts))
|
||||
|
||||
|
||||
def get_lib_location_guesses(
|
||||
user=False, # type: bool
|
||||
home=None, # type: Optional[str]
|
||||
root=None, # type: Optional[str]
|
||||
isolated=False, # type: bool
|
||||
prefix=None # type: Optional[str]
|
||||
):
|
||||
# type:(...) -> List[str]
|
||||
scheme = get_scheme(
|
||||
'',
|
||||
user=user,
|
||||
home=home,
|
||||
root=root,
|
||||
isolated=isolated,
|
||||
prefix=prefix,
|
||||
)
|
||||
return [scheme.purelib, scheme.platlib]
|
||||
|
||||
|
||||
def site_packages_writable(root, isolated):
|
||||
# type: (Optional[str], bool) -> bool
|
||||
return all(
|
||||
test_writable_dir(d) for d in set(
|
||||
get_lib_location_guesses(root=root, isolated=isolated))
|
||||
)


def decide_user_install(
    use_user_site,  # type: Optional[bool]
    prefix_path=None,  # type: Optional[str]
    target_dir=None,  # type: Optional[str]
    root_path=None,  # type: Optional[str]
    isolated_mode=False,  # type: bool
):
    # type: (...) -> bool
    """Determine whether to do a user install based on the input options.

    If use_user_site is False, no additional checks are done.
    If use_user_site is True, it is checked for compatibility with other
    options.
    If use_user_site is None, the default behaviour depends on the environment,
    which is provided by the other arguments.
    """
    # In some cases (config from tox), use_user_site can be set to an integer
    # rather than a bool, which 'use_user_site is False' wouldn't catch.
    if (use_user_site is not None) and (not use_user_site):
        logger.debug("Non-user install by explicit request")
        return False

    if use_user_site:
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # If we are here, user installs have not been explicitly requested/avoided
    assert use_user_site is None

    # user install incompatible with --prefix/--target
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # If user installs are not enabled, choose a non-user install
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # If we have permission for a non-user install, do that,
    # otherwise do a user install.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info("Defaulting to user installation because normal site-packages "
                "is not writeable")
    return True
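
# Assumed scenarios for illustration: decide_user_install(None) returns True
# on an interpreter whose site-packages is not writable (after logging
# "Defaulting to user installation ..."), while
# decide_user_install(True, prefix_path="/opt/app") raises CommandError,
# since --user and --prefix imply different install locations.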


def reject_location_related_install_options(requirements, options):
    # type: (List[InstallRequirement], Optional[List[str]]) -> None
    """If any location-changing --install-option arguments were passed for
    requirements or on the command-line, raise an error.
    """
    def format_options(option_names):
        # type: (Iterable[str]) -> List[str]
        return ["--{}".format(name.replace("_", "-")) for name in option_names]

    offenders = []

    for requirement in requirements:
        install_options = requirement.install_options
        location_options = parse_distutils_args(install_options)
        if location_options:
            offenders.append(
                "{!r} from {}".format(
                    format_options(location_options.keys()), requirement
                )
            )

    if options:
        location_options = parse_distutils_args(options)
        if location_options:
            offenders.append(
                "{!r} from command line".format(
                    format_options(location_options.keys())
                )
            )

    if not offenders:
        return

    raise CommandError(
        "Location-changing options found in --install-option: {}."
        " This is unsupported, use pip-level options like --user,"
        " --prefix, --root, and --target instead.".format(
            "; ".join(offenders)
        )
    )


def create_os_error_message(error, show_traceback, using_user_site):
    # type: (OSError, bool, bool) -> str
    """Format an error message for an OSError.

    It may occur anytime during the execution of the install command.
    """
    parts = []

    # Mention the error if we are not going to show a traceback
    parts.append("Could not install packages due to an OSError")
    if not show_traceback:
        parts.append(": ")
        parts.append(str(error))
    else:
        parts.append(".")

    # Split the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if not running_under_virtualenv() and not using_user_site:
            parts.extend([
                user_option_part, " or ",
                permissions_part.lower(),
            ])
        else:
            parts.append(permissions_part)
        parts.append(".\n")

    return "".join(parts).strip() + "\n"
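
# For a PermissionError outside a virtualenv (illustrative, assuming
# show_traceback=False and using_user_site=False), this yields roughly:
#     Could not install packages due to an OSError: [Errno 13] ...
#     Consider using the `--user` option or check the permissions.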
@ -0,0 +1,319 @@
import json
import logging
from optparse import Values
from typing import Iterator, List, Set, Tuple

from pip._vendor.pkg_resources import Distribution

from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
from pip._internal.utils.compat import stdlib_pkgs
from pip._internal.utils.misc import (
    dist_is_editable,
    get_installed_distributions,
    tabulate,
    write_output,
)
from pip._internal.utils.packaging import get_installer
from pip._internal.utils.parallel import map_multithread

logger = logging.getLogger(__name__)


class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        self.cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        self.cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        self.cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        self.cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )

        self.cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        self.cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        self.cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def _build_package_finder(self, options, session):
        # type: (Values, PipSession) -> PackageFinder
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        cmdoptions.check_list_path_option(options)

        skip = set(stdlib_pkgs)
        if options.excludes:
            skip.update(options.excludes)

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
            paths=options.path,
            skip=skip,
        )

        # get_not_required must be called first, so that all dependencies are
        # found and filtered out correctly. Otherwise a package could not be
        # identified as a requirement, because some of its parent packages
        # might have been filtered out already.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)
        return SUCCESS

    def get_outdated(self, packages, options):
        # type: (List[Distribution], Values) -> List[Distribution]
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        # type: (List[Distribution], Values) -> List[Distribution]
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        # type: (List[Distribution], Values) -> List[Distribution]
        dep_keys = set()  # type: Set[Distribution]
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())

        # Create a set to remove duplicate packages, and cast it to a list
        # to keep the return type consistent with get_outdated and
        # get_uptodate
        return list({pkg for pkg in packages if pkg.key not in dep_keys})
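
    # Illustrative example (assumed data): if requests is installed and
    # requires urllib3, get_not_required() drops urllib3 and keeps requests,
    # because urllib3's key appears in some installed dist's requires().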

    def iter_packages_latest_infos(self, packages, options):
        # type: (List[Distribution], Values) -> Iterator[Distribution]
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(dist):
                # type: (Distribution) -> Distribution
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.project_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map_multithread(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(self, packages, options):
        # type: (List[Distribution], Values) -> None
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    write_output("%s==%s (%s)", dist.project_name,
                                 dist.version, dist.location)
                else:
                    write_output("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        # type: (List[List[str]], List[str]) -> None
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            write_output(val)


def format_for_columns(pkgs, options):
    # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]]
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if options.verbose >= 1:
            row.append(get_installer(proj))

        data.append(row)

    return data, header


def format_for_json(packages, options):
    # type: (List[Distribution], Values) -> str
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': str(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = str(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
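
# Sample of the JSON shape (illustrative values): with --outdated and -v,
# an entry looks like
#     {"name": "requests", "version": "2.24.0", "location": "...",
#      "installer": "pip", "latest_version": "2.25.1",
#      "latest_filetype": "wheel"}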
@ -0,0 +1,162 @@
import logging
import shutil
import sys
import textwrap
import xmlrpc.client
from collections import OrderedDict
from optparse import Values
from typing import TYPE_CHECKING, Dict, List, Optional

from pip._vendor.packaging.version import parse as parse_version

from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.metadata import get_default_environment
from pip._internal.models.index import PyPI
from pip._internal.network.xmlrpc import PipXmlrpcTransport
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import write_output

if TYPE_CHECKING:
    from typing import TypedDict

    class TransformedHit(TypedDict):
        name: str
        summary: str
        versions: List[str]

logger = logging.getLogger(__name__)


class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        # type: (List[str], Values) -> List[Dict[str, str]]
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({'name': query, 'summary': query}, 'or')
        except xmlrpc.client.Fault as fault:
            message = "XMLRPC request failed [code: {code}]\n{string}".format(
                code=fault.faultCode,
                string=fault.faultString,
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits


def transform_hits(hits):
    # type: (List[Dict[str, str]]) -> List[TransformedHit]
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()  # type: OrderedDict[str, TransformedHit]
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
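
# Illustrative transformation (assumed hits): three hits for "foo" at
# versions 1.0, 2.0 and 1.5 collapse into one entry
#     {'name': 'foo', 'summary': <summary of the 2.0 hit>,
#      'versions': ['1.0', '2.0', '1.5']}
# since the summary tracks whichever version is currently the highest.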


def print_results(hits, name_column_width=None, terminal_width=None):
    # type: (List[TransformedHit], Optional[int], Optional[int]) -> None
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    env = get_default_environment()
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary_lines = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(
                    summary_lines)

        name_latest = f'{name} ({latest})'
        line = f'{name_latest:{name_column_width}} - {summary}'
        try:
            write_output(line)
            dist = env.get_distribution(name)
            if dist is not None:
                with indent_log():
                    if dist.version == latest:
                        write_output('INSTALLED: %s (latest)', dist.version)
                    else:
                        write_output('INSTALLED: %s', dist.version)
                        if parse_version(latest).pre:
                            write_output('LATEST: %s (pre-release; install'
                                         ' with "pip install --pre")', latest)
                        else:
                            write_output('LATEST: %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    # type: (List[str]) -> str
    return max(versions, key=parse_version)
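
# For example, highest_version(['1.0', '2.0b1', '1.5']) returns '2.0b1':
# parse_version orders versions by PEP 440, not lexicographically.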
@ -0,0 +1,181 @@
import logging
import os
from email.parser import FeedParser
from optparse import Values
from typing import Dict, Iterator, List

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
                results, list_files=options.files, verbose=options.verbose):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    # type: (List[str]) -> Iterator[Dict[str, str]]
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files require a
    pip-generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning('Package(s) not found: %s', ', '.join(missing))

    def get_requiring_packages(package_name):
        # type: (str) -> List[str]
        canonical_name = canonicalize_name(package_name)
        return [
            pkg.project_name for pkg in pkg_resources.working_set
            if canonical_name in
            [canonicalize_name(required.name) for required in
             pkg.requires()]
        ]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
            'required_by': get_requiring_packages(dist.project_name)
        }
        file_list = None
        metadata = ''
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadata
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [line.split(',')[0] for line in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_files=False, verbose=False):
    # type: (Iterator[Dict[str, str]], bool, bool) -> bool
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            write_output("---")

        write_output("Name: %s", dist.get('name', ''))
        write_output("Version: %s", dist.get('version', ''))
        write_output("Summary: %s", dist.get('summary', ''))
        write_output("Home-page: %s", dist.get('home-page', ''))
        write_output("Author: %s", dist.get('author', ''))
        write_output("Author-email: %s", dist.get('author-email', ''))
        write_output("License: %s", dist.get('license', ''))
        write_output("Location: %s", dist.get('location', ''))
        write_output("Requires: %s", ', '.join(dist.get('requires', [])))
        write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))

        if verbose:
            write_output("Metadata-Version: %s",
                         dist.get('metadata-version', ''))
            write_output("Installer: %s", dist.get('installer', ''))
            write_output("Classifiers:")
            for classifier in dist.get('classifiers', []):
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.get('entry_points', []):
                write_output("  %s", entry.strip())
        if list_files:
            write_output("Files:")
            for line in dist.get('files', []):
                write_output("  %s", line.strip())
            if "files" not in dist:
                write_output("Cannot locate installed-files.txt")
    return results_printed
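
# Illustrative output (assumed package): `pip show requests` prints header
# fields such as
#     Name: requests
#     Version: 2.25.1
#     Requires: certifi, chardet, idna, urllib3
# with Classifiers and Entry-points added under -v, and Files under -f.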
@ -0,0 +1,92 @@
from optparse import Values
from typing import List

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import (
    install_req_from_line,
    install_req_from_parsed_requirement,
)
from pip._internal.utils.misc import protect_pip_from_modification_on_windows


class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def add_options(self):
        # type: () -> None
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # type: (Values, List[str]) -> int
        session = self.get_default_session(options)

        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
                name, isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                    filename,
                    options=options,
                    session=session):
                req = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                f'You must give at least one requirement to {self.name} (see '
                f'"pip help {self.name}")'
            )

        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes, verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()

        warn_if_run_as_root()
        return SUCCESS
@ -0,0 +1,178 @@
import logging
import os
import shutil
from optparse import Values
from typing import List

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel_builder import build, should_build_for_wheel_command

logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.
    """

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self):
        # type: () -> None

        self.cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.build_dir())
        self.cmd_opts.add_option(cmdoptions.progress_bar())

        self.cmd_opts.add_option(
            '--no-verify',
            dest='no_verify',
            action='store_true',
            default=False,
            help="Don't verify if built wheel is valid.",
        )

        self.cmd_opts.add_option(cmdoptions.build_options())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        self.cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options, args):
        # type: (Values, List[str]) -> int
        cmdoptions.check_install_build_global(options)

        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="wheel",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            download_dir=options.wheel_dir,
            use_user_site=False,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(
            reqs, check_supported_wheels=True
        )

        reqs_to_build = []  # type: List[InstallRequirement]
        for req in requirement_set.requirements.values():
            if req.is_wheel:
                preparer.save_linked_requirement(req)
            elif should_build_for_wheel_command(req):
                reqs_to_build.append(req)

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            verify=(not options.no_verify),
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # copy from cache to target directory
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name, e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
            raise CommandError(
                "Failed to build one or more wheels"
            )

        return SUCCESS
@ -0,0 +1,403 @@
"""Configuration management setup

Some terminology:
- name
  As written in config files.
- value
  Value associated with a name
- key
  Name combined with its section (section.name)
- variant
  A single word describing where the configuration key-value pair came from
"""

import configparser
import locale
import logging
import os
import sys
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple

from pip._internal.exceptions import (
    ConfigurationError,
    ConfigurationFileCouldNotBeLoaded,
)
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.misc import ensure_dir, enum

RawConfigParser = configparser.RawConfigParser  # Shorthand
Kind = NewType("Kind", str)

CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
ENV_NAMES_IGNORED = "version", "help"

# The kinds of configurations there are.
kinds = enum(
    USER="user",        # User Specific
    GLOBAL="global",    # System Wide
    SITE="site",        # [Virtual] Environment Specific
    ENV="env",          # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE

logger = logging.getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
    # type: (str) -> str
    """Make a name consistent regardless of source (environment or file)"""
    name = name.lower().replace('_', '-')
    if name.startswith('--'):
        name = name[2:]  # only prefer long opts
    return name
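
# For example, _normalize_name("--GLOBAL_INDEX_URL") and
# _normalize_name("global_index_url") both yield "global-index-url".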


def _disassemble_key(name):
    # type: (str) -> List[str]
    if "." not in name:
        error_message = (
            "Key does not contain dot separated section and key. "
            "Perhaps you wanted to use 'global.{}' instead?"
        ).format(name)
        raise ConfigurationError(error_message)
    return name.split(".", 1)
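
# For example, _disassemble_key("global.timeout") returns
# ["global", "timeout"], while _disassemble_key("timeout") raises
# ConfigurationError suggesting "global.timeout" instead.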


def get_configuration_files():
    # type: () -> Dict[Kind, List[str]]
    global_config_files = [
        os.path.join(path, CONFIG_BASENAME)
        for path in appdirs.site_config_dirs('pip')
    ]

    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    legacy_config_file = os.path.join(
        os.path.expanduser('~'),
        'pip' if WINDOWS else '.pip',
        CONFIG_BASENAME,
    )
    new_config_file = os.path.join(
        appdirs.user_config_dir("pip"), CONFIG_BASENAME
    )
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [site_config_file],
        kinds.USER: [legacy_config_file, new_config_file],
    }


class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style keys and
    stores the value associated with it as "key-name" under the section
    "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy-to-manage form in the configuration
    files, and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Optional[Kind]) -> None
        super().__init__()

        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in OVERRIDE_ORDER
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in OVERRIDE_ORDER
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, \
            "Need to specify a file to edit"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration."""
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError(f"No such key - {key}")

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration."""
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration."""
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (parser.has_section(section)
                    and parser.remove_option(section, name)):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's a specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason="contains invalid {} characters".format(
                        locale.getpreferredencoding(False)
                    ),
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                if name not in ENV_NAMES_IGNORED:
                    yield name, val
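
    # For example, with PIP_TIMEOUT=60 and PIP_HELP=1 set, get_environ_vars()
    # yields only ("timeout", "60"); "help" is listed in ENV_NAMES_IGNORED.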

    # XXX: This is patched in the tests.
    def iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        config_files = get_configuration_files()

        # at the base we have any global configuration
        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # finally virtualenv configuration, which trumps the others
        yield kinds.SITE, config_files[kinds.SITE]

    def get_values_in_config(self, variant):
        # type: (Kind) -> Dict[str, Any]
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self):
        # type: () -> str
        return f"{self.__class__.__name__}({self._dictionary!r})"
@ -0,0 +1,20 @@
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.distributions.sdist import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.req.req_install import InstallRequirement


def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements will always be source distributions. They use the
    # legacy logic until we create a modern standard for them.
    if install_req.editable:
        return SourceDistribution(install_req)

    # If it's a wheel, it's a WheelDistribution
    if install_req.is_wheel:
        return WheelDistribution(install_req)

    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
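
# Illustrative dispatch (assumed requirements): an editable `-e ./pkg` and an
# sdist `pkg-1.0.tar.gz` both map to SourceDistribution, while a wheel
# `pkg-1.0-py3-none-any.whl` maps to WheelDistribution.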
@ -0,0 +1,39 @@
import abc
from typing import Optional

from pip._vendor.pkg_resources import Distribution

from pip._internal.index.package_finder import PackageFinder
from pip._internal.req import InstallRequirement


class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.
    """

    def __init__(self, req):
        # type: (InstallRequirement) -> None
        super().__init__()
        self.req = req

    @abc.abstractmethod
    def get_pkg_resources_distribution(self):
        # type: () -> Optional[Distribution]
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        raise NotImplementedError()
@ -0,0 +1,22 @@
from typing import Optional

from pip._vendor.pkg_resources import Distribution

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder


class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Optional[Distribution]
        return self.req.satisfied_by

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        pass
@ -0,0 +1,95 @@
import logging
from typing import Set, Tuple

from pip._vendor.pkg_resources import Distribution

from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.utils.subprocess import runner_with_spinner_message

logger = logging.getLogger(__name__)


class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Distribution
        return self.req.get_dist()

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            self._setup_isolation(finder)

        self.req.prepare_metadata()

    def _setup_isolation(self, finder):
        # type: (PackageFinder) -> None
        def _raise_conflicts(conflicting_with, conflicting_reqs):
            # type: (str, Set[Tuple[str, str]]) -> None
            format_string = (
                "Some build dependencies for {requirement} "
                "conflict with {conflicting_with}: {description}."
            )
            error_message = format_string.format(
                requirement=self.req,
                conflicting_with=conflicting_with,
                description=", ".join(
                    f"{installed} is incompatible with {wanted}"
                    # Use the parameter, not the outer `conflicting` name.
                    for installed, wanted in sorted(conflicting_reqs)
                ),
            )
            raise InstallationError(error_message)

        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", "Installing build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            _raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build wheel"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                reqs = backend.get_requires_for_build_wheel()

        conflicting, missing = self.req.build_env.check_requirements(reqs)
        if conflicting:
            _raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", "Installing backend dependencies"
        )
@ -0,0 +1,34 @@
from zipfile import ZipFile

from pip._vendor.pkg_resources import Distribution

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel


class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Distribution
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        # Set as part of preparation during download.
        assert self.req.local_file_path
        # Wheels are never unnamed.
        assert self.req.name

        with ZipFile(self.req.local_file_path, allowZip64=True) as z:
            return pkg_resources_distribution_for_wheel(
                z, self.req.name, self.req.local_file_path
            )

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        pass
|
||||
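
# Illustrative sketch (not part of the diff): a wheel is just a zip archive,
# which is why no preparation step is needed -- its metadata can be read
# directly. The filename and dist-info path here are hypothetical.
from zipfile import ZipFile

with ZipFile("example_pkg-1.0-py3-none-any.whl", allowZip64=True) as z:
    metadata = z.read("example_pkg-1.0.dist-info/METADATA").decode()
print(metadata.splitlines()[0])  # e.g. "Metadata-Version: 2.1"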
@ -0,0 +1,397 @@
"""Exceptions used throughout package"""

import configparser
from itertools import chain, groupby, repeat
from typing import TYPE_CHECKING, Dict, List, Optional

from pip._vendor.pkg_resources import Distribution
from pip._vendor.requests.models import Request, Response

if TYPE_CHECKING:
    from hashlib import _Hash

    from pip._internal.req.req_install import InstallRequirement


class PipError(Exception):
    """Base pip exception"""


class ConfigurationError(PipError):
    """General exception in configuration"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class NoneMetadataError(PipError):
    """
    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
    pip._vendor.pkg_resources.Distribution object and
    `dist.has_metadata('METADATA')` returns True but
    `dist.get_metadata('METADATA')` returns None (and similarly for
    "PKG-INFO").
    """

    def __init__(self, dist, metadata_name):
        # type: (Distribution, str) -> None
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self):
        # type: () -> str
        # Use `dist` in the error message because its stringification
        # includes more information, like the version and location.
        return (
            'None {} metadata found for distribution: {}'.format(
                self.metadata_name, self.dist,
            )
        )


class UserInstallationInvalid(InstallationError):
    """A --user install is requested on an environment without user site."""

    def __str__(self):
        # type: () -> str
        return "User base directory is not specified"


class InvalidSchemeCombination(InstallationError):
    def __str__(self):
        # type: () -> str
        before = ", ".join(str(a) for a in self.args[:-1])
        return f"Cannot set {before} and {self.args[-1]} together"


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class NetworkConnectionError(PipError):
    """HTTP connection error"""

    def __init__(self, error_msg, response=None, request=None):
        # type: (str, Response, Request) -> None
        """
        Initialize NetworkConnectionError with `request` and `response`
        objects.
        """
        self.response = response
        self.request = request
        self.error_msg = error_msg
        if (self.response is not None and not self.request and
                hasattr(response, 'request')):
            self.request = self.response.request
        super().__init__(error_msg, response, request)

    def __str__(self):
        # type: () -> str
        return str(self.error_msg)


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""


class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    This is raised when the metadata contains values (e.g. name and version)
    that do not match the information previously obtained from sdist filename
    or user-supplied ``#egg=`` value.
    """
    def __init__(self, ireq, field, f_val, m_val):
        # type: (InstallRequirement, str, str, str) -> None
        self.ireq = ireq
        self.field = field
        self.f_val = f_val
        self.m_val = m_val

    def __str__(self):
        # type: () -> str
        template = (
            "Requested {} has inconsistent {}: "
            "filename has {!r}, but metadata has {!r}"
        )
        return template.format(self.ireq, self.field, self.f_val, self.m_val)


class InstallationSubprocessError(InstallationError):
    """A subprocess call failed during installation."""
    def __init__(self, returncode, description):
        # type: (int, str) -> None
        self.returncode = returncode
        self.description = description

    def __str__(self):
        # type: () -> str
        return (
            "Command errored out with exit status {}: {} "
            "Check the logs for full command output."
        ).format(self.returncode, self.description)


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        # type: () -> None
        self.errors = []  # type: List[HashError]

    def append(self, error):
        # type: (HashError) -> None
        self.errors.append(error)

    def __str__(self):
        # type: () -> str
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return '\n'.join(lines)
        return ''

    def __nonzero__(self):
        # type: () -> bool
        return bool(self.errors)

    def __bool__(self):
        # type: () -> bool
        return self.__nonzero__()


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # type: Optional[InstallRequirement]
    head = ''
    order = -1  # type: int

    def body(self):
        # type: () -> str
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            its link already populated by the resolver's _populate_link().

        """
        return f'    {self._requirement_name()}'

    def __str__(self):
        # type: () -> str
        return f'{self.head}\n{self.body()}'

    def _requirement_name(self):
        # type: () -> str
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points to a
    directory, but we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        # type: (str) -> None
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        # type: () -> str
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    {} --hash={}:{}'.format(package or 'unknown package',
                                            FAVORITE_HASH,
                                            self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised
        to improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        # type: () -> str
        return '    {}:\n{}'.format(self._requirement_name(),
                                    self._hash_comparison())

    def _hash_comparison(self):
        # type: () -> str
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                         or 123451234512345123451234512345123451234512345
            Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # type: (str) -> chain[str]
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []  # type: List[str]
        for hash_name, expecteds in self.allowed.items():
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected {} {}'.format(next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        {}\n'.format(
                         self.gots[hash_name].hexdigest()))
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""


class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file"""

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        # type: (str, Optional[str], Optional[configparser.Error]) -> None
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self):
        # type: () -> str
        if self.fname is not None:
            message_part = f" in {self.fname}."
        else:
            assert self.error is not None
            message_part = f".\n{self.error}\n"
        return f"Configuration file {self.reason}{message_part}"
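
# Illustrative sketch (not part of the diff): HashErrors.__str__ above sorts
# the errors, then groups them by class with itertools.groupby so each class's
# `head` heading prints once above its members. The tuples here are
# hypothetical stand-ins for HashError instances.
from itertools import groupby

errors = sorted([("unpinned", "req-b"), ("missing", "req-a"), ("missing", "req-c")])
for kind, group in groupby(errors, key=lambda e: e[0]):
    print(f"{kind}:")             # plays the role of `cls.head`
    for _, name in group:
        print(f"    {name}")      # plays the role of `e.body()`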
@ -0,0 +1,2 @@
"""Index interaction code
"""
@ -0,0 +1,556 @@
"""
The main purpose of this module is to expose LinkCollector.collect_sources().
"""

import cgi
import collections
import functools
import html
import itertools
import logging
import os
import re
import urllib.parse
import urllib.request
import xml.etree.ElementTree
from optparse import Values
from typing import (
    Callable,
    Iterable,
    List,
    MutableMapping,
    NamedTuple,
    Optional,
    Sequence,
    Union,
)

from pip._vendor import html5lib, requests
from pip._vendor.requests import Response
from pip._vendor.requests.exceptions import RetryError, SSLError

from pip._internal.exceptions import NetworkConnectionError
from pip._internal.models.link import Link
from pip._internal.models.search_scope import SearchScope
from pip._internal.network.session import PipSession
from pip._internal.network.utils import raise_for_status
from pip._internal.utils.filetypes import is_archive_file
from pip._internal.utils.misc import pairwise, redact_auth_from_url
from pip._internal.vcs import vcs

from .sources import CandidatesFromPage, LinkSource, build_source

logger = logging.getLogger(__name__)

HTMLElement = xml.etree.ElementTree.Element
ResponseHeaders = MutableMapping[str, str]


def _match_vcs_scheme(url):
    # type: (str) -> Optional[str]
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    for scheme in vcs.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
            return scheme
    return None


class _NotHTML(Exception):
    def __init__(self, content_type, request_desc):
        # type: (str, str) -> None
        super().__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc


def _ensure_html_header(response):
    # type: (Response) -> None
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
    """
    content_type = response.headers.get("Content-Type", "")
    if not content_type.lower().startswith("text/html"):
        raise _NotHTML(content_type, response.request.method)


class _NotHTTP(Exception):
    pass


def _ensure_html_response(url, session):
    # type: (str, PipSession) -> None
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
    if scheme not in {'http', 'https'}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)

    _ensure_html_header(resp)


def _get_html_response(url, session):
    # type: (str, PipSession) -> Response
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    """
    if is_archive_file(Link(url).filename):
        _ensure_html_response(url, session=session)

    logger.debug('Getting page %s', redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly return cached data for
            # /simple/, because authors generally expect that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    raise_for_status(resp)

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp


def _get_encoding_from_headers(headers):
    # type: (ResponseHeaders) -> Optional[str]
    """Determine if we have any encoding information in our headers."""
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            return params['charset']
    return None


def _determine_base_url(document, page_url):
    # type: (HTMLElement, str) -> str
    """Determine the HTML document's base URL.

    This looks for a ``<base>`` tag in the HTML document. If present, its href
    attribute denotes the base URL of anchor tags in the document. If there is
    no such tag (or if it does not have a valid href attribute), the HTML
    file's URL is used as the base URL.

    :param document: An HTML document representation. The current
        implementation expects the result of ``html5lib.parse()``.
    :param page_url: The URL of the HTML document.
    """
    for base in document.findall(".//base"):
        href = base.get("href")
        if href is not None:
            return href
    return page_url


def _clean_url_path_part(part):
    # type: (str) -> str
    """
    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    return urllib.parse.quote(urllib.parse.unquote(part))


def _clean_file_url_path(part):
    # type: (str) -> str
    """
    Clean the first part of a URL path that corresponds to a local
    filesystem path (i.e. the first part after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    # Also, on Windows the path part might contain a drive letter which
    # should not be quoted. On Linux where drive letters do not
    # exist, the colon should be quoted. We rely on urllib.request
    # to do the right thing here.
    return urllib.request.pathname2url(urllib.request.url2pathname(part))


# Matches "@" and the percent-encoded "/" ("%2F"), which are treated as
# reserved when cleaning URL paths.
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)


def _clean_url_path(path, is_local_path):
    # type: (str, bool) -> str
    """
    Clean the path portion of a URL.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return ''.join(cleaned_parts)


def _clean_link(url):
    # type: (str) -> str
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    without double-quoting characters that are already percent-encoded.
    """
    # Split the URL into parts according to the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    result = urllib.parse.urlparse(url)
    # If the netloc is empty, then the URL refers to a local filesystem path.
    is_local_path = not result.netloc
    path = _clean_url_path(result.path, is_local_path=is_local_path)
    return urllib.parse.urlunparse(result._replace(path=path))


def _create_link_from_element(
    anchor,  # type: HTMLElement
    page_url,  # type: str
    base_url,  # type: str
):
    # type: (...) -> Optional[Link]
    """
    Convert an anchor element in a simple repository page to a Link.
    """
    href = anchor.get("href")
    if not href:
        return None

    url = _clean_link(urllib.parse.urljoin(base_url, href))
    pyrequire = anchor.get('data-requires-python')
    pyrequire = html.unescape(pyrequire) if pyrequire else None

    yanked_reason = anchor.get('data-yanked')
    if yanked_reason:
        yanked_reason = html.unescape(yanked_reason)

    link = Link(
        url,
        comes_from=page_url,
        requires_python=pyrequire,
        yanked_reason=yanked_reason,
    )

    return link


class CacheablePageContent:
    def __init__(self, page):
        # type: (HTMLPage) -> None
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other):
        # type: (object) -> bool
        return (isinstance(other, type(self)) and
                self.page.url == other.page.url)

    def __hash__(self):
        # type: () -> int
        return hash(self.page.url)


def with_cached_html_pages(
    fn,  # type: Callable[[HTMLPage], Iterable[Link]]
):
    # type: (...) -> Callable[[HTMLPage], List[Link]]
    """
    Given a function that parses an Iterable[Link] from an HTMLPage, cache the
    function's result (keyed by CacheablePageContent), unless the HTMLPage
    `page` has `page.cache_link_parsing == False`.
    """

    @functools.lru_cache(maxsize=None)
    def wrapper(cacheable_page):
        # type: (CacheablePageContent) -> List[Link]
        return list(fn(cacheable_page.page))

    @functools.wraps(fn)
    def wrapper_wrapper(page):
        # type: (HTMLPage) -> List[Link]
        if page.cache_link_parsing:
            return wrapper(CacheablePageContent(page))
        return list(fn(page))

    return wrapper_wrapper


@with_cached_html_pages
def parse_links(page):
    # type: (HTMLPage) -> Iterable[Link]
    """
    Parse an HTML document, and yield its anchor elements as Link objects.
    """
    document = html5lib.parse(
        page.content,
        transport_encoding=page.encoding,
        namespaceHTMLElements=False,
    )

    url = page.url
    base_url = _determine_base_url(document, url)
    for anchor in document.findall(".//a"):
        link = _create_link_from_element(
            anchor,
            page_url=url,
            base_url=base_url,
        )
        if link is None:
            continue
        yield link


class HTMLPage:
    """Represents one page, along with its URL"""

    def __init__(
        self,
        content,  # type: bytes
        encoding,  # type: Optional[str]
        url,  # type: str
        cache_link_parsing=True,  # type: bool
    ):
        # type: (...) -> None
        """
        :param encoding: the encoding to decode the given content.
        :param url: the URL from which the HTML was downloaded.
        :param cache_link_parsing: whether links parsed from this page's url
                                   should be cached. PyPI index urls should
                                   have this set to False, for example.
        """
        self.content = content
        self.encoding = encoding
        self.url = url
        self.cache_link_parsing = cache_link_parsing

    def __str__(self):
        # type: () -> str
        return redact_auth_from_url(self.url)


def _handle_get_page_fail(
    link,  # type: Link
    reason,  # type: Union[str, Exception]
    meth=None  # type: Optional[Callable[..., None]]
):
    # type: (...) -> None
    if meth is None:
        meth = logger.debug
    meth("Could not fetch URL %s: %s - skipping", link, reason)


def _make_html_page(response, cache_link_parsing=True):
    # type: (Response, bool) -> HTMLPage
    encoding = _get_encoding_from_headers(response.headers)
    return HTMLPage(
        response.content,
        encoding=encoding,
        url=response.url,
        cache_link_parsing=cache_link_parsing)


def _get_html_page(link, session=None):
    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning('Cannot look at %s URL %s because it does not support '
                       'lookup as web pages.', vcs_scheme, link)
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib.request.url2pathname(path))):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib.parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)

    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by a HTTP HEAD request.', link,
        )
    except _NotHTML as exc:
        logger.warning(
            'Skipping page %s because the %s request got Content-Type: %s. '
            'The only supported Content-Type is text/html.',
            link, exc.request_desc, exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        return _make_html_page(resp,
                               cache_link_parsing=link.cache_link_parsing)
    return None


class CollectedSources(NamedTuple):
    find_links: Sequence[Optional[LinkSource]]
    index_urls: Sequence[Optional[LinkSource]]


class LinkCollector:

    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session,  # type: PipSession
        search_scope,  # type: SearchScope
    ):
        # type: (...) -> None
        self.search_scope = search_scope
        self.session = session

    @classmethod
    def create(cls, session, options, suppress_no_index=False):
        # type: (PipSession, Values, bool) -> LinkCollector
        """
        :param session: The Session to use to make requests.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            logger.debug(
                'Ignoring indexes: %s',
                ','.join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        # Make sure find_links is a list before passing to create().
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links, index_urls=index_urls,
        )
        link_collector = LinkCollector(
            session=session, search_scope=search_scope,
        )
        return link_collector

    @property
    def find_links(self):
        # type: () -> List[str]
        return self.search_scope.find_links

    def fetch_page(self, location):
        # type: (Link) -> Optional[HTMLPage]
        """
        Fetch an HTML page containing package links.
        """
        return _get_html_page(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        # The OrderedDict calls below deduplicate sources by URL.
        index_url_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
            )
            for loc in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
            )
            for loc in self.find_links
        ).values()

        if logger.isEnabledFor(logging.DEBUG):
            lines = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            lines = [
                f"{len(lines)} location(s) to search "
                f"for versions of {project_name}:"
            ] + lines
            logger.debug("\n".join(lines))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
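
# Illustrative sketch (not part of the diff): why _clean_url_path_part above
# unquotes before quoting -- it normalizes a URL path segment without
# double-encoding characters that are already percent-encoded.
import urllib.parse

part = "a%20b c"
print(urllib.parse.quote(urllib.parse.unquote(part)))  # a%20b%20c (not a%2520b%20c)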
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,224 @@
import logging
import mimetypes
import os
import pathlib
from typing import Callable, Iterable, Optional, Tuple

from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.link import Link
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._internal.vcs import is_url

logger = logging.getLogger(__name__)

FoundCandidates = Iterable[InstallationCandidate]
FoundLinks = Iterable[Link]
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
PageValidator = Callable[[Link], bool]


class LinkSource:
    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()


def _is_html_file(file_url: str) -> bool:
    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"


class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This scans the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_links``: Archives in the directory.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._path = pathlib.Path(os.path.realpath(path))

    @property
    def link(self) -> Optional[Link]:
        return None

    def page_candidates(self) -> FoundCandidates:
        for path in self._path.iterdir():
            url = path_to_url(str(path))
            if not _is_html_file(url):
                continue
            yield from self._candidates_from_page(Link(url))

    def file_links(self) -> FoundLinks:
        for path in self._path.iterdir():
            url = path_to_url(str(path))
            if _is_html_file(url):
                continue
            yield Link(url)


class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_links``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        if not _is_html_file(self._link.url):
            return
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        if _is_html_file(self._link.url):
            return
        yield self._link


class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_links``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        if not self._page_validator(self._link):
            return
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link


class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        return ()


def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:

    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        msg = (
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme."
        )
        logger.warning(msg, location)
        return (None, None)

    if path is None:
        source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)

    if os.path.isdir(path):
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)
    elif os.path.isfile(path):
        source = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)
    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
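
# Illustrative sketch (not part of the diff): _is_html_file above decides
# between "page" and "file" handling purely from the name, via mimetypes.
import mimetypes

print(mimetypes.guess_type("index.html", strict=False)[0])      # text/html
print(mimetypes.guess_type("pkg-1.0.tar.gz", strict=False)[0])  # application/x-tar, not text/html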
@ -0,0 +1,184 @@
import logging
import pathlib
import sys
import sysconfig
from typing import List, Optional

from pip._internal.models.scheme import SCHEME_KEYS, Scheme

from . import _distutils, _sysconfig
from .base import (
    USER_CACHE_DIR,
    get_major_minor_version,
    get_src_prefix,
    site_packages,
    user_site,
)

__all__ = [
    "USER_CACHE_DIR",
    "get_bin_prefix",
    "get_bin_user",
    "get_major_minor_version",
    "get_platlib",
    "get_prefixed_libs",
    "get_purelib",
    "get_scheme",
    "get_src_prefix",
    "site_packages",
    "user_site",
]


logger = logging.getLogger(__name__)


def _default_base(*, user: bool) -> str:
    if user:
        base = sysconfig.get_config_var("userbase")
    else:
        base = sysconfig.get_config_var("base")
    assert base is not None
    return base


def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
    if old == new:
        return False
    issue_url = "https://github.com/pypa/pip/issues/9617"
    message = (
        "Value for %s does not match. Please report this to <%s>"
        "\ndistutils: %s"
        "\nsysconfig: %s"
    )
    logger.debug(message, key, issue_url, old, new)
    return True


def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    message = (
        "Additional context:" "\nuser = %r" "\nhome = %r" "\nroot = %r" "\nprefix = %r"
    )
    logger.debug(message, user, home, root, prefix)


def get_scheme(
    dist_name,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
):
    # type: (...) -> Scheme
    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    base = prefix or home or _default_base(user=user)
    warned = []
    for k in SCHEME_KEYS:
        # Extra join because distutils can return relative paths.
        old_v = pathlib.Path(base, getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name == "python"
            and new_v.name == "pypy"
        )
        if skip_pypy_special_case:
            continue

        warned.append(_warn_if_mismatch(old_v, new_v, key=f"scheme.{k}"))

    if any(warned):
        _log_context(user=user, home=home, root=root, prefix=prefix)

    return old


def get_bin_prefix():
    # type: () -> str
    old = _distutils.get_bin_prefix()
    new = _sysconfig.get_bin_prefix()
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
        _log_context()
    return old


def get_bin_user():
    # type: () -> str
    return _sysconfig.get_scheme("", user=True).scripts


def get_purelib():
    # type: () -> str
    """Return the default pure-Python lib location."""
    old = _distutils.get_purelib()
    new = _sysconfig.get_purelib()
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
        _log_context()
    return old


def get_platlib():
    # type: () -> str
    """Return the default platform-shared lib location."""
    old = _distutils.get_platlib()
    new = _sysconfig.get_platlib()
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
        _log_context()
    return old


def get_prefixed_libs(prefix):
    # type: (str) -> List[str]
    """Return the lib locations under ``prefix``."""
    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)

    warned = [
        _warn_if_mismatch(
            pathlib.Path(old_pure),
            pathlib.Path(new_pure),
            key="prefixed-purelib",
        ),
        _warn_if_mismatch(
            pathlib.Path(old_plat),
            pathlib.Path(new_plat),
            key="prefixed-platlib",
        ),
    ]
    if any(warned):
        _log_context(prefix=prefix)

    if old_pure == old_plat:
        return [old_pure]
    return [old_pure, old_plat]
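
# Illustrative sketch (not part of the diff): _warn_if_mismatch above compares
# pathlib.Path objects, so purely textual differences such as a trailing slash
# do not trigger a warning.
import pathlib

print(pathlib.Path("/usr/lib/python3/") == pathlib.Path("/usr/lib/python3"))       # True
print(pathlib.Path("/usr/lib/python3") == pathlib.Path("/usr/local/lib/python3"))  # False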
@ -0,0 +1,150 @@
"""Locations where we look for configs, install stuff, etc"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import os
import sys
from distutils.cmd import Command as DistutilsCommand
from distutils.command.install import SCHEME_KEYS
from distutils.command.install import install as distutils_install_command
from distutils.sysconfig import get_python_lib
from typing import Dict, List, Optional, Tuple, Union, cast

from pip._internal.models.scheme import Scheme
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.virtualenv import running_under_virtualenv

from .base import get_major_minor_version


def _distutils_scheme(
    dist_name, user=False, home=None, root=None, isolated=False, prefix=None
):
    # type: (str, bool, str, str, bool, str) -> Dict[str, str]
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    dist_args = {"name": dist_name}  # type: Dict[str, Union[str, List[str]]]
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    d.parse_config_files()
    obj = None  # type: Optional[DistutilsCommand]
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme["headers"] = os.path.join(
            i.prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(
            root,
            path_no_drive[1:],
        )

    return scheme


def get_scheme(
    dist_name,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
):
    # type: (...) -> Scheme
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    scheme = _distutils_scheme(dist_name, user, home, root, isolated, prefix)
    return Scheme(
        platlib=scheme["platlib"],
        purelib=scheme["purelib"],
        headers=scheme["headers"],
        scripts=scheme["scripts"],
        data=scheme["data"],
    )


def get_bin_prefix():
    # type: () -> str
    if WINDOWS:
        bin_py = os.path.join(sys.prefix, "Scripts")
        # buildout uses 'bin' on Windows too?
        if not os.path.exists(bin_py):
            bin_py = os.path.join(sys.prefix, "bin")
        return bin_py
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return os.path.join(sys.prefix, "bin")


def get_purelib():
    # type: () -> str
    return get_python_lib(plat_specific=False)


def get_platlib():
    # type: () -> str
    return get_python_lib(plat_specific=True)


def get_prefixed_libs(prefix):
    # type: (str) -> Tuple[str, str]
    return (
        get_python_lib(plat_specific=False, prefix=prefix),
        get_python_lib(plat_specific=True, prefix=prefix),
    )
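
# Illustrative sketch (not part of the diff): the core move in
# _distutils_scheme above -- ask distutils' "install" command for its resolved
# paths. The package name is hypothetical; note distutils is deprecated since
# Python 3.10 and removed in 3.12.
from distutils.dist import Distribution

d = Distribution({"name": "example"})
cmd = d.get_command_obj("install", create=True)
cmd.finalize_options()
print(cmd.install_purelib, cmd.install_scripts)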
@ -0,0 +1,180 @@
|
||||
import distutils.util # FIXME: For change_root.
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import sysconfig
|
||||
import typing
|
||||
|
||||
from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
|
||||
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
from .base import get_major_minor_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Notes on _infer_* functions.
|
||||
# Unfortunately ``_get_default_scheme()`` is private, so there's no way to
|
||||
# ask things like "what is the '_prefix' scheme on this platform". These
|
||||
# functions try to answer that with some heuristics while accounting for ad-hoc
|
||||
# platforms not covered by CPython's default sysconfig implementation. If the
|
||||
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
|
||||
# a POSIX scheme.
|
||||
|
||||
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
|
||||
|
||||
|
||||
def _infer_prefix():
|
||||
# type: () -> str
|
||||
"""Try to find a prefix scheme for the current platform.
|
||||
|
||||
This tries:
|
||||
|
||||
* Implementation + OS, used by PyPy on Windows (``pypy_nt``).
|
||||
* Implementation without OS, used by PyPy on POSIX (``pypy``).
|
||||
* OS + "prefix", used by CPython on POSIX (``posix_prefix``).
|
||||
* Just the OS name, used by CPython on Windows (``nt``).
|
||||
|
||||
If none of the above works, fall back to ``posix_prefix``.
|
||||
"""
|
||||
implementation_suffixed = f"{sys.implementation.name}_{os.name}"
|
||||
if implementation_suffixed in _AVAILABLE_SCHEMES:
|
||||
return implementation_suffixed
|
||||
if sys.implementation.name in _AVAILABLE_SCHEMES:
|
||||
return sys.implementation.name
|
||||
suffixed = f"{os.name}_prefix"
|
||||
if suffixed in _AVAILABLE_SCHEMES:
|
||||
return suffixed
|
||||
if os.name in _AVAILABLE_SCHEMES: # On Windows, prefx is just called "nt".
|
||||
return os.name
|
||||
return "posix_prefix"
|
||||
|
||||
|
||||
def _infer_user():
|
||||
# type: () -> str
|
||||
"""Try to find a user scheme for the current platform."""
|
||||
suffixed = f"{os.name}_user"
|
||||
if suffixed in _AVAILABLE_SCHEMES:
|
||||
return suffixed
|
||||
if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable.
|
||||
raise UserInstallationInvalid()
|
||||
return "posix_user"
|
||||
|
||||
|
||||
def _infer_home():
|
||||
# type: () -> str
|
||||
"""Try to find a home for the current platform."""
|
||||
suffixed = f"{os.name}_home"
|
||||
if suffixed in _AVAILABLE_SCHEMES:
|
||||
return suffixed
|
||||
return "posix_home"
|
||||
|
||||
|
||||
# Update these keys if the user sets a custom home.
|
||||
_HOME_KEYS = [
|
||||
"installed_base",
|
||||
"base",
|
||||
"installed_platbase",
|
||||
"platbase",
|
||||
"prefix",
|
||||
"exec_prefix",
|
||||
]
|
||||
if sysconfig.get_config_var("userbase") is not None:
|
||||
_HOME_KEYS.append("userbase")
|
||||
|
||||
|
||||
def get_scheme(
|
||||
dist_name, # type: str
|
||||
user=False, # type: bool
|
||||
home=None, # type: typing.Optional[str]
|
||||
root=None, # type: typing.Optional[str]
|
||||
isolated=False, # type: bool
|
||||
prefix=None, # type: typing.Optional[str]
|
||||
):
|
||||
# type: (...) -> Scheme
|
||||
"""
|
||||
Get the "scheme" corresponding to the input parameters.
|
||||
|
||||
:param dist_name: the name of the package to retrieve the scheme for, used
|
||||
in the headers scheme path
|
||||
:param user: indicates to use the "user" scheme
|
||||
:param home: indicates to use the "home" scheme
|
||||
:param root: root under which other directories are re-based
|
||||
:param isolated: ignored, but kept for distutils compatibility (where
|
||||
this controls whether the user-site pydistutils.cfg is honored)
|
||||
:param prefix: indicates to use the "prefix" scheme and provides the
|
||||
base directory for the same
|
||||
"""
|
||||
if user and prefix:
|
||||
raise InvalidSchemeCombination("--user", "--prefix")
|
||||
if home and prefix:
|
||||
raise InvalidSchemeCombination("--home", "--prefix")
|
||||
|
||||
if home is not None:
|
||||
scheme_name = _infer_home()
|
||||
elif user:
|
||||
scheme_name = _infer_user()
|
||||
else:
|
||||
scheme_name = _infer_prefix()
|
||||
|
||||
if home is not None:
|
||||
variables = {k: home for k in _HOME_KEYS}
|
||||
elif prefix is not None:
|
||||
variables = {k: prefix for k in _HOME_KEYS}
|
||||
else:
|
||||
variables = {}
|
||||
|
||||
paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
|
||||
|
||||
# Logic here is very arbitrary, we're doing it for compatibility, don't ask.
|
||||
# 1. Pip historically uses a special header path in virtual environments.
|
||||
# 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
|
||||
# only do the same when not running in a virtual environment because
|
||||
# pip's historical header path logic (see point 1) did not do this.
|
||||
if running_under_virtualenv():
|
||||
if user:
|
||||
base = variables.get("userbase", sys.prefix)
|
||||
else:
|
||||
base = variables.get("base", sys.prefix)
|
||||
python_xy = f"python{get_major_minor_version()}"
|
||||
paths["include"] = os.path.join(base, "include", "site", python_xy)
|
||||
elif not dist_name:
|
||||
dist_name = "UNKNOWN"
|
||||
|
||||
scheme = Scheme(
|
||||
platlib=paths["platlib"],
|
||||
purelib=paths["purelib"],
|
||||
headers=os.path.join(paths["include"], dist_name),
|
||||
scripts=paths["scripts"],
|
||||
data=paths["data"],
|
||||
)
|
||||
if root is not None:
|
||||
for key in SCHEME_KEYS:
|
||||
value = distutils.util.change_root(root, getattr(scheme, key))
|
||||
setattr(scheme, key, value)
|
||||
return scheme
|
||||
|
||||
|
||||
def get_bin_prefix():
|
||||
# type: () -> str
|
||||
# Force the use of /usr/local/bin for standard macOS framework installs.
|
||||
if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
|
||||
return "/usr/local/bin"
|
||||
return sysconfig.get_paths()["scripts"]
|
||||
|
||||
|
||||
def get_purelib():
|
||||
# type: () -> str
|
||||
return sysconfig.get_paths()["purelib"]
|
||||
|
||||
|
||||
def get_platlib():
|
||||
# type: () -> str
|
||||
return sysconfig.get_paths()["platlib"]
|
||||
|
||||
|
||||
def get_prefixed_libs(prefix):
|
||||
# type: (str) -> typing.Tuple[str, str]
|
||||
paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
|
||||
return (paths["purelib"], paths["platlib"])
|
||||
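For orientation, a minimal usage sketch of the scheme resolution above (hedged: ``get_scheme`` lives in pip's private internals at this revision, assumed importable as ``pip._internal.locations``, and is not a public API; the package name is made up):

# Illustrative only -- pip internals may change without notice.
from pip._internal.locations import get_scheme

scheme = get_scheme("example-package")  # hypothetical distribution name
print(scheme.purelib)  # base directory for pure-Python modules
print(scheme.scripts)  # base directory for console scripts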
@ -0,0 +1,48 @@
import os
import site
import sys
import sysconfig
import typing

from pip._internal.utils import appdirs
from pip._internal.utils.virtualenv import running_under_virtualenv

# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_path("purelib")  # type: typing.Optional[str]


def get_major_minor_version():
    # type: () -> str
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    return "{}.{}".format(*sys.version_info)


def get_src_prefix():
    # type: () -> str
    if running_under_virtualenv():
        src_prefix = os.path.join(sys.prefix, "src")
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            src_prefix = os.path.join(os.getcwd(), "src")
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit("The folder you are executing pip from can no longer be found.")

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(src_prefix)


try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()  # type: typing.Optional[str]
except AttributeError:
    user_site = site.USER_SITE
|
||||
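To make ``get_major_minor_version`` concrete, a standalone sketch of the same formatting trick (output depends on the running interpreter):

import sys

# Only the first two components of sys.version_info are consumed by
# the "{}.{}" template; on CPython 3.9.7 this prints "3.9".
print("{}.{}".format(*sys.version_info))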
@ -0,0 +1,13 @@
from typing import List, Optional


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
|
||||
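A hedged sketch of the kind of legacy console script this shim keeps working (assuming the module above is importable as ``pip._internal.main``):

# Hypothetical pre-pip-20 entry-point body that still resolves.
import sys

from pip._internal.main import main

if __name__ == "__main__":
    sys.exit(main())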
@ -0,0 +1,43 @@
from typing import List, Optional

from .base import BaseDistribution, BaseEnvironment


def get_default_environment():
    # type: () -> BaseEnvironment
    """Get the default representation for the current environment.

    This returns an Environment instance from the chosen backend. The default
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    from .pkg_resources import Environment

    return Environment.default()


def get_environment(paths):
    # type: (Optional[List[str]]) -> BaseEnvironment
    """Get a representation of the environment specified by ``paths``.

    This returns an Environment instance from the chosen backend based on the
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    from .pkg_resources import Environment

    return Environment.from_paths(paths)


def get_wheel_distribution(wheel_path, canonical_name):
    # type: (str, str) -> BaseDistribution
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
    the given wheel's ``.dist-info`` directory.

    :param canonical_name: Normalized project name of the given wheel.
    """
    from .pkg_resources import Distribution

    return Distribution.from_wheel(wheel_path, canonical_name)
|
||||
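A minimal sketch of querying the default environment defined above (pip internals, not a public API; "pip" is used only because it is guaranteed to be installed):

from pip._internal.metadata import get_default_environment

dist = get_default_environment().get_distribution("pip")
if dist is not None:
    print(dist.canonical_name, dist.version)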
@ -0,0 +1,142 @@
|
||||
import logging
|
||||
import re
|
||||
from typing import Container, Iterator, List, Optional, Union
|
||||
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.utils.misc import stdlib_pkgs # TODO: Move definition here.
|
||||
|
||||
DistributionVersion = Union[LegacyVersion, Version]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseDistribution:
|
||||
@property
|
||||
def location(self):
|
||||
# type: () -> Optional[str]
|
||||
"""Where the distribution is loaded from.
|
||||
|
||||
A string value is not necessarily a filesystem path, since distributions
|
||||
can be loaded from other sources, e.g. arbitrary zip archives. ``None``
|
||||
means the distribution is created in-memory.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def metadata_version(self):
|
||||
# type: () -> Optional[str]
|
||||
"""Value of "Metadata-Version:" in the distribution, if available."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def canonical_name(self):
|
||||
# type: () -> str
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
# type: () -> DistributionVersion
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def installer(self):
|
||||
# type: () -> str
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def editable(self):
|
||||
# type: () -> bool
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
# type: () -> bool
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def in_usersite(self):
|
||||
# type: () -> bool
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class BaseEnvironment:
|
||||
"""An environment containing distributions to introspect."""
|
||||
|
||||
@classmethod
|
||||
def default(cls):
|
||||
# type: () -> BaseEnvironment
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def from_paths(cls, paths):
|
||||
# type: (Optional[List[str]]) -> BaseEnvironment
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_distribution(self, name):
|
||||
# type: (str) -> Optional[BaseDistribution]
|
||||
"""Given a requirement name, return the installed distributions."""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _iter_distributions(self):
|
||||
# type: () -> Iterator[BaseDistribution]
|
||||
"""Iterate through installed distributions.
|
||||
|
||||
This function should be implemented by subclasses, but never called
directly. Use the public ``iter_distributions()`` instead, which
|
||||
implements additional logic to make sure the distributions are valid.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def iter_distributions(self):
|
||||
# type: () -> Iterator[BaseDistribution]
|
||||
"""Iterate through installed distributions."""
|
||||
for dist in self._iter_distributions():
|
||||
# Make sure the distribution actually comes from a valid Python
|
||||
# packaging distribution. Pip's AdjacentTempDirectory leaves folders
|
||||
# e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
|
||||
# valid project name pattern is taken from PEP 508.
|
||||
project_name_valid = re.match(
|
||||
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
|
||||
dist.canonical_name,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
if not project_name_valid:
|
||||
logger.warning(
|
||||
"Ignoring invalid distribution %s (%s)",
|
||||
dist.canonical_name,
|
||||
dist.location,
|
||||
)
|
||||
continue
|
||||
yield dist
|
||||
|
||||
def iter_installed_distributions(
|
||||
self,
|
||||
local_only=True, # type: bool
|
||||
skip=stdlib_pkgs, # type: Container[str]
|
||||
include_editables=True, # type: bool
|
||||
editables_only=False, # type: bool
|
||||
user_only=False, # type: bool
|
||||
):
|
||||
# type: (...) -> Iterator[BaseDistribution]
|
||||
"""Return a list of installed distributions.
|
||||
|
||||
:param local_only: If True (default), only return installations
|
||||
local to the current virtualenv, if in a virtualenv.
|
||||
:param skip: An iterable of canonicalized project names to ignore;
|
||||
defaults to ``stdlib_pkgs``.
|
||||
:param include_editables: If False, don't report editables.
|
||||
:param editables_only: If True, only report editables.
|
||||
:param user_only: If True, only report installations in the user
|
||||
site directory.
|
||||
"""
|
||||
it = self.iter_distributions()
|
||||
if local_only:
|
||||
it = (d for d in it if d.local)
|
||||
if not include_editables:
|
||||
it = (d for d in it if not d.editable)
|
||||
if editables_only:
|
||||
it = (d for d in it if d.editable)
|
||||
if user_only:
|
||||
it = (d for d in it if d.in_usersite)
|
||||
return (d for d in it if d.canonical_name not in skip)
|
||||
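To make the generator-pipeline filtering above concrete, a hedged sketch that lists local editable installs (each flag wraps the iterator in another generator expression, so nothing is materialized until iteration):

from pip._internal.metadata import get_default_environment

env = get_default_environment()
for dist in env.iter_installed_distributions(local_only=True,
                                             editables_only=True):
    print(dist.canonical_name, dist.location)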
@ -0,0 +1,126 @@
|
||||
import zipfile
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
|
||||
from pip._internal.utils import misc # TODO: Move definition here.
|
||||
from pip._internal.utils.packaging import get_installer
|
||||
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
|
||||
|
||||
from .base import BaseDistribution, BaseEnvironment, DistributionVersion
|
||||
|
||||
|
||||
class Distribution(BaseDistribution):
|
||||
def __init__(self, dist):
|
||||
# type: (pkg_resources.Distribution) -> None
|
||||
self._dist = dist
|
||||
|
||||
@classmethod
|
||||
def from_wheel(cls, path, name):
|
||||
# type: (str, str) -> Distribution
|
||||
with zipfile.ZipFile(path, allowZip64=True) as zf:
|
||||
dist = pkg_resources_distribution_for_wheel(zf, name, path)
|
||||
return cls(dist)
|
||||
|
||||
@property
|
||||
def location(self):
|
||||
# type: () -> Optional[str]
|
||||
return self._dist.location
|
||||
|
||||
@property
|
||||
def metadata_version(self):
|
||||
# type: () -> Optional[str]
|
||||
for line in self._dist.get_metadata_lines(self._dist.PKG_INFO):
|
||||
if line.lower().startswith("metadata-version:"):
|
||||
return line.split(":", 1)[-1].strip()
|
||||
return None
|
||||
|
||||
@property
|
||||
def canonical_name(self):
|
||||
# type: () -> str
|
||||
return canonicalize_name(self._dist.project_name)
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
# type: () -> DistributionVersion
|
||||
return parse_version(self._dist.version)
|
||||
|
||||
@property
|
||||
def installer(self):
|
||||
# type: () -> str
|
||||
return get_installer(self._dist)
|
||||
|
||||
@property
|
||||
def editable(self):
|
||||
# type: () -> bool
|
||||
return misc.dist_is_editable(self._dist)
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
# type: () -> bool
|
||||
return misc.dist_is_local(self._dist)
|
||||
|
||||
@property
|
||||
def in_usersite(self):
|
||||
# type: () -> bool
|
||||
return misc.dist_in_usersite(self._dist)
|
||||
|
||||
|
||||
class Environment(BaseEnvironment):
|
||||
def __init__(self, ws):
|
||||
# type: (pkg_resources.WorkingSet) -> None
|
||||
self._ws = ws
|
||||
|
||||
@classmethod
|
||||
def default(cls):
|
||||
# type: () -> BaseEnvironment
|
||||
return cls(pkg_resources.working_set)
|
||||
|
||||
@classmethod
|
||||
def from_paths(cls, paths):
|
||||
# type: (Optional[List[str]]) -> BaseEnvironment
|
||||
return cls(pkg_resources.WorkingSet(paths))
|
||||
|
||||
def _search_distribution(self, name):
|
||||
# type: (str) -> Optional[BaseDistribution]
|
||||
"""Find a distribution matching the ``name`` in the environment.
|
||||
|
||||
This searches from *all* distributions available in the environment, to
|
||||
match the behavior of ``pkg_resources.get_distribution()``.
|
||||
"""
|
||||
canonical_name = canonicalize_name(name)
|
||||
for dist in self.iter_distributions():
|
||||
if dist.canonical_name == canonical_name:
|
||||
return dist
|
||||
return None
|
||||
|
||||
def get_distribution(self, name):
|
||||
# type: (str) -> Optional[BaseDistribution]
|
||||
|
||||
# Search the distribution by looking through the working set.
|
||||
dist = self._search_distribution(name)
|
||||
if dist:
|
||||
return dist
|
||||
|
||||
# If distribution could not be found, call working_set.require to
|
||||
# update the working set, and try to find the distribution again.
|
||||
# This might happen for e.g. when you install a package twice, once
|
||||
# using setup.py develop and again using setup.py install. Now when
|
||||
# running pip uninstall twice, the package gets removed from the
|
||||
# working set in the first uninstall, so we have to populate the
|
||||
# working set again so that pip knows about it and the packages gets
|
||||
# picked up and is successfully uninstalled the second time too.
|
||||
try:
|
||||
# We didn't pass in any version specifiers, so this can never
|
||||
# raise pkg_resources.VersionConflict.
|
||||
self._ws.require(name)
|
||||
except pkg_resources.DistributionNotFound:
|
||||
return None
|
||||
return self._search_distribution(name)
|
||||
|
||||
def _iter_distributions(self):
|
||||
# type: () -> Iterator[BaseDistribution]
|
||||
for dist in self._ws:
|
||||
yield Distribution(dist)
|
||||
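A hedged sketch of ``Distribution.from_wheel`` (the wheel filename is illustrative; any valid wheel on disk would do):

from pip._internal.metadata.pkg_resources import Distribution

# Reads metadata straight out of the wheel's .dist-info directory
# without installing anything.
dist = Distribution.from_wheel("requests-2.25.1-py2.py3-none-any.whl",
                               "requests")
print(dist.version, dist.metadata_version)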
@ -0,0 +1,2 @@
"""A package that contains models that represent entities.
"""
@ -0,0 +1,34 @@
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.utils.models import KeyBasedCompareMixin
|
||||
|
||||
|
||||
class InstallationCandidate(KeyBasedCompareMixin):
|
||||
"""Represents a potential "candidate" for installation.
|
||||
"""
|
||||
|
||||
__slots__ = ["name", "version", "link"]
|
||||
|
||||
def __init__(self, name, version, link):
|
||||
# type: (str, str, Link) -> None
|
||||
self.name = name
|
||||
self.version = parse_version(version)
|
||||
self.link = link
|
||||
|
||||
super().__init__(
|
||||
key=(self.name, self.version, self.link),
|
||||
defining_class=InstallationCandidate
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
|
||||
self.name, self.version, self.link,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
return '{!r} candidate (version {} at {})'.format(
|
||||
self.name, self.version, self.link,
|
||||
)
|
||||
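Because ``InstallationCandidate`` compares through its ``(name, version, link)`` key, candidates order naturally by parsed version; a hedged sketch with a made-up link:

from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.link import Link

link = Link("https://example.com/pkg-1.0.tar.gz")  # hypothetical URL
older = InstallationCandidate("pkg", "1.9", link)
newer = InstallationCandidate("pkg", "1.10", link)
print(older < newer)  # True: "1.10" parses as greater than "1.9"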
@ -0,0 +1,233 @@
|
||||
""" PEP 610 """
|
||||
import json
|
||||
import re
|
||||
import urllib.parse
|
||||
from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
|
||||
|
||||
__all__ = [
|
||||
"DirectUrl",
|
||||
"DirectUrlValidationError",
|
||||
"DirInfo",
|
||||
"ArchiveInfo",
|
||||
"VcsInfo",
|
||||
]
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
DIRECT_URL_METADATA_NAME = "direct_url.json"
|
||||
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
|
||||
|
||||
|
||||
class DirectUrlValidationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def _get(d, expected_type, key, default=None):
|
||||
# type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T]
|
||||
"""Get value from dictionary and verify expected type."""
|
||||
if key not in d:
|
||||
return default
|
||||
value = d[key]
|
||||
if not isinstance(value, expected_type):
|
||||
raise DirectUrlValidationError(
|
||||
"{!r} has unexpected type for {} (expected {})".format(
|
||||
value, key, expected_type
|
||||
)
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def _get_required(d, expected_type, key, default=None):
|
||||
# type: (Dict[str, Any], Type[T], str, Optional[T]) -> T
|
||||
value = _get(d, expected_type, key, default)
|
||||
if value is None:
|
||||
raise DirectUrlValidationError(f"{key} must have a value")
|
||||
return value
|
||||
|
||||
|
||||
def _exactly_one_of(infos):
|
||||
# type: (Iterable[Optional[InfoType]]) -> InfoType
|
||||
infos = [info for info in infos if info is not None]
|
||||
if not infos:
|
||||
raise DirectUrlValidationError(
|
||||
"missing one of archive_info, dir_info, vcs_info"
|
||||
)
|
||||
if len(infos) > 1:
|
||||
raise DirectUrlValidationError(
|
||||
"more than one of archive_info, dir_info, vcs_info"
|
||||
)
|
||||
assert infos[0] is not None
|
||||
return infos[0]
|
||||
|
||||
|
||||
def _filter_none(**kwargs):
|
||||
# type: (Any) -> Dict[str, Any]
|
||||
"""Make dict excluding None values."""
|
||||
return {k: v for k, v in kwargs.items() if v is not None}
|
||||
|
||||
|
||||
class VcsInfo:
|
||||
name = "vcs_info"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
vcs, # type: str
|
||||
commit_id, # type: str
|
||||
requested_revision=None, # type: Optional[str]
|
||||
resolved_revision=None, # type: Optional[str]
|
||||
resolved_revision_type=None, # type: Optional[str]
|
||||
):
|
||||
self.vcs = vcs
|
||||
self.requested_revision = requested_revision
|
||||
self.commit_id = commit_id
|
||||
self.resolved_revision = resolved_revision
|
||||
self.resolved_revision_type = resolved_revision_type
|
||||
|
||||
@classmethod
|
||||
def _from_dict(cls, d):
|
||||
# type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo]
|
||||
if d is None:
|
||||
return None
|
||||
return cls(
|
||||
vcs=_get_required(d, str, "vcs"),
|
||||
commit_id=_get_required(d, str, "commit_id"),
|
||||
requested_revision=_get(d, str, "requested_revision"),
|
||||
resolved_revision=_get(d, str, "resolved_revision"),
|
||||
resolved_revision_type=_get(d, str, "resolved_revision_type"),
|
||||
)
|
||||
|
||||
def _to_dict(self):
|
||||
# type: () -> Dict[str, Any]
|
||||
return _filter_none(
|
||||
vcs=self.vcs,
|
||||
requested_revision=self.requested_revision,
|
||||
commit_id=self.commit_id,
|
||||
resolved_revision=self.resolved_revision,
|
||||
resolved_revision_type=self.resolved_revision_type,
|
||||
)
|
||||
|
||||
|
||||
class ArchiveInfo:
|
||||
name = "archive_info"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hash=None, # type: Optional[str]
|
||||
):
|
||||
self.hash = hash
|
||||
|
||||
@classmethod
|
||||
def _from_dict(cls, d):
|
||||
# type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo]
|
||||
if d is None:
|
||||
return None
|
||||
return cls(hash=_get(d, str, "hash"))
|
||||
|
||||
def _to_dict(self):
|
||||
# type: () -> Dict[str, Any]
|
||||
return _filter_none(hash=self.hash)
|
||||
|
||||
|
||||
class DirInfo:
|
||||
name = "dir_info"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
editable=False, # type: bool
|
||||
):
|
||||
self.editable = editable
|
||||
|
||||
@classmethod
|
||||
def _from_dict(cls, d):
|
||||
# type: (Optional[Dict[str, Any]]) -> Optional[DirInfo]
|
||||
if d is None:
|
||||
return None
|
||||
return cls(
|
||||
editable=_get_required(d, bool, "editable", default=False)
|
||||
)
|
||||
|
||||
def _to_dict(self):
|
||||
# type: () -> Dict[str, Any]
|
||||
return _filter_none(editable=self.editable or None)
|
||||
|
||||
|
||||
InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
|
||||
|
||||
|
||||
class DirectUrl:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url, # type: str
|
||||
info, # type: InfoType
|
||||
subdirectory=None, # type: Optional[str]
|
||||
):
|
||||
self.url = url
|
||||
self.info = info
|
||||
self.subdirectory = subdirectory
|
||||
|
||||
def _remove_auth_from_netloc(self, netloc):
|
||||
# type: (str) -> str
|
||||
if "@" not in netloc:
|
||||
return netloc
|
||||
user_pass, netloc_no_user_pass = netloc.split("@", 1)
|
||||
if (
|
||||
isinstance(self.info, VcsInfo) and
|
||||
self.info.vcs == "git" and
|
||||
user_pass == "git"
|
||||
):
|
||||
return netloc
|
||||
if ENV_VAR_RE.match(user_pass):
|
||||
return netloc
|
||||
return netloc_no_user_pass
|
||||
|
||||
@property
|
||||
def redacted_url(self):
|
||||
# type: () -> str
|
||||
"""url with user:password part removed unless it is formed with
|
||||
environment variables as specified in PEP 610, or it is ``git``
|
||||
in the case of a git URL.
|
||||
"""
|
||||
purl = urllib.parse.urlsplit(self.url)
|
||||
netloc = self._remove_auth_from_netloc(purl.netloc)
|
||||
surl = urllib.parse.urlunsplit(
|
||||
(purl.scheme, netloc, purl.path, purl.query, purl.fragment)
|
||||
)
|
||||
return surl
|
||||
|
||||
def validate(self):
|
||||
# type: () -> None
|
||||
self.from_dict(self.to_dict())
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
# type: (Dict[str, Any]) -> DirectUrl
|
||||
return DirectUrl(
|
||||
url=_get_required(d, str, "url"),
|
||||
subdirectory=_get(d, str, "subdirectory"),
|
||||
info=_exactly_one_of(
|
||||
[
|
||||
ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
|
||||
DirInfo._from_dict(_get(d, dict, "dir_info")),
|
||||
VcsInfo._from_dict(_get(d, dict, "vcs_info")),
|
||||
]
|
||||
),
|
||||
)
|
||||
|
||||
def to_dict(self):
|
||||
# type: () -> Dict[str, Any]
|
||||
res = _filter_none(
|
||||
url=self.redacted_url,
|
||||
subdirectory=self.subdirectory,
|
||||
)
|
||||
res[self.info.name] = self.info._to_dict()
|
||||
return res
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, s):
|
||||
# type: (str) -> DirectUrl
|
||||
return cls.from_dict(json.loads(s))
|
||||
|
||||
def to_json(self):
|
||||
# type: () -> str
|
||||
return json.dumps(self.to_dict(), sort_keys=True)
|
||||
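A PEP 610 round-trip sketch using the classes above (the URL and commit id are placeholders):

from pip._internal.models.direct_url import DirectUrl, VcsInfo

info = VcsInfo(vcs="git", commit_id="0" * 40)  # placeholder commit id
direct_url = DirectUrl(url="https://github.com/pypa/pip.git", info=info)
print(direct_url.to_json())
# {"url": "https://github.com/pypa/pip.git", "vcs_info": {...}}
assert DirectUrl.from_json(direct_url.to_json()).url == direct_url.url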
@ -0,0 +1,86 @@
|
||||
from typing import FrozenSet, Optional, Set
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.exceptions import CommandError
|
||||
|
||||
|
||||
class FormatControl:
|
||||
"""Helper for managing formats from which a package can be installed.
|
||||
"""
|
||||
|
||||
__slots__ = ["no_binary", "only_binary"]
|
||||
|
||||
def __init__(self, no_binary=None, only_binary=None):
|
||||
# type: (Optional[Set[str]], Optional[Set[str]]) -> None
|
||||
if no_binary is None:
|
||||
no_binary = set()
|
||||
if only_binary is None:
|
||||
only_binary = set()
|
||||
|
||||
self.no_binary = no_binary
|
||||
self.only_binary = only_binary
|
||||
|
||||
def __eq__(self, other):
|
||||
# type: (object) -> bool
|
||||
if not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
|
||||
if self.__slots__ != other.__slots__:
|
||||
return False
|
||||
|
||||
return all(
|
||||
getattr(self, k) == getattr(other, k)
|
||||
for k in self.__slots__
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "{}({}, {})".format(
|
||||
self.__class__.__name__,
|
||||
self.no_binary,
|
||||
self.only_binary
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def handle_mutual_excludes(value, target, other):
|
||||
# type: (str, Set[str], Set[str]) -> None
|
||||
if value.startswith('-'):
|
||||
raise CommandError(
|
||||
"--no-binary / --only-binary option requires 1 argument."
|
||||
)
|
||||
new = value.split(',')
|
||||
while ':all:' in new:
|
||||
other.clear()
|
||||
target.clear()
|
||||
target.add(':all:')
|
||||
del new[:new.index(':all:') + 1]
|
||||
# Without a ':none:' after it, we want to discard everything, as ':all:' covers it
|
||||
if ':none:' not in new:
|
||||
return
|
||||
for name in new:
|
||||
if name == ':none:':
|
||||
target.clear()
|
||||
continue
|
||||
name = canonicalize_name(name)
|
||||
other.discard(name)
|
||||
target.add(name)
|
||||
|
||||
def get_allowed_formats(self, canonical_name):
|
||||
# type: (str) -> FrozenSet[str]
|
||||
result = {"binary", "source"}
|
||||
if canonical_name in self.only_binary:
|
||||
result.discard('source')
|
||||
elif canonical_name in self.no_binary:
|
||||
result.discard('binary')
|
||||
elif ':all:' in self.only_binary:
|
||||
result.discard('source')
|
||||
elif ':all:' in self.no_binary:
|
||||
result.discard('binary')
|
||||
return frozenset(result)
|
||||
|
||||
def disallow_binaries(self):
|
||||
# type: () -> None
|
||||
self.handle_mutual_excludes(
|
||||
':all:', self.no_binary, self.only_binary,
|
||||
)
|
||||
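A hedged sketch of how ``handle_mutual_excludes`` moves names between the two sets, mirroring pip's --no-binary/--only-binary handling ("lxml" and "requests" are arbitrary names):

from pip._internal.models.format_control import FormatControl

fc = FormatControl()
# Disallow wheels for everything, then re-allow them for one name.
fc.handle_mutual_excludes(":all:", fc.no_binary, fc.only_binary)
fc.handle_mutual_excludes("lxml", fc.only_binary, fc.no_binary)
print(fc.get_allowed_formats("lxml"))      # frozenset({'binary'})
print(fc.get_allowed_formats("requests"))  # frozenset({'source'})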
@ -0,0 +1,34 @@
import urllib.parse


class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints
    """

    __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
                 'file_storage_domain']

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls; it is only necessary until
        # PyPI can block such packages themselves.
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        return urllib.parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
|
||||
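Usage of the two module-level instances is straightforward; a short sketch:

from pip._internal.models.index import PyPI, TestPyPI

print(PyPI.simple_url)   # https://pypi.org/simple
print(TestPyPI.netloc)   # test.pypi.org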
@ -0,0 +1,248 @@
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
import urllib.parse
|
||||
from typing import TYPE_CHECKING, Optional, Tuple, Union
|
||||
|
||||
from pip._internal.utils.filetypes import WHEEL_EXTENSION
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
from pip._internal.utils.misc import (
|
||||
redact_auth_from_url,
|
||||
split_auth_from_netloc,
|
||||
splitext,
|
||||
)
|
||||
from pip._internal.utils.models import KeyBasedCompareMixin
|
||||
from pip._internal.utils.urls import path_to_url, url_to_path
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.collector import HTMLPage
|
||||
|
||||
|
||||
class Link(KeyBasedCompareMixin):
|
||||
"""Represents a parsed link from a Package Index's simple URL
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
"_parsed_url",
|
||||
"_url",
|
||||
"comes_from",
|
||||
"requires_python",
|
||||
"yanked_reason",
|
||||
"cache_link_parsing",
|
||||
]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url, # type: str
|
||||
comes_from=None, # type: Optional[Union[str, HTMLPage]]
|
||||
requires_python=None, # type: Optional[str]
|
||||
yanked_reason=None, # type: Optional[str]
|
||||
cache_link_parsing=True, # type: bool
|
||||
):
|
||||
# type: (...) -> None
|
||||
"""
|
||||
:param url: url of the resource pointed to (href of the link)
|
||||
:param comes_from: instance of HTMLPage where the link was found,
|
||||
or string.
|
||||
:param requires_python: String containing the `Requires-Python`
|
||||
metadata field, specified in PEP 345. This may be specified by
|
||||
a data-requires-python attribute in the HTML link tag, as
|
||||
described in PEP 503.
|
||||
:param yanked_reason: the reason the file has been yanked, if the
|
||||
file has been yanked, or None if the file hasn't been yanked.
|
||||
This is the value of the "data-yanked" attribute, if present, in
|
||||
a simple repository HTML link. If the file has been yanked but
|
||||
no reason was provided, this should be the empty string. See
|
||||
PEP 592 for more information and the specification.
|
||||
:param cache_link_parsing: A flag that is used elsewhere to determine
|
||||
whether resources retrieved from this link
|
||||
should be cached. PyPI index urls should
|
||||
generally have this set to False, for
|
||||
example.
|
||||
"""
|
||||
|
||||
# url can be a UNC windows share
|
||||
if url.startswith('\\\\'):
|
||||
url = path_to_url(url)
|
||||
|
||||
self._parsed_url = urllib.parse.urlsplit(url)
|
||||
# Store the url as a private attribute to prevent accidentally
|
||||
# trying to set a new value.
|
||||
self._url = url
|
||||
|
||||
self.comes_from = comes_from
|
||||
self.requires_python = requires_python if requires_python else None
|
||||
self.yanked_reason = yanked_reason
|
||||
|
||||
super().__init__(key=url, defining_class=Link)
|
||||
|
||||
self.cache_link_parsing = cache_link_parsing
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
if self.requires_python:
|
||||
rp = f' (requires-python:{self.requires_python})'
|
||||
else:
|
||||
rp = ''
|
||||
if self.comes_from:
|
||||
return '{} (from {}){}'.format(
|
||||
redact_auth_from_url(self._url), self.comes_from, rp)
|
||||
else:
|
||||
return redact_auth_from_url(str(self._url))
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return f'<Link {self}>'
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
# type: () -> str
|
||||
return self._url
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
# type: () -> str
|
||||
path = self.path.rstrip('/')
|
||||
name = posixpath.basename(path)
|
||||
if not name:
|
||||
# Make sure we don't leak auth information if the netloc
|
||||
# includes a username and password.
|
||||
netloc, user_pass = split_auth_from_netloc(self.netloc)
|
||||
return netloc
|
||||
|
||||
name = urllib.parse.unquote(name)
|
||||
assert name, f'URL {self._url!r} produced no filename'
|
||||
return name
|
||||
|
||||
@property
|
||||
def file_path(self):
|
||||
# type: () -> str
|
||||
return url_to_path(self.url)
|
||||
|
||||
@property
|
||||
def scheme(self):
|
||||
# type: () -> str
|
||||
return self._parsed_url.scheme
|
||||
|
||||
@property
|
||||
def netloc(self):
|
||||
# type: () -> str
|
||||
"""
|
||||
This can contain auth information.
|
||||
"""
|
||||
return self._parsed_url.netloc
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
# type: () -> str
|
||||
return urllib.parse.unquote(self._parsed_url.path)
|
||||
|
||||
def splitext(self):
|
||||
# type: () -> Tuple[str, str]
|
||||
return splitext(posixpath.basename(self.path.rstrip('/')))
|
||||
|
||||
@property
|
||||
def ext(self):
|
||||
# type: () -> str
|
||||
return self.splitext()[1]
|
||||
|
||||
@property
|
||||
def url_without_fragment(self):
|
||||
# type: () -> str
|
||||
scheme, netloc, path, query, fragment = self._parsed_url
|
||||
return urllib.parse.urlunsplit((scheme, netloc, path, query, None))
|
||||
|
||||
_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
|
||||
|
||||
@property
|
||||
def egg_fragment(self):
|
||||
# type: () -> Optional[str]
|
||||
match = self._egg_fragment_re.search(self._url)
|
||||
if not match:
|
||||
return None
|
||||
return match.group(1)
|
||||
|
||||
_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
|
||||
|
||||
@property
|
||||
def subdirectory_fragment(self):
|
||||
# type: () -> Optional[str]
|
||||
match = self._subdirectory_fragment_re.search(self._url)
|
||||
if not match:
|
||||
return None
|
||||
return match.group(1)
|
||||
|
||||
_hash_re = re.compile(
|
||||
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
|
||||
)
|
||||
|
||||
@property
|
||||
def hash(self):
|
||||
# type: () -> Optional[str]
|
||||
match = self._hash_re.search(self._url)
|
||||
if match:
|
||||
return match.group(2)
|
||||
return None
|
||||
|
||||
@property
|
||||
def hash_name(self):
|
||||
# type: () -> Optional[str]
|
||||
match = self._hash_re.search(self._url)
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
@property
|
||||
def show_url(self):
|
||||
# type: () -> str
|
||||
return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
|
||||
|
||||
@property
|
||||
def is_file(self):
|
||||
# type: () -> bool
|
||||
return self.scheme == 'file'
|
||||
|
||||
def is_existing_dir(self):
|
||||
# type: () -> bool
|
||||
return self.is_file and os.path.isdir(self.file_path)
|
||||
|
||||
@property
|
||||
def is_wheel(self):
|
||||
# type: () -> bool
|
||||
return self.ext == WHEEL_EXTENSION
|
||||
|
||||
@property
|
||||
def is_vcs(self):
|
||||
# type: () -> bool
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
return self.scheme in vcs.all_schemes
|
||||
|
||||
@property
|
||||
def is_yanked(self):
|
||||
# type: () -> bool
|
||||
return self.yanked_reason is not None
|
||||
|
||||
@property
|
||||
def has_hash(self):
|
||||
# type: () -> bool
|
||||
return self.hash_name is not None
|
||||
|
||||
def is_hash_allowed(self, hashes):
|
||||
# type: (Optional[Hashes]) -> bool
|
||||
"""
|
||||
Return True if the link has a hash and it is allowed.
|
||||
"""
|
||||
if hashes is None or not self.has_hash:
|
||||
return False
|
||||
# Assert non-None so mypy knows self.hash_name and self.hash are str.
|
||||
assert self.hash_name is not None
|
||||
assert self.hash is not None
|
||||
|
||||
return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
|
||||
|
||||
|
||||
# TODO: Relax this comparison logic to ignore, for example, fragments.
|
||||
def links_equivalent(link1, link2):
|
||||
# type: (Link, Link) -> bool
|
||||
return link1 == link2
|
||||
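A hedged sketch exercising the fragment parsing above on a made-up URL:

from pip._internal.models.link import Link

link = Link(
    "https://files.example.com/pkg-1.0-py3-none-any.whl"
    "#sha256=deadbeef&egg=pkg"  # hypothetical hash and egg fragment
)
print(link.filename)              # pkg-1.0-py3-none-any.whl
print(link.is_wheel)              # True
print(link.hash_name, link.hash)  # sha256 deadbeef
print(link.egg_fragment)          # pkg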
@ -0,0 +1,31 @@
"""
For types associated with installation schemes.

For a general overview of available schemes and their context, see
https://docs.python.org/3/install/index.html#alternate-installation.
"""


SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data']


class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    __slots__ = SCHEME_KEYS

    def __init__(
        self,
        platlib,  # type: str
        purelib,  # type: str
        headers,  # type: str
        scripts,  # type: str
        data,  # type: str
    ):
        self.platlib = platlib
        self.purelib = purelib
        self.headers = headers
        self.scripts = scripts
        self.data = data
|
||||
@ -0,0 +1,131 @@
|
||||
import itertools
|
||||
import logging
|
||||
import os
|
||||
import posixpath
|
||||
import urllib.parse
|
||||
from typing import List
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.models.index import PyPI
|
||||
from pip._internal.utils.compat import has_tls
|
||||
from pip._internal.utils.misc import normalize_path, redact_auth_from_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchScope:
|
||||
|
||||
"""
|
||||
Encapsulates the locations that pip is configured to search.
|
||||
"""
|
||||
|
||||
__slots__ = ["find_links", "index_urls"]
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
find_links, # type: List[str]
|
||||
index_urls, # type: List[str]
|
||||
):
|
||||
# type: (...) -> SearchScope
|
||||
"""
|
||||
Create a SearchScope object after normalizing the `find_links`.
|
||||
"""
|
||||
# Build find_links. If an argument starts with ~, it may be
|
||||
# a local file relative to a home directory. So try normalizing
|
||||
# it and if it exists, use the normalized version.
|
||||
# This is deliberately conservative - it might be fine just to
|
||||
# blindly normalize anything starting with a ~...
|
||||
built_find_links = [] # type: List[str]
|
||||
for link in find_links:
|
||||
if link.startswith('~'):
|
||||
new_link = normalize_path(link)
|
||||
if os.path.exists(new_link):
|
||||
link = new_link
|
||||
built_find_links.append(link)
|
||||
|
||||
# If we don't have TLS enabled, then WARN if anyplace we're looking
|
||||
# relies on TLS.
|
||||
if not has_tls():
|
||||
for link in itertools.chain(index_urls, built_find_links):
|
||||
parsed = urllib.parse.urlparse(link)
|
||||
if parsed.scheme == 'https':
|
||||
logger.warning(
|
||||
'pip is configured with locations that require '
|
||||
'TLS/SSL, however the ssl module in Python is not '
|
||||
'available.'
|
||||
)
|
||||
break
|
||||
|
||||
return cls(
|
||||
find_links=built_find_links,
|
||||
index_urls=index_urls,
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
find_links, # type: List[str]
|
||||
index_urls, # type: List[str]
|
||||
):
|
||||
# type: (...) -> None
|
||||
self.find_links = find_links
|
||||
self.index_urls = index_urls
|
||||
|
||||
def get_formatted_locations(self):
|
||||
# type: () -> str
|
||||
lines = []
|
||||
redacted_index_urls = []
|
||||
if self.index_urls and self.index_urls != [PyPI.simple_url]:
|
||||
for url in self.index_urls:
|
||||
|
||||
redacted_index_url = redact_auth_from_url(url)
|
||||
|
||||
# Parse the URL
|
||||
purl = urllib.parse.urlsplit(redacted_index_url)
|
||||
|
||||
# URL is generally invalid if scheme and netloc are missing;
# there are issues with Python and URL parsing, so this test
# is a bit crude. See bpo-20271, bpo-23505. Python doesn't
# always parse invalid URLs correctly - it should raise
# exceptions for malformed URLs.
|
||||
if not purl.scheme and not purl.netloc:
|
||||
logger.warning(
|
||||
'The index url "%s" seems invalid, '
|
||||
'please provide a scheme.', redacted_index_url)
|
||||
|
||||
redacted_index_urls.append(redacted_index_url)
|
||||
|
||||
lines.append('Looking in indexes: {}'.format(
|
||||
', '.join(redacted_index_urls)))
|
||||
|
||||
if self.find_links:
|
||||
lines.append(
|
||||
'Looking in links: {}'.format(', '.join(
|
||||
redact_auth_from_url(url) for url in self.find_links))
|
||||
)
|
||||
return '\n'.join(lines)
|
||||
|
||||
def get_index_urls_locations(self, project_name):
|
||||
# type: (str) -> List[str]
|
||||
"""Returns the locations found via self.index_urls
|
||||
|
||||
Checks the url_name on the main (first in the list) index and
uses this url_name to produce all locations.
|
||||
"""
|
||||
|
||||
def mkurl_pypi_url(url):
|
||||
# type: (str) -> str
|
||||
loc = posixpath.join(
|
||||
url,
|
||||
urllib.parse.quote(canonicalize_name(project_name)))
|
||||
# For maximum compatibility with easy_install, ensure the path
|
||||
# ends in a trailing slash. Although this isn't in the spec
|
||||
# (and PyPI can handle it without the slash) some other index
|
||||
# implementations might break if they relied on easy_install's
|
||||
# behavior.
|
||||
if not loc.endswith('/'):
|
||||
loc = loc + '/'
|
||||
return loc
|
||||
|
||||
return [mkurl_pypi_url(url) for url in self.index_urls]
|
||||
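A sketch of ``SearchScope`` in use (the index URL is PyPI's real simple index; the project name is arbitrary):

from pip._internal.models.search_scope import SearchScope

scope = SearchScope.create(
    find_links=[],
    index_urls=["https://pypi.org/simple"],
)
print(scope.get_index_urls_locations("Django"))
# ['https://pypi.org/simple/django/'] -- canonicalized, trailing slash added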
@ -0,0 +1,47 @@
|
||||
from typing import Optional
|
||||
|
||||
from pip._internal.models.format_control import FormatControl
|
||||
|
||||
|
||||
class SelectionPreferences:
|
||||
"""
|
||||
Encapsulates the candidate selection preferences for downloading
|
||||
and installing files.
|
||||
"""
|
||||
|
||||
__slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control',
|
||||
'prefer_binary', 'ignore_requires_python']
|
||||
|
||||
# Don't include an allow_yanked default value to make sure each call
|
||||
# site considers whether yanked releases are allowed. This also causes
|
||||
# that decision to be made explicit in the calling code, which helps
|
||||
# people when reading the code.
|
||||
def __init__(
|
||||
self,
|
||||
allow_yanked, # type: bool
|
||||
allow_all_prereleases=False, # type: bool
|
||||
format_control=None, # type: Optional[FormatControl]
|
||||
prefer_binary=False, # type: bool
|
||||
ignore_requires_python=None, # type: Optional[bool]
|
||||
):
|
||||
# type: (...) -> None
|
||||
"""Create a SelectionPreferences object.
|
||||
|
||||
:param allow_yanked: Whether files marked as yanked (in the sense
|
||||
of PEP 592) are permitted to be candidates for install.
|
||||
:param format_control: A FormatControl object or None. Used to control
|
||||
the selection of source packages / binary packages when consulting
|
||||
the index and links.
|
||||
:param prefer_binary: Whether to prefer an old, but valid, binary
|
||||
dist over a new source dist.
|
||||
:param ignore_requires_python: Whether to ignore incompatible
|
||||
"Requires-Python" values in links. Defaults to False.
|
||||
"""
|
||||
if ignore_requires_python is None:
|
||||
ignore_requires_python = False
|
||||
|
||||
self.allow_yanked = allow_yanked
|
||||
self.allow_all_prereleases = allow_all_prereleases
|
||||
self.format_control = format_control
|
||||
self.prefer_binary = prefer_binary
|
||||
self.ignore_requires_python = ignore_requires_python
|
||||
@ -0,0 +1,114 @@
|
||||
import sys
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pip._vendor.packaging.tags import Tag
|
||||
|
||||
from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
|
||||
from pip._internal.utils.misc import normalize_version_info
|
||||
|
||||
|
||||
class TargetPython:
|
||||
|
||||
"""
|
||||
Encapsulates the properties of a Python interpreter one is targeting
|
||||
for a package install, download, etc.
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
"_given_py_version_info",
|
||||
"abis",
|
||||
"implementation",
|
||||
"platforms",
|
||||
"py_version",
|
||||
"py_version_info",
|
||||
"_valid_tags",
|
||||
]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
platforms=None, # type: Optional[List[str]]
|
||||
py_version_info=None, # type: Optional[Tuple[int, ...]]
|
||||
abis=None, # type: Optional[List[str]]
|
||||
implementation=None, # type: Optional[str]
|
||||
):
|
||||
# type: (...) -> None
|
||||
"""
|
||||
:param platforms: A list of strings or None. If None, searches for
|
||||
packages that are supported by the current system. Otherwise, will
|
||||
find packages that can be built on the platforms passed in. These
|
||||
packages will only be downloaded for distribution: they will
|
||||
not be built locally.
|
||||
:param py_version_info: An optional tuple of ints representing the
|
||||
Python version information to use (e.g. `sys.version_info[:3]`).
|
||||
This can have length 1, 2, or 3 when provided.
|
||||
:param abis: A list of strings or None. This is passed to
|
||||
compatibility_tags.py's get_supported() function as is.
|
||||
:param implementation: A string or None. This is passed to
|
||||
compatibility_tags.py's get_supported() function as is.
|
||||
"""
|
||||
# Store the given py_version_info for when we call get_supported().
|
||||
self._given_py_version_info = py_version_info
|
||||
|
||||
if py_version_info is None:
|
||||
py_version_info = sys.version_info[:3]
|
||||
else:
|
||||
py_version_info = normalize_version_info(py_version_info)
|
||||
|
||||
py_version = '.'.join(map(str, py_version_info[:2]))
|
||||
|
||||
self.abis = abis
|
||||
self.implementation = implementation
|
||||
self.platforms = platforms
|
||||
self.py_version = py_version
|
||||
self.py_version_info = py_version_info
|
||||
|
||||
# This is used to cache the return value of get_tags().
|
||||
self._valid_tags = None # type: Optional[List[Tag]]
|
||||
|
||||
def format_given(self):
|
||||
# type: () -> str
|
||||
"""
|
||||
Format the given, non-None attributes for display.
|
||||
"""
|
||||
display_version = None
|
||||
if self._given_py_version_info is not None:
|
||||
display_version = '.'.join(
|
||||
str(part) for part in self._given_py_version_info
|
||||
)
|
||||
|
||||
key_values = [
|
||||
('platforms', self.platforms),
|
||||
('version_info', display_version),
|
||||
('abis', self.abis),
|
||||
('implementation', self.implementation),
|
||||
]
|
||||
return ' '.join(
|
||||
f'{key}={value!r}' for key, value in key_values
|
||||
if value is not None
|
||||
)
|
||||
|
||||
def get_tags(self):
|
||||
# type: () -> List[Tag]
|
||||
"""
|
||||
Return the supported PEP 425 tags to check wheel candidates against.
|
||||
|
||||
The tags are returned in order of preference (most preferred first).
|
||||
"""
|
||||
if self._valid_tags is None:
|
||||
# Pass versions=None if no py_version_info was given since
|
||||
# versions=None uses special default logic.
|
||||
py_version_info = self._given_py_version_info
|
||||
if py_version_info is None:
|
||||
version = None
|
||||
else:
|
||||
version = version_info_to_nodot(py_version_info)
|
||||
|
||||
tags = get_supported(
|
||||
version=version,
|
||||
platforms=self.platforms,
|
||||
abis=self.abis,
|
||||
impl=self.implementation,
|
||||
)
|
||||
self._valid_tags = tags
|
||||
|
||||
return self._valid_tags
|
||||
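A hedged sketch of ``TargetPython`` (the returned tags depend on the interpreter and platform, so the last line's output varies):

from pip._internal.models.target_python import TargetPython

tp = TargetPython(py_version_info=(3, 9))
print(tp.py_version)      # 3.9
print(tp.format_given())  # version_info='3.9'
print(tp.get_tags()[0])   # most-preferred PEP 425 tag for this system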
@ -0,0 +1,95 @@
|
||||
"""Represents a wheel file and provides access to the various parts of the
|
||||
name that have meaning.
|
||||
"""
|
||||
import re
|
||||
from typing import Dict, Iterable, List
|
||||
|
||||
from pip._vendor.packaging.tags import Tag
|
||||
|
||||
from pip._internal.exceptions import InvalidWheelFilename
|
||||
|
||||
|
||||
class Wheel:
|
||||
"""A wheel file"""
|
||||
|
||||
wheel_file_re = re.compile(
|
||||
r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
|
||||
((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
|
||||
\.whl|\.dist-info)$""",
|
||||
re.VERBOSE
|
||||
)
|
||||
|
||||
def __init__(self, filename):
|
||||
# type: (str) -> None
|
||||
"""
|
||||
:raises InvalidWheelFilename: when the filename is invalid for a wheel
|
||||
"""
|
||||
wheel_info = self.wheel_file_re.match(filename)
|
||||
if not wheel_info:
|
||||
raise InvalidWheelFilename(
|
||||
f"{filename} is not a valid wheel filename."
|
||||
)
|
||||
self.filename = filename
|
||||
self.name = wheel_info.group('name').replace('_', '-')
|
||||
# we'll assume "_" means "-" due to wheel naming scheme
|
||||
# (https://github.com/pypa/pip/issues/1150)
|
||||
self.version = wheel_info.group('ver').replace('_', '-')
|
||||
self.build_tag = wheel_info.group('build')
|
||||
self.pyversions = wheel_info.group('pyver').split('.')
|
||||
self.abis = wheel_info.group('abi').split('.')
|
||||
self.plats = wheel_info.group('plat').split('.')
|
||||
|
||||
# All the tag combinations from this file
|
||||
self.file_tags = {
|
||||
Tag(x, y, z) for x in self.pyversions
|
||||
for y in self.abis for z in self.plats
|
||||
}
|
||||
|
||||
def get_formatted_file_tags(self):
|
||||
# type: () -> List[str]
|
||||
"""Return the wheel's tags as a sorted list of strings."""
|
||||
return sorted(str(tag) for tag in self.file_tags)
|
||||
|
||||
def support_index_min(self, tags):
|
||||
# type: (List[Tag]) -> int
|
||||
"""Return the lowest index that one of the wheel's file_tag combinations
|
||||
achieves in the given list of supported tags.
|
||||
|
||||
For example, if there are 8 supported tags and one of the file tags
|
||||
is first in the list, then return 0.
|
||||
|
||||
:param tags: the PEP 425 tags to check the wheel against, in order
|
||||
with most preferred first.
|
||||
|
||||
:raises ValueError: If none of the wheel's file tags match one of
|
||||
the supported tags.
|
||||
"""
|
||||
return min(tags.index(tag) for tag in self.file_tags if tag in tags)
|
||||
|
||||
def find_most_preferred_tag(self, tags, tag_to_priority):
|
||||
# type: (List[Tag], Dict[Tag, int]) -> int
|
||||
"""Return the priority of the most preferred tag that one of the wheel's file
|
||||
tag combinations achieves in the given list of supported tags using the given
|
||||
tag_to_priority mapping, where lower priorities are more-preferred.
|
||||
|
||||
This is used in place of support_index_min in some cases in order to avoid
|
||||
an expensive linear scan of a large list of tags.
|
||||
|
||||
:param tags: the PEP 425 tags to check the wheel against.
|
||||
:param tag_to_priority: a mapping from tag to priority of that tag, where
|
||||
lower is more preferred.
|
||||
|
||||
:raises ValueError: If none of the wheel's file tags match one of
|
||||
the supported tags.
|
||||
"""
|
||||
return min(
|
||||
tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
|
||||
)
|
||||
|
||||
def supported(self, tags):
|
||||
# type: (Iterable[Tag]) -> bool
|
||||
"""Return whether the wheel is compatible with one of the given tags.
|
||||
|
||||
:param tags: the PEP 425 tags to check the wheel against.
|
||||
"""
|
||||
return not self.file_tags.isdisjoint(tags)
|
||||
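Parsing a wheel filename with the class above (the filename is illustrative and follows PEP 427 naming):

from pip._internal.models.wheel import Wheel

w = Wheel("pip-21.0-py3-none-any.whl")
print(w.name, w.version)            # pip 21.0
print(w.get_formatted_file_tags())  # ['py3-none-any']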
@ -0,0 +1,2 @@
"""Contains purely network-related utilities.
"""
@ -0,0 +1,312 @@
|
||||
"""Network Authentication Helpers
|
||||
|
||||
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
||||
providing credentials in the context of network requests.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import urllib.parse
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
||||
from pip._vendor.requests.models import Request, Response
|
||||
from pip._vendor.requests.utils import get_netrc_auth
|
||||
|
||||
from pip._internal.utils.misc import (
|
||||
ask,
|
||||
ask_input,
|
||||
ask_password,
|
||||
remove_auth_from_url,
|
||||
split_auth_netloc_from_url,
|
||||
)
|
||||
from pip._internal.vcs.versioncontrol import AuthInfo
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
Credentials = Tuple[str, str, str]
|
||||
|
||||
try:
|
||||
import keyring
|
||||
except ImportError:
|
||||
keyring = None
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Keyring is skipped due to an exception: %s", str(exc),
|
||||
)
|
||||
keyring = None
|
||||
|
||||
|
||||
def get_keyring_auth(url, username):
|
||||
# type: (Optional[str], Optional[str]) -> Optional[AuthInfo]
|
||||
"""Return the tuple auth for a given url from keyring."""
|
||||
global keyring
|
||||
if not url or not keyring:
|
||||
return None
|
||||
|
||||
try:
|
||||
try:
|
||||
get_credential = keyring.get_credential
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
logger.debug("Getting credentials from keyring for %s", url)
|
||||
cred = get_credential(url, username)
|
||||
if cred is not None:
|
||||
return cred.username, cred.password
|
||||
return None
|
||||
|
||||
if username:
|
||||
logger.debug("Getting password from keyring for %s", url)
|
||||
password = keyring.get_password(url, username)
|
||||
if password:
|
||||
return username, password
|
||||
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Keyring is skipped due to an exception: %s", str(exc),
|
||||
)
|
||||
keyring = None
|
||||
return None
|
||||
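A hedged sketch of the keyring fallback above (requires the optional ``keyring`` package; the URL and username are illustrative):

from pip._internal.network.auth import get_keyring_auth

# Returns a (username, password) tuple when keyring holds a matching
# credential, or None when keyring is unavailable or has no entry.
auth = get_keyring_auth("https://pypi.example.com/simple", "alice")
print(auth)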
|
||||
|
||||
class MultiDomainBasicAuth(AuthBase):
|
||||
|
||||
def __init__(self, prompting=True, index_urls=None):
|
||||
# type: (bool, Optional[List[str]]) -> None
|
||||
self.prompting = prompting
|
||||
self.index_urls = index_urls
|
||||
self.passwords = {} # type: Dict[str, AuthInfo]
|
||||
# When the user is prompted to enter credentials and keyring is
|
||||
# available, we will offer to save them. If the user accepts,
|
||||
# this value is set to the credentials they entered. After the
|
||||
# request authenticates, the caller should call
|
||||
# ``save_credentials`` to save these.
|
||||
self._credentials_to_save = None # type: Optional[Credentials]
|
||||
|
||||
def _get_index_url(self, url):
|
||||
# type: (str) -> Optional[str]
|
||||
"""Return the original index URL matching the requested URL.
|
||||
|
||||
Cached or dynamically generated credentials may work against
|
||||
the original index URL rather than just the netloc.
|
||||
|
||||
The provided url should have had its username and password
|
||||
removed already. If the original index url had credentials then
|
||||
they will be included in the return value.
|
||||
|
||||
Returns None if no matching index was found, or if --no-index
|
||||
was specified by the user.
|
||||
"""
|
||||
if not url or not self.index_urls:
|
||||
return None
|
||||
|
||||
for u in self.index_urls:
|
||||
prefix = remove_auth_from_url(u).rstrip("/") + "/"
|
||||
if url.startswith(prefix):
|
||||
return u
|
||||
return None
|
||||
|
||||
def _get_new_credentials(self, original_url, allow_netrc=True,
|
||||
allow_keyring=False):
|
||||
# type: (str, bool, bool) -> AuthInfo
|
||||
"""Find and return credentials for the specified URL."""
|
||||
# Split the credentials and netloc from the url.
|
||||
url, netloc, url_user_password = split_auth_netloc_from_url(
|
||||
original_url,
|
||||
)
|
||||
|
||||
# Start with the credentials embedded in the url
|
||||
username, password = url_user_password
|
||||
if username is not None and password is not None:
|
||||
logger.debug("Found credentials in url for %s", netloc)
|
||||
return url_user_password
|
||||
|
||||
# Find a matching index url for this request
|
||||
index_url = self._get_index_url(url)
|
||||
if index_url:
|
||||
# Split the credentials from the url.
|
||||
index_info = split_auth_netloc_from_url(index_url)
|
||||
if index_info:
|
||||
index_url, _, index_url_user_password = index_info
|
||||
logger.debug("Found index url %s", index_url)
|
||||
|
||||
# If an index URL was found, try its embedded credentials
|
||||
if index_url and index_url_user_password[0] is not None:
|
||||
username, password = index_url_user_password
|
||||
if username is not None and password is not None:
|
||||
logger.debug("Found credentials in index url for %s", netloc)
|
||||
return index_url_user_password
|
||||
|
||||
# Get creds from netrc if we still don't have them
|
||||
if allow_netrc:
|
||||
netrc_auth = get_netrc_auth(original_url)
|
||||
if netrc_auth:
|
||||
logger.debug("Found credentials in netrc for %s", netloc)
|
||||
return netrc_auth
|
||||
|
||||
# If we don't have a password and keyring is available, use it.
|
||||
if allow_keyring:
|
||||
# The index url is more specific than the netloc, so try it first
|
||||
kr_auth = (
|
||||
get_keyring_auth(index_url, username) or
|
||||
get_keyring_auth(netloc, username)
|
||||
)
|
||||
if kr_auth:
|
||||
logger.debug("Found credentials in keyring for %s", netloc)
|
||||
return kr_auth
|
||||
|
||||
return username, password
|
||||
|
||||
def _get_url_and_credentials(self, original_url):
|
||||
# type: (str) -> Tuple[str, Optional[str], Optional[str]]
|
||||
"""Return the credentials to use for the provided URL.
|
||||
|
||||
If allowed, netrc and keyring may be used to obtain the
|
||||
correct credentials.
|
||||
|
||||
Returns (url_without_credentials, username, password). Note
|
||||
that even if the original URL contains credentials, this
|
||||
function may return a different username and password.
|
||||
"""
|
||||
url, netloc, _ = split_auth_netloc_from_url(original_url)
|
||||
|
||||
# Use any stored credentials that we have for this netloc
|
||||
username, password = self.passwords.get(netloc, (None, None))
|
||||
|
||||
if username is None and password is None:
|
||||
# No stored credentials. Acquire new credentials without prompting
|
||||
# the user. (e.g. from netrc, keyring, or the URL itself)
|
||||
username, password = self._get_new_credentials(original_url)
|
||||
|
||||
if username is not None or password is not None:
|
||||
# Convert the username and password if they're None, so that
|
||||
# this netloc will show up as "cached" in the conditional above.
|
||||
# Further, HTTPBasicAuth doesn't accept None, so it makes sense to
|
||||
# cache the value that is going to be used.
|
||||
username = username or ""
|
||||
password = password or ""
|
||||
|
||||
# Store any acquired credentials.
|
||||
self.passwords[netloc] = (username, password)
|
||||
|
||||
assert (
|
||||
# Credentials were found
|
||||
(username is not None and password is not None) or
|
||||
# Credentials were not found
|
||||
(username is None and password is None)
|
||||
), f"Could not load credentials from url: {original_url}"
|
||||
|
||||
return url, username, password
|
||||

    def __call__(self, req):
        # type: (Request) -> Request
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)

        # Set the url of the request to the url without any credentials
        req.url = url

        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(self, netloc):
        # type: (str) -> Tuple[Optional[str], Optional[str], bool]
        username = ask_input(f"User for {netloc}: ")
        if not username:
            return None, None, False
        auth = get_keyring_auth(netloc, username)
        if auth and auth[0] is not None and auth[1] is not None:
            return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self):
        # type: () -> bool
        if not keyring:
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp, **kwargs):
        # type: (Response, **Any) -> Response
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib.parse.urlparse(resp.url)

        # Query the keyring for credentials:
        username, password = self._get_new_credentials(resp.url,
                                                       allow_netrc=False,
                                                       allow_keyring=True)

        # Prompt the user for a new username and password
        save = False
        if not username and not password:
            username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp, **kwargs):
        # type: (Response, **Any) -> None
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning(
                '401 Error, Credentials not correct for %s', resp.request.url,
            )

    def save_credentials(self, resp, **kwargs):
        # type: (Response, **Any) -> None
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return

        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info('Saving credentials to keyring')
                keyring.set_password(*creds)
            except Exception:
                logger.exception('Failed to save credentials')
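
# For context, a minimal sketch (assumed setup, not pip's literal call site)
# of how this helper is wired into a requests session; PipSession in
# session.py below does this same wiring:
#
#   from pip._vendor import requests
#
#   session = requests.Session()
#   session.auth = MultiDomainBasicAuth(
#       prompting=True, index_urls=["https://pypi.org/simple"])
#   # Every request now passes through __call__ above, and a 401 response
#   # triggers the handle_401 prompt-and-retry flow.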
@ -0,0 +1,76 @@
"""HTTP cache implementation.
"""

import os
from contextlib import contextmanager
from typing import Iterator, Optional

from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.requests.models import Response

from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import ensure_dir


def is_from_cache(response):
    # type: (Response) -> bool
    return getattr(response, "from_cache", False)


@contextmanager
def suppressed_cache_errors():
    # type: () -> Iterator[None]
    """If we can't access the cache then we can just skip caching and process
    requests as if caching wasn't enabled.
    """
    try:
        yield
    except OSError:
        pass


class SafeFileCache(BaseCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, directory):
        # type: (str) -> None
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        self.directory = directory

    def _get_cache_path(self, name):
        # type: (str) -> str
        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
        # class for backwards-compatibility and to avoid using a non-public
        # method.
        hashed = FileCache.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        # type: (str) -> Optional[bytes]
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            with open(path, 'rb') as f:
                return f.read()

    def set(self, key, value):
        # type: (str, bytes) -> None
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            with adjacent_tmp_file(path) as f:
                f.write(value)

            replace(f.name, path)

    def delete(self, key):
        # type: (str) -> None
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(path)
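
# A short usage sketch (the cache directory is hypothetical; filesystem
# errors such as a read-only directory are swallowed by design):
#
#   import tempfile
#
#   cache = SafeFileCache(tempfile.mkdtemp())
#   cache.set("https://pypi.org/simple/", b"cached body")
#   assert cache.get("https://pypi.org/simple/") == b"cached body"
#   cache.delete("https://pypi.org/simple/")
#   assert cache.get("https://pypi.org/simple/") is None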
@ -0,0 +1,196 @@
"""Download files with progress indicators.
"""
import cgi
import logging
import mimetypes
import os
from typing import Iterable, Optional, Tuple

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.cli.progress_bars import DownloadProgressProvider
from pip._internal.exceptions import NetworkConnectionError
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.network.cache import is_from_cache
from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext

logger = logging.getLogger(__name__)


def _get_http_response_size(resp):
    # type: (Response) -> Optional[int]
    try:
        return int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        return None


def _prepare_download(
    resp,  # type: Response
    link,  # type: Link
    progress_bar  # type: str
):
    # type: (...) -> Iterable[bytes]
    total_length = _get_http_response_size(resp)

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)

    if total_length:
        logged_url = '{} ({})'.format(logged_url, format_size(total_length))

    if is_from_cache(resp):
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    return DownloadProgressProvider(
        progress_bar, max=total_length
    )(chunks)


def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.
    """
    return os.path.basename(filename)


def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get('filename')
    if filename:
        # We need to sanitize the filename to prevent directory traversal
        # in case the filename contains ".." path parts.
        filename = sanitize_content_filename(filename)
    return filename or default_filename
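
# Worked examples for the parser above (header values are hypothetical):
#
#   parse_content_disposition('attachment; filename="pkg-1.0.tar.gz"', 'dl')
#   # -> 'pkg-1.0.tar.gz'
#   parse_content_disposition('attachment; filename="../../evil.sh"', 'dl')
#   # -> 'evil.sh' (os.path.basename drops the traversal components)
#   parse_content_disposition('attachment', 'dl')
#   # -> 'dl' (no filename parameter, so the default wins)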

def _get_http_response_filename(resp, link):
    # type: (Response, Link) -> str
    """Get an ideal filename from the given HTTP response, falling back to
    the link filename if not provided.
    """
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]  # type: Optional[str]
    if not ext:
        ext = mimetypes.guess_extension(
            resp.headers.get('content-type', '')
        )
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    return filename


def _http_get_download(session, link):
    # type: (PipSession, Link) -> Response
    target_url = link.url.split('#', 1)[0]
    resp = session.get(target_url, headers=HEADERS, stream=True)
    raise_for_status(resp)
    return resp


class Downloader:
    def __init__(
        self,
        session,  # type: PipSession
        progress_bar,  # type: str
    ):
        # type: (...) -> None
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, link, location):
        # type: (Link, str) -> Tuple[str, str]
        """Download the file given by link into location."""
        try:
            resp = _http_get_download(self._session, link)
        except NetworkConnectionError as e:
            assert e.response is not None
            logger.critical(
                "HTTP error %s while getting %s", e.response.status_code, link
            )
            raise

        filename = _get_http_response_filename(resp, link)
        filepath = os.path.join(location, filename)

        chunks = _prepare_download(resp, link, self._progress_bar)
        with open(filepath, 'wb') as content_file:
            for chunk in chunks:
                content_file.write(chunk)
        content_type = resp.headers.get('Content-Type', '')
        return filepath, content_type


class BatchDownloader:

    def __init__(
        self,
        session,  # type: PipSession
        progress_bar,  # type: str
    ):
        # type: (...) -> None
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, links, location):
        # type: (Iterable[Link], str) -> Iterable[Tuple[Link, Tuple[str, str]]]
        """Download the files given by links into location."""
        for link in links:
            try:
                resp = _http_get_download(self._session, link)
            except NetworkConnectionError as e:
                assert e.response is not None
                logger.critical(
                    "HTTP error %s while getting %s",
                    e.response.status_code, link,
                )
                raise

            filename = _get_http_response_filename(resp, link)
            filepath = os.path.join(location, filename)

            chunks = _prepare_download(resp, link, self._progress_bar)
            with open(filepath, 'wb') as content_file:
                for chunk in chunks:
                    content_file.write(chunk)
            content_type = resp.headers.get('Content-Type', '')
            yield link, (filepath, content_type)
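
# A hedged usage sketch for the downloaders above (the link target and the
# destination directory are made up):
#
#   from pip._internal.models.link import Link
#   from pip._internal.network.session import PipSession
#
#   session = PipSession()
#   download = Downloader(session, progress_bar="on")
#   link = Link("https://files.example.org/pkg-1.0-py3-none-any.whl")
#   filepath, content_type = download(link, "/tmp/downloads")
#   # filepath is the saved file; content_type echoes the response header.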
@ -0,0 +1,224 @@
"""Lazy ZIP over HTTP"""

__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url']

from bisect import bisect_left, bisect_right
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, Dict, Iterator, List, Optional, Tuple
from zipfile import BadZipfile, ZipFile

from pip._vendor.pkg_resources import Distribution
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel


class HTTPRangeRequestUnsupported(Exception):
    pass


def dist_from_wheel_url(name, url, session):
    # type: (str, str, PipSession) -> Distribution
    """Return a pkg_resources.Distribution from the given wheel URL.

    This uses HTTP range requests to only fetch the portion of the wheel
    containing metadata, just enough for the object to be constructed.
    If such requests are not supported, HTTPRangeRequestUnsupported
    is raised.
    """
    with LazyZipOverHTTP(url, session) as wheel:
        # For read-only ZIP files, ZipFile only needs methods read,
        # seek, seekable and tell, not the whole IO protocol.
        zip_file = ZipFile(wheel)  # type: ignore
        # After context manager exit, wheel.name
        # is an invalid file by intention.
        return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
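
# A minimal sketch of the fast path this enables (the wheel URL is
# hypothetical; only the metadata portion of the archive gets fetched):
#
#   from pip._internal.network.session import PipSession
#
#   session = PipSession()
#   dist = dist_from_wheel_url(
#       "requests",
#       "https://files.example.org/requests-2.25.1-py2.py3-none-any.whl",
#       session,
#   )
#   print(dist.version)  # e.g. "2.25.1", without downloading the whole wheel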

class LazyZipOverHTTP:
    """File-like object mapped to a ZIP file over HTTP.

    This uses HTTP range requests to lazily fetch the file's content,
    which is supposed to be fed to ZipFile.  If such requests are not
    supported by the server, raise HTTPRangeRequestUnsupported
    during initialization.
    """

    def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE):
        # type: (str, PipSession, int) -> None
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers['Content-Length'])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        self._left = []  # type: List[int]
        self._right = []  # type: List[int]
        if 'bytes' not in head.headers.get('Accept-Ranges', 'none'):
            raise HTTPRangeRequestUnsupported('range request is not supported')
        self._check_zip()

    @property
    def mode(self):
        # type: () -> str
        """Opening mode, which is always rb."""
        return 'rb'

    @property
    def name(self):
        # type: () -> str
        """Path to the underlying file."""
        return self._file.name

    def seekable(self):
        # type: () -> bool
        """Return whether random access is supported, which is True."""
        return True

    def close(self):
        # type: () -> None
        """Close the file."""
        self._file.close()

    @property
    def closed(self):
        # type: () -> bool
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size=-1):
        # type: (int) -> bytes
        """Read up to size bytes from the object and return them.

        As a convenience, if size is unspecified or -1,
        all bytes until EOF are returned.  Fewer than
        size bytes may be returned if EOF is reached.
        """
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start+download_size, length)
        start = max(0, stop-download_size)
        self._download(start, stop-1)
        return self._file.read(size)

    def readable(self):
        # type: () -> bool
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset, whence=0):
        # type: (int, int) -> int
        """Change stream position and return the new absolute position.

        Seek to offset relative to the position indicated by whence:
        * 0: Start of stream (the default).  pos should be >= 0;
        * 1: Current position - pos may be negative;
        * 2: End of stream - pos usually negative.
        """
        return self._file.seek(offset, whence)

    def tell(self):
        # type: () -> int
        """Return the current position."""
        return self._file.tell()

    def truncate(self, size=None):
        # type: (Optional[int]) -> int
        """Resize the stream to the given size in bytes.

        If size is unspecified resize to the current position.
        The current stream position isn't changed.

        Return the new file size.
        """
        return self._file.truncate(size)

    def writable(self):
        # type: () -> bool
        """Return False."""
        return False

    def __enter__(self):
        # type: () -> LazyZipOverHTTP
        self._file.__enter__()
        return self

    def __exit__(self, *exc):
        # type: (*Any) -> Optional[bool]
        return self._file.__exit__(*exc)

    @contextmanager
    def _stay(self):
        # type: () -> Iterator[None]
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
        """
        pos = self.tell()
        try:
            yield
        finally:
            self.seek(pos)

    def _check_zip(self):
        # type: () -> None
        """Check and download until the file is a valid ZIP."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipfile:
                    pass
                else:
                    break

    def _stream_response(self, start, end, base_headers=HEADERS):
        # type: (int, int, Dict[str, str]) -> Response
        """Return HTTP response to a range request from start to end."""
        headers = base_headers.copy()
        headers['Range'] = f'bytes={start}-{end}'
        # TODO: Get range requests to be correctly cached
        headers['Cache-Control'] = 'no-cache'
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(self, start, end, left, right):
        # type: (int, int, int, int) -> Iterator[Tuple[int, int]]
        """Return an iterator of intervals to be fetched.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start]+lslice[:1])
        end = max([end]+rslice[-1:])
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j-1
            i = k + 1
        if i <= end:
            yield i, end
        self._left[left:right], self._right[left:right] = [start], [end]
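
    # A worked example of the bookkeeping above (hypothetical state):
    # with bytes 0-9 and 20-29 already downloaded, _left == [0, 20] and
    # _right == [9, 29]; a read now needs bytes 5-25.  In _download below,
    # left = bisect_left(_right, 5) == 0 and right = bisect_right(_left, 25)
    # == 2, so _merge(5, 25, 0, 2) yields only the missing gap (10, 19),
    # after which the intervals collapse to _left == [0], _right == [29].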

    def _download(self, start, end):
        # type: (int, int) -> None
        """Download bytes from start to end inclusively."""
        with self._stay():
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for start, end in self._merge(start, end, left, right):
                response = self._stream_response(start, end)
                response.raise_for_status()
                self.seek(start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
@ -0,0 +1,449 @@
"""PipSession and supporting code, containing all pip-specific
network request configuration and behavior.
"""

# When mypy runs on Windows the call to distro.linux_distribution() is skipped
# resulting in the failure:
#
#     error: unused 'type: ignore' comment
#
# If the upstream module adds typing, this comment should be removed. See
# https://github.com/nir0s/distro/pull/269
#
# mypy: warn-unused-ignores=False

import email.utils
import ipaddress
import json
import logging
import mimetypes
import os
import platform
import sys
import urllib.parse
import warnings
from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union

from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.models import PreparedRequest, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.urllib3.connectionpool import ConnectionPool
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

from pip import __version__
from pip._internal.metadata import get_default_environment
from pip._internal.models.link import Link
from pip._internal.network.auth import MultiDomainBasicAuth
from pip._internal.network.cache import SafeFileCache

# Import ssl from compat so the initial import occurs in only one place.
from pip._internal.utils.compat import has_tls
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
from pip._internal.utils.urls import url_to_path

logger = logging.getLogger(__name__)

SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]  # type: List[SecureOrigin]


# These are environment variables present when running under various
# CI systems.  For each variable, some CI systems that use the variable
# are indicated.  The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI.  Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    'BUILD_BUILDID',
    # Jenkins
    'BUILD_ID',
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    'CI',
    # Explicit environment variable.
    'PIP_IS_CI',
)


def looks_like_ci():
    # type: () -> bool
    """
    Return whether it looks like pip is running under CI.
    """
    # We don't use the method of checking for a tty (e.g. using isatty())
    # because some CI systems mimic a tty (e.g. Travis CI).  Thus that
    # method doesn't provide definitive information in either direction.
    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)


def user_agent():
    # type: () -> str
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": __version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }  # type: Dict[str, Any]

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        pypy_version_info = sys.pypy_version_info  # type: ignore
        if pypy_version_info.releaselevel == 'final':
            pypy_version_info = pypy_version_info[:3]
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro

        # https://github.com/nir0s/distro/pull/269
        linux_distribution = distro.linux_distribution()  # type: ignore
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], linux_distribution),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if has_tls():
        import _ssl as ssl
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_dist = get_default_environment().get_distribution("setuptools")
    if setuptools_dist is not None:
        data["setuptools_version"] = str(setuptools_dist.version)

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI.  Rather, it is a null or
    # inconclusive result.  Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
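
# A tiny sanity check for the builder above (output shape only; the exact
# JSON fields depend on the running environment):
#
#   ua = user_agent()
#   assert ua.startswith("pip/")
#   print(ua)  # e.g. pip/21.1 {"ci":null,"cpu":"x86_64",...}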

class LocalFSAdapter(BaseAdapter):

    def send(
        self,
        request,  # type: PreparedRequest
        stream=False,  # type: bool
        timeout=None,  # type: Optional[Union[float, Tuple[float, float]]]
        verify=True,  # type: Union[bool, str]
        cert=None,  # type: Optional[Union[str, Tuple[str, str]]]
        proxies=None,  # type: Optional[Mapping[str, str]]
    ):
        # type: (...) -> Response
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        # type: () -> None
        pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(
        self,
        conn,  # type: ConnectionPool
        url,  # type: str
        verify,  # type: Union[bool, str]
        cert,  # type: Optional[Union[str, Tuple[str, str]]]
    ):
        # type: (...) -> None
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)


class InsecureCacheControlAdapter(CacheControlAdapter):

    def cert_verify(
        self,
        conn,  # type: ConnectionPool
        url,  # type: str
        verify,  # type: Union[bool, str]
        cert,  # type: Optional[Union[str, Tuple[str, str]]]
    ):
        # type: (...) -> None
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)


class PipSession(requests.Session):

    timeout = None  # type: Optional[int]

    def __init__(
        self,
        *args,  # type: Any
        retries=0,  # type: int
        cache=None,  # type: Optional[str]
        trusted_hosts=(),  # type: Sequence[str]
        index_urls=None,  # type: Optional[List[str]]
        **kwargs,  # type: Any
    ):
        # type: (...) -> None
        """
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        """
        super().__init__(*args, **kwargs)

        # Namespace the attribute with "pip_" just in case to prevent
        # possible conflicts with the base class.
        self.pip_trusted_origins = []  # type: List[Tuple[str, Optional[int]]]

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )  # type: ignore

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching so we'll use it for all http:// URLs.
        # If caching is disabled, we will also use it for
        # https:// hosts that we've marked as ignoring
        # TLS errors for (trusted-hosts).
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        # We want to _only_ cache responses on securely fetched origins or when
        # the host is specified as trusted. We do this because
        # we can't validate the response of an insecurely/untrusted fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        for host in trusted_hosts:
            self.add_trusted_host(host, suppress_logging=True)
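
    # A hedged construction sketch showing how the pieces above combine
    # (the cache path and hosts are illustrative):
    #
    #   session = PipSession(
    #       retries=3,
    #       cache="/tmp/pip-http-cache",
    #       trusted_hosts=["internal.example.org"],
    #       index_urls=["https://pypi.org/simple"],
    #   )
    #   resp = session.get("https://pypi.org/simple/")
    #   # HTTPS responses are cached via SafeFileCache; the trusted host is
    #   # served by the insecure (no TLS verification) cache adapter.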

    def update_index_urls(self, new_index_urls):
        # type: (List[str]) -> None
        """
        :param new_index_urls: New index urls to update the authentication
            handler with.
        """
        self.auth.index_urls = new_index_urls

    def add_trusted_host(self, host, source=None, suppress_logging=False):
        # type: (str, Optional[str], bool) -> None
        """
        :param host: It is okay to provide a host that has previously been
            added.
        :param source: An optional source string, for logging where the host
            string came from.
        """
        if not suppress_logging:
            msg = f'adding trusted host: {host!r}'
            if source is not None:
                msg += f' (from {source})'
            logger.info(msg)

        host_port = parse_netloc(host)
        if host_port not in self.pip_trusted_origins:
            self.pip_trusted_origins.append(host_port)

        self.mount(
            build_url_from_netloc(host) + '/',
            self._trusted_host_adapter
        )
        if not host_port[1]:
            # Mount wildcard ports for the same host.
            self.mount(
                build_url_from_netloc(host) + ':',
                self._trusted_host_adapter
            )
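
    # For example (hypothetical host), add_trusted_host("internal.example.org")
    # mounts the trusted-host adapter at both prefixes:
    #
    #   "https://internal.example.org/"  (exact host, default port)
    #   "https://internal.example.org:"  (any explicit port, mounted only
    #                                     because no port was given above)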

    def iter_secure_origins(self):
        # type: () -> Iterator[SecureOrigin]
        yield from SECURE_ORIGINS
        for host, port in self.pip_trusted_origins:
            yield ('*', host, '*' if port is None else port)

    def is_secure_origin(self, location):
        # type: (Link) -> bool
        # Determine if this url used a secure transport mechanism
        parsed = urllib.parse.urlparse(str(location))
        origin_protocol, origin_host, origin_port = (
            parsed.scheme, parsed.hostname, parsed.port,
        )

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        origin_protocol = origin_protocol.rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            secure_protocol, secure_host, secure_port = secure_origin
            if origin_protocol != secure_protocol and secure_protocol != "*":
                continue

            try:
                addr = ipaddress.ip_address(origin_host)
                network = ipaddress.ip_network(secure_host)
            except ValueError:
                # We don't have both a valid address and a valid network, so
                # we'll check this origin against hostnames.
                if (
                    origin_host and
                    origin_host.lower() != secure_host.lower() and
                    secure_host != "*"
                ):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches.
            if (
                origin_port != secure_port and
                secure_port != "*" and
                secure_port is not None
            ):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search.  We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            origin_host,
            origin_host,
        )

        return False
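
    # Concrete outcomes of the check above (illustrative locations):
    #
    #   session.is_secure_origin(Link("https://pypi.org/simple"))    # True
    #   session.is_secure_origin(Link("http://127.0.0.1:8080/idx"))  # True
    #   session.is_secure_origin(Link("http://pypi.example.org/"))   # False
    #   # ...unless pypi.example.org was added via add_trusted_host (the
    #   # --trusted-host option); the False case also logs the warning above.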

    def request(self, method, url, *args, **kwargs):
        # type: (str, str, *Any, **Any) -> Response
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super().request(method, url, *args, **kwargs)
@ -0,0 +1,95 @@
from typing import Dict, Iterator

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.exceptions import NetworkConnectionError

# The following comments and HTTP headers were originally added by
# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
#
# We use Accept-Encoding: identity here because requests defaults to
# accepting compressed responses. This breaks in a variety of ways
# depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible file
#   and will leave the file alone and with an empty Content-Encoding
# - Some servers will notice that the file is already compressed and
#   will leave the file alone, adding a Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take a file
#   that's already been compressed and compress it again, and set
#   the Content-Encoding: gzip header
# By setting this to request only the identity encoding we're hoping
# to eliminate the third case.  Hopefully there does not exist a server
# which when given a file will notice it is already compressed and that
# you're not asking for a compressed file and will then decompress it
# before sending because if that's the case I don't think it'll ever be
# possible to make this work.
HEADERS = {'Accept-Encoding': 'identity'}  # type: Dict[str, str]


def raise_for_status(resp):
    # type: (Response) -> None
    http_error_msg = ''
    if isinstance(resp.reason, bytes):
        # We attempt to decode utf-8 first because some servers
        # choose to localize their reason strings. If the string
        # isn't utf-8, we fall back to iso-8859-1 for all other
        # encodings.
        try:
            reason = resp.reason.decode('utf-8')
        except UnicodeDecodeError:
            reason = resp.reason.decode('iso-8859-1')
    else:
        reason = resp.reason

    if 400 <= resp.status_code < 500:
        http_error_msg = (
            f'{resp.status_code} Client Error: {reason} for url: {resp.url}')

    elif 500 <= resp.status_code < 600:
        http_error_msg = (
            f'{resp.status_code} Server Error: {reason} for url: {resp.url}')

    if http_error_msg:
        raise NetworkConnectionError(http_error_msg, response=resp)
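
# A short sketch of the intended use (the URL is hypothetical):
#
#   resp = session.get("https://pypi.org/simple/nonexistent-package/")
#   raise_for_status(resp)
#   # A 404 raises NetworkConnectionError carrying the response, with a
#   # message like "404 Client Error: Not Found for url: ...".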

def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
    # type: (Response, int) -> Iterator[bytes]
    """Given a requests Response, provide the data chunks.
    """
    try:
        # Special case for urllib3.
        for chunk in response.raw.stream(
            chunk_size,
            # We use decode_content=False here because we don't
            # want urllib3 to mess with the raw bytes we get
            # from the server. If we decompress inside of
            # urllib3 then we cannot verify the checksum
            # because the checksum will be of the compressed
            # file. This breakage will only occur if the
            # server adds a Content-Encoding header, which
            # depends on how the server was configured:
            # - Some servers will notice that the file isn't a
            #   compressible file and will leave the file alone
            #   and with an empty Content-Encoding
            # - Some servers will notice that the file is
            #   already compressed and will leave the file
            #   alone and will add a Content-Encoding: gzip
            #   header
            # - Some servers won't notice anything at all and
            #   will take a file that's already been compressed
            #   and compress it again and set the
            #   Content-Encoding: gzip header
            #
            # By setting this not to decode automatically we
            # hope to eliminate problems with the second case.
            decode_content=False,
        ):
            yield chunk
    except AttributeError:
        # Standard file-like object.
        while True:
            chunk = response.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
@ -0,0 +1,49 @@
"""xmlrpclib.Transport implementation
"""

import logging
import urllib.parse
import xmlrpc.client
from typing import TYPE_CHECKING, Tuple

from pip._internal.exceptions import NetworkConnectionError
from pip._internal.network.session import PipSession
from pip._internal.network.utils import raise_for_status

if TYPE_CHECKING:
    from xmlrpc.client import _HostType, _Marshallable

logger = logging.getLogger(__name__)


class PipXmlrpcTransport(xmlrpc.client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        # type: (str, PipSession, bool) -> None
        super().__init__(use_datetime)
        index_parts = urllib.parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        # type: (_HostType, str, bytes, bool) -> Tuple[_Marshallable, ...]
        assert isinstance(host, str)
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib.parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            raise_for_status(response)
            self.verbose = verbose
            return self.parse_response(response.raw)
        except NetworkConnectionError as exc:
            assert exc.response
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
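
# A hedged usage sketch (this mirrors how an XML-RPC client would drive the
# transport; the endpoint shown is PyPI's XML-RPC interface):
#
#   import xmlrpc.client
#
#   index_url = "https://pypi.org/pypi"
#   session = PipSession()
#   transport = PipXmlrpcTransport(index_url, session)
#   pypi = xmlrpc.client.ServerProxy(index_url, transport=transport)
#   hits = pypi.search({"name": "pip"})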
@ -0,0 +1,35 @@
"""Metadata generation logic for source distributions.
"""

import os

from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import BuildEnvironment
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory


def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)
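
# A hedged sketch of a call site (the source directory and backend name are
# illustrative; pip supplies these from the project being built):
#
#   from pip._vendor.pep517.wrappers import Pep517HookCaller
#   from pip._internal.build_env import BuildEnvironment
#
#   backend = Pep517HookCaller("/path/to/project", "setuptools.build_meta")
#   metadata_dir = generate_metadata(BuildEnvironment(), backend)
#   # metadata_dir now points at the generated *.dist-info directory.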
Some files were not shown because too many files have changed in this diff