commit b9e801a9b9
parent d5ff6cdf92

    aaaa
diff --git a/.idea/misc.xml b/.idea/misc.xml
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11 (MySy_Back_Office)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11 (Back_Office)" project-jdk-type="Python SDK" />
   <component name="PyCharmProfessionalAdvertiser">
     <option name="shown" value="true" />
   </component>
diff --git a/.idea/myclass_api.iml b/.idea/myclass_api.iml
@@ -4,7 +4,7 @@
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Python 3.11 (MySy_Back_Office)" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="Python 3.11 (Back_Office)" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
@@ -1,7 +1,495 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
   <component name="ChangeListManager">
-    <list default="true" id="1122d9e2-679f-46d6-8c4f-97e9ae4041b5" name="Changes" comment="sss - init new pc" />
+    <list default="true" id="1122d9e2-679f-46d6-8c4f-97e9ae4041b5" name="Changes" comment="sss - init new pc">
<change beforePath="$PROJECT_DIR$/.idea/misc.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/.idea/myclass_api.iml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/myclass_api.iml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Emargement/test_emargement.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Emargement/test_emargement.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/GlobalVariable.py" beforeDir="false" afterPath="$PROJECT_DIR$/GlobalVariable.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_221000011.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_221000011.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_221000012.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_221000012.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_221000013.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_221000013.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_221000014.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_221000014.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_221000015.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_221000015.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290001.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290001.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900010.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900010.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900011.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900011.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900012.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900012.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900013.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900013.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900014.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900014.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900015.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900015.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900016.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900016.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900017.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900017.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900018.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900018.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900019.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900019.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900020.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900020.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900021.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900021.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900022.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900022.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900023.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900023.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900024.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900024.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900025.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900025.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900026.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900026.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900027.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900027.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900028.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900028.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900029.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900029.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290003.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290003.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900030.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900030.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900031.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900031.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900032.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900032.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900033.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900033.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900034.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900034.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900035.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900035.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900036.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900036.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900037.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900037.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900038.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900038.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900039.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900039.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290004.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290004.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900040.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900040.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900041.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900041.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900042.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900042.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900043.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900043.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_22900044.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_22900044.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290005.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290005.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290006.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290006.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290007.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290007.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290008.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290008.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_FACT_2290009.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_FACT_2290009.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_00001.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_00001.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000010.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000010.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000011.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000011.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000012.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000012.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000013.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000013.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000014.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000014.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000015.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000015.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000016.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000016.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000017.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000017.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000018.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000018.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_000019.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_000019.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_00002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_00002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_00003.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_00003.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_00004.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_00004.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_00005.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_00005.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_001.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_001.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_202220220002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_202220220002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/invoice_MySy_202220220003.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/invoice_MySy_202220220003.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Invoices/test_emargement.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/Invoices/test_emargement.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Log/log_file.log" beforeDir="false" afterPath="$PROJECT_DIR$/Log/log_file.log" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/email_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/email_mgt.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/main.py" beforeDir="false" afterPath="$PROJECT_DIR$/main.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/prj_common.py" beforeDir="false" afterPath="$PROJECT_DIR$/prj_common.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_FACT_22900010.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_FACT_22900010.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290004.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290004.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290006.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290006.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290007.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_FACT_2290007.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_00001.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_00001.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_000019.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_000019.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_00002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_00002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_00004.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_00004.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_00005.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_00005.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/temp_direct/invoice_MySy_202220220002.pdf" beforeDir="false" afterPath="$PROJECT_DIR$/temp_direct/invoice_MySy_202220220002.pdf" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/INSTALLER" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/LICENSE" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/METADATA" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/RECORD" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/WHEEL" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2-1.26.0.dist-info/top_level.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/_version.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/filters.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/generic.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/merger.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/pagerange.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/pdf.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/PyPDF2/xmp.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/easy-install.pth" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/arcfour.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/ascii85.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/cmap/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/cmapdb.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/converter.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/encodingdb.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/fontmetrics.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/glyphlist.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/latin_enc.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/layout.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/lzw.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdfcolor.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdfdevice.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdffont.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdfinterp.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdfparser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pdftypes.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/pslexer.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/psparser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/rijndael.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/runlength.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer/utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/INSTALLER" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/LICENSE.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/METADATA" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/RECORD" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/WHEEL" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pdfminer3k-1.3.4.dist-info/top_level.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/PKG-INFO" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/SOURCES.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/dependency_links.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/entry_points.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/not-zip-safe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/top_level.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__main__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/wheel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/appdirs.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/adapter.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/cache.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/controller.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/serialize.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__main__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/cacert.pem" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/core.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/big5freq.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/big5prober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/chardistribution.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/charsetgroupprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/charsetprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/codingstatemachine.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cp949prober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/enums.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/escprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/escsm.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/eucjpprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euckrfreq.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euckrprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euctwfreq.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euctwprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/gb2312freq.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/gb2312prober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/hebrewprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/jisfreq.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/jpcntx.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langbulgarianmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langcyrillicmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langgreekmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langhebrewmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langhungarianmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langthaimodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langturkishmodel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/latin1prober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcharsetprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcssm.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sbcharsetprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sbcsgroupprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sjisprober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/universaldetector.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/utf8prober.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/version.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/ansi.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/ansitowin32.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/initialise.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/win32.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/winterm.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/misc.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/database.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/index.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/locators.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/manifest.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/markers.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/metadata.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/resources.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/scripts.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t32.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t64.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/util.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/version.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w32.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w64.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/wheel.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distro.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_ihatexml.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_inputstream.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_tokenizer.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/py.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/constants.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/base.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/lint.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/optionaltags.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/sanitizer.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/html5parser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/serializer.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/base.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/dom.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/base.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/codec.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/core.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/idnadata.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/intranges.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/package_data.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/uts46data.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/ipaddress.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/linklockfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/pidlockfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/sqlitelockfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/_version.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/exceptions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/fallback.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__about__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_structures.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/markers.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/requirements.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/specifiers.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/version.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/_in_process.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/build.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/check.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/colorlog.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/envbuild.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/wrappers.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/bar.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/counter.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/helpers.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/spinner.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pyparsing.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/core.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/parser.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/test.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/writer.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__version__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/_internal_utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/adapters.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/api.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/auth.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/certs.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/compat.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/cookies.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/exceptions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/help.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/hooks.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/models.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/packages.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/sessions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/status_codes.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/structures.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/utils.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/retrying.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/six.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/_collections.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/connection.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/connectionpool.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_appengine_environ.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/appengine.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/ntlmpool.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/pyopenssl.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/securetransport.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/socks.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/exceptions.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/fields.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/filepost.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/six.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/poolmanager.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/request.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/response.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/connection.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/queue.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/request.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/response.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/retry.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/ssl_.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/timeout.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/url.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/wait.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/labels.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/mklabels.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/tests.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/x_user_defined.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/DESCRIPTION.rst" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/INSTALLER" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/METADATA" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/RECORD" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/WHEEL" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/metadata.json" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply-3.11.dist-info/top_level.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/cpp.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/ctokens.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/lex.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/yacc.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/ply/ygen.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/setuptools-40.8.0-py3.7.egg" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/setuptools.pth" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv-0.14.1.dist-info/INSTALLER" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv-0.14.1.dist-info/METADATA" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv-0.14.1.dist-info/RECORD" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv-0.14.1.dist-info/WHEEL" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv-0.14.1.dist-info/top_level.txt" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv/__init__.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv/py2.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv/py3.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Lib/site-packages/unicodecsv/test.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/Activate.ps1" beforeDir="false" afterPath="$PROJECT_DIR$/venv/Scripts/Activate.ps1" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/activate" beforeDir="false" afterPath="$PROJECT_DIR$/venv/Scripts/activate" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/activate.bat" beforeDir="false" afterPath="$PROJECT_DIR$/venv/Scripts/activate.bat" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install-3.7-script.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install-3.7.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install-3.7.exe.manifest" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install-script.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/easy_install.exe.manifest" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/pip.exe" beforeDir="false" afterPath="$PROJECT_DIR$/venv/Scripts/pip.exe" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/pip3.7-script.py" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/pip3.7.exe" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/pip3.7.exe.manifest" beforeDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/Scripts/pip3.exe" beforeDir="false" afterPath="$PROJECT_DIR$/venv/Scripts/pip3.exe" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/venv/pyvenv.cfg" beforeDir="false" afterPath="$PROJECT_DIR$/venv/pyvenv.cfg" afterDir="false" />
|
||||
</list>
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
||||
|
@@ -34,9 +522,9 @@
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="C:\Users\cheri\Documents\myclass.com\Siteweb\Production\Ela_back\MySy_Back_Office\venv\Scripts\python.exe" />
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="false" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/main.py" />
@@ -14,18 +14,26 @@ import ela_spacy_common as lsc
This variable defines the working environment
"""
#MYSY_ENV = "PROD"
#MYSY_ENV = "REC"
MYSY_ENV = "DEV"


if (MYSY_ENV == "PROD"):
    CONNECTION_STRING = "mongodb://localhost:27017/cherifdb"
    client = MongoClient(CONNECTION_STRING)
    dbname = client['cherifdb']


elif (MYSY_ENV == "DEV"):
    CONNECTION_STRING = "mongodb://localhost:27017/cherifdb_dev"
    client = MongoClient(CONNECTION_STRING)
    dbname = client['cherifdb_dev']

elif (MYSY_ENV == "REC"):
    CONNECTION_STRING = "mongodb://localhost:27017/cherifdb_rec"
    client = MongoClient(CONNECTION_STRING)
    dbname = client['cherifdb_rec']
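
Reviewer note, not part of the commit: the three MYSY_ENV branches above differ only in the database name, so they could be collapsed into one lookup table. A minimal sketch, assuming the same three environment names shown above; ENV_DATABASES is a hypothetical name:

from pymongo import MongoClient

# Hypothetical lookup table; the names come from the branches above.
ENV_DATABASES = {
    "PROD": "cherifdb",
    "DEV": "cherifdb_dev",
    "REC": "cherifdb_rec",
}

MYSY_ENV = "DEV"
db_name = ENV_DATABASES[MYSY_ENV]  # raises KeyError on an unknown environment
CONNECTION_STRING = "mongodb://localhost:27017/" + db_name
client = MongoClient(CONNECTION_STRING)
dbname = client[db_name]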
@@ -39,7 +47,8 @@ Cette Variable definit les port de connexion
in dev or in production
"""
MYSY_PORT_PROD = 5000
MYSY_PORT_DEV= 5001
MYSY_PORT_DEV = 5001
MYSY_PORT_REC = 5001
@ -184,6 +193,9 @@ if (MYSY_ENV == "PROD"):
|
|||
elif (MYSY_ENV == "DEV"):
|
||||
INVOICE_FTP_LOCAL_STORAGE_DIRECTORY = "/var/www/html/sftp_iexercice/mysy_invoices_dev/"
|
||||
|
||||
elif (MYSY_ENV == "REC"):
|
||||
INVOICE_FTP_LOCAL_STORAGE_DIRECTORY = "/var/www/html/sftp_iexercice/mysy_invoices_rec/"
|
||||
|
||||
|
||||
"""
|
||||
Cette Variable definit le nombre de formation associées à retourner
|
||||
|
|
146
Log/log_file.log
@@ -881990,3 +881990,149 @@ INFO:root:2022-11-15 19:25:02.770191 : ++ DBNAME Database(MongoClient(host=['loc
INFO:root:2022-11-15 19:25:02.770191 : ++ FLASK PORT 5001 ++
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 977-468-048
INFO:root:2022-11-16 15:55:08.836959 : ++++ ENVIRONNEMENT DEVELOPPEMENT ++++
INFO:root:2022-11-16 15:55:08.836959 : ++ DATABASE mongodb://localhost:27017/cherifdb_dev ++
INFO:root:2022-11-16 15:55:08.837962 : ++ DBNAME Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'cherifdb_dev') ++
INFO:root:2022-11-16 15:55:08.837962 : ++ FLASK PORT 5001 ++
INFO:werkzeug:WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
 * Running on http://localhost:5001
INFO:werkzeug:Press CTRL+C to quit
INFO:werkzeug: * Restarting with stat
INFO:root:2022-11-16 15:55:17.384029 : ++++ ENVIRONNEMENT DEVELOPPEMENT ++++
INFO:root:2022-11-16 15:55:17.384029 : ++ DATABASE mongodb://localhost:27017/cherifdb_dev ++
INFO:root:2022-11-16 15:55:17.384029 : ++ DBNAME Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'cherifdb_dev') ++
INFO:root:2022-11-16 15:55:17.384029 : ++ FLASK PORT 5001 ++
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 127-713-359
INFO:root:2022-11-16 16:07:00.112377 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:07:00.624631 : get_class - La valeur 'token' n'est pas presente dans liste
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:07:00] "GET /myclass/api/get_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:07:23.543766 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:07:23.543766 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:07:23.547783 : get_training_in_user_zone -'user_country_code' - ERRORRRR AT Line : 341
INFO:root:2022-11-16 16:07:23.550770 : get_all_class -Expecting value: line 1 column 2 (char 1) - ERRORRRR AT Line : 191
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:07:23] "GET /myclass/api/get_all_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:07:54.927964 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:07:54.929987 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:07:54.932984 : get_training_in_user_zone -'user_city' - ERRORRRR AT Line : 342
INFO:root:2022-11-16 16:07:54.935996 : get_all_class -Expecting value: line 1 column 2 (char 1) - ERRORRRR AT Line : 191
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:07:54] "GET /myclass/api/get_all_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:08:09.902748 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:08:09.903750 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:08:09] "GET /myclass/api/get_all_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:10:23.026528 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:10:23] "GET /myclass/api/Get_Suggested_Word/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:52.931079 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:52.932083 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:53] "POST /myclass/api/Get_Suggested_Fr_Cities/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:53] "POST /myclass/api/Get_Suggested_Word/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:53.618800 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:53.619792 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:53] "POST /myclass/api/get_all_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:54.144480 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.146480 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.147480 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.152483 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.154475 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.159481 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:54.485711 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.486710 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.487708 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.488706 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.489704 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.490720 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:54.814441 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.814441 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.815438 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.816439 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.818436 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:54.820442 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:54] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:15:55.146835 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:15:55.147830 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:55] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:15:55] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:root:2022-11-16 16:16:05.650644 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 16:16:05.652646 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:16:05] "POST /myclass/api/get_class/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 16:16:05] "POST /myclass/api/get_associated_class_of_partnair/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:27:32.750997 : ++++ ENVIRONNEMENT DEVELOPPEMENT ++++
INFO:root:2022-11-16 21:27:32.751995 : ++ DATABASE mongodb://localhost:27017/cherifdb_dev ++
INFO:root:2022-11-16 21:27:32.752996 : ++ DBNAME Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'cherifdb_dev') ++
INFO:root:2022-11-16 21:27:32.752996 : ++ FLASK PORT 5001 ++
INFO:werkzeug:WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
 * Running on http://localhost:5001
INFO:werkzeug:Press CTRL+C to quit
INFO:werkzeug: * Restarting with stat
INFO:root:2022-11-16 21:27:47.357956 : ++++ ENVIRONNEMENT DEVELOPPEMENT ++++
INFO:root:2022-11-16 21:27:47.358963 : ++ DATABASE mongodb://localhost:27017/cherifdb_dev ++
INFO:root:2022-11-16 21:27:47.358963 : ++ DBNAME Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'cherifdb_dev') ++
INFO:root:2022-11-16 21:27:47.358963 : ++ FLASK PORT 5001 ++
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 127-713-359
INFO:root:2022-11-16 21:28:07.187223 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:07.189207 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:07] "POST /myclass/api/Get_Suggested_Fr_Cities/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:07] "POST /myclass/api/Get_Suggested_Word/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:07.439594 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:07.441595 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:07] "POST /myclass/api/get_all_class/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:07.895221 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:07] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:08.206027 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.207021 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.208024 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.209020 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.210035 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.211028 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:08.533384 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.534410 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.537398 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.540405 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:08.549931 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.550931 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:08.867477 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.868479 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.870478 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.870478 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.871477 : Security check : IP adresse '127.0.0.1' connected
INFO:root:2022-11-16 21:28:08.872470 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:08] "POST /myclass/api/getRecodedClassImage_from_front/ HTTP/1.1" 200 -
INFO:root:2022-11-16 21:28:09.208662 : Security check : IP adresse '127.0.0.1' connected
INFO:werkzeug:127.0.0.1 - - [16/Nov/2022 21:28:09] "POST /myclass/api/GetActiveSessionFormation_List/ HTTP/1.1" 200 -
INFO:werkzeug: * Detected change in 'C:\\Users\\cheri\\Documents\\myclass.com\\Siteweb\\Production\\Ela_back\\Back_Office\\GlobalVariable.py', reloading
INFO:werkzeug: * Restarting with stat
14
email_mgt.py
@@ -62,6 +62,8 @@ def send_user_account_mail_old(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_user_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_user_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)

    send_smtp_email = sib_api_v3_sdk.SendSmtpEmail(template_id=15, params={"mysyurl": my_url,

@@ -108,6 +110,8 @@ def send_user_thks_mail_old(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_user_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)


@@ -139,6 +143,8 @@ def send_partner_account_mail_old(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_partnair_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_partnair_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_partnair_account/"+str(message)

    print(" mail sent to toaddrs : "+toaddrs)

@@ -1110,6 +1116,9 @@ def send_user_account_mail(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_user_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_user_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)


    smtpserver = smtplib.SMTP(MYSY_GV.O365_SMTP_COUNT_smtpsrv, MYSY_GV.O365_SMTP_COUNT_port)

@@ -1176,6 +1185,9 @@ def send_partner_account_mail(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_partnair_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_partnair_account/"+str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_partnair_account/" + str(message)


    smtpserver = smtplib.SMTP(MYSY_GV.O365_SMTP_COUNT_smtpsrv, MYSY_GV.O365_SMTP_COUNT_port)

@@ -1310,6 +1322,8 @@ def send_user_thks_mail(message, account_mail):
        my_url = "https://apimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "DEV"):
        my_url = "http://localhost:"+str(MYSY_GV.MYSY_PORT_DEV)+"/myclass/api/valide_user_account/" + str(message)
    elif (MYSY_GV.MYSY_ENV == "REC"):
        my_url = "https://devapimysy.iexercice.com/myclass/api/valide_user_account/" + str(message)

"""
37
main.py
@@ -245,6 +245,9 @@ def valide_user_account(value):
            url = 'https://www.mysy-training.com/mysy-account-activated/'
        elif (MYSY_GV.MYSY_ENV == "DEV"):
            url = 'http://localhost:3009/mysy-account-activated/'
        elif (MYSY_GV.MYSY_ENV == "REC"):
            url = 'https://dev.mysy-training.com/mysy-account-activated/'

    else:
        if (MYSY_GV.MYSY_ENV == "PROD"):
            url = 'https://www.mysy-training.com/erreur'

@@ -252,6 +255,9 @@ def valide_user_account(value):
        elif (MYSY_GV.MYSY_ENV == "DEV"):
            url = 'http://localhost:3009/erreur/'

        elif (MYSY_GV.MYSY_ENV == "REC"):
            url = 'https://dev.mysy-training.com/erreur'

    return redirect(url)
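
Reviewer note, not part of the commit: the redirect targets above are the same three-environment split as in email_mgt.py, but pointing at the front end. A minimal self-contained sketch; FRONT_BASE_URLS and activation_redirect_url are hypothetical names, and the URLs are the ones shown in the hunk above:

# Per-environment front-end hosts, taken from the hunk above.
FRONT_BASE_URLS = {
    "PROD": "https://www.mysy-training.com",
    "DEV": "http://localhost:3009",
    "REC": "https://dev.mysy-training.com",
}

def activation_redirect_url(env, activated):
    # Success page per environment, shared error page otherwise.
    path = "/mysy-account-activated/" if activated else "/erreur"
    return FRONT_BASE_URLS[env] + path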
@@ -593,11 +599,16 @@ def valide_partnair_account(value):
            url = 'https://www.mysy-training.com/PartnerLogin0/'
        elif (MYSY_GV.MYSY_ENV == "DEV"):
            url = 'http://localhost:3009/PartnerLogin0'
        elif (MYSY_GV.MYSY_ENV == "REC"):
            url = 'https://dev.mysy-training.com/PartnerLogin0/'

    else:
        if (MYSY_GV.MYSY_ENV == "PROD"):
            url = 'https://www.mysy-training.com/erreur'
        elif (MYSY_GV.MYSY_ENV == "DEV"):
            url = 'https://www.mysy-training.com/erreur'
        elif (MYSY_GV.MYSY_ENV == "REC"):
            url = 'https://dev.mysy-training.com/erreur'

    return redirect(url)
@@ -1540,6 +1551,24 @@ def removeLSaccount():
    status, message = mycommon.removeLSaccount()
    return jsonify(status=status, message=message)


"""
For tests, and only for tests:
this function puts the trainings of the user "ls"
on the first page.

This function must be disabled in production.
"""
@app.route('/myclass/api/PutLSClassFirst/', methods=['GET','POST'])
@crossdomain(origin='*')
def PutLSClassFirst():
    # Retrieve the request body (payload)
    payload = request.form.to_dict()
    print(" ### PutLSClassFirst : payload = ", str(payload))
    status, message = mycommon.PutLSClassFirst()
    return jsonify(status=status, message=message)


"""
Data correction - one-shot API
"""
@@ -1956,6 +1985,7 @@ if __name__ == '__main__':
        mycommon.myprint("++ DBNAME " + str(MYSY_GV.dbname) + " ++")
        mycommon.myprint("++ FLASK PORT " + str(MYSY_GV.MYSY_PORT_PROD) + " ++")
        app.run(host='localhost', port=MYSY_GV.MYSY_PORT_PROD, debug=True, threaded=True)

    elif (MYSY_GV.MYSY_ENV == "DEV"):
        mycommon.myprint(" ++++ ENVIRONNEMENT DEVELOPPEMENT ++++")
        mycommon.myprint("++ DATABASE " + MYSY_GV.CONNECTION_STRING + " ++")

@@ -1963,5 +1993,12 @@ if __name__ == '__main__':
        mycommon.myprint("++ FLASK PORT " + str(MYSY_GV.MYSY_PORT_DEV) + " ++")
        app.run(host='localhost', port=MYSY_GV.MYSY_PORT_DEV, debug=True, threaded=True)

    elif (MYSY_GV.MYSY_ENV == "REC"):
        # Banner fixed: this branch is the REC (recette) environment.
        mycommon.myprint(" ++++ ENVIRONNEMENT RECETTE ++++")
        mycommon.myprint("++ DATABASE " + MYSY_GV.CONNECTION_STRING + " ++")
        mycommon.myprint("++ DBNAME " + str(MYSY_GV.dbname) + " ++")
        mycommon.myprint("++ FLASK PORT " + str(MYSY_GV.MYSY_PORT_REC) + " ++")
        app.run(host='localhost', port=MYSY_GV.MYSY_PORT_REC, debug=True, threaded=True)
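
Reviewer note, not part of the commit: the three startup branches differ only in the banner text and the port, so they could be table-driven. A sketch under that assumption; PORTS_BY_ENV and BANNERS_BY_ENV are hypothetical names, and app, mycommon and MYSY_GV are assumed to come from main.py's existing imports:

PORTS_BY_ENV = {"PROD": MYSY_GV.MYSY_PORT_PROD,
                "DEV": MYSY_GV.MYSY_PORT_DEV,
                "REC": MYSY_GV.MYSY_PORT_REC}
BANNERS_BY_ENV = {"PROD": "PRODUCTION", "DEV": "DEVELOPPEMENT", "REC": "RECETTE"}

if __name__ == '__main__':
    env = MYSY_GV.MYSY_ENV
    mycommon.myprint(" ++++ ENVIRONNEMENT " + BANNERS_BY_ENV[env] + " ++++")
    mycommon.myprint("++ DATABASE " + MYSY_GV.CONNECTION_STRING + " ++")
    mycommon.myprint("++ DBNAME " + str(MYSY_GV.dbname) + " ++")
    mycommon.myprint("++ FLASK PORT " + str(PORTS_BY_ENV[env]) + " ++")
    app.run(host='localhost', port=PORTS_BY_ENV[env], debug=True, threaded=True)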
@@ -2300,3 +2300,41 @@ def remove_non_ascii(string):
    return ''.join(char for char in string if ord(char) < 128)


"""
For tests, and only for tests:
this function puts the trainings of the user "ls"
on the first page.

This function must be disabled in production.
"""
def PutLSClassFirst():
    try:
        nb_line = 0
        # Retrieve the partner's recid
        coll_account = MYSY_GV.dbname["partnair_account"]
        myquery = {"email": "ls.lutmanmicca@gmail.com"}

        for val_tmp in coll_account.find(myquery):
            local_recid = val_tmp['recid']
            coll_class = MYSY_GV.dbname["myclass"]

            # pymongo's update_many() takes a filter document and an update
            # document as two separate arguments; a set literal of two dicts
            # would raise a TypeError before reaching MongoDB.
            # Reset the display rank of every training.
            result = coll_class.update_many({}, {"$set": {"display_rank": "20"}})

            # Promote this partner's trainings to the front page.
            result = coll_class.update_many({"partner_owner_recid": str(local_recid)},
                                            {"$set": {"freeacces": "1", "display_rank": "70",
                                                      "isalaune": "1"}})

            print("raw:", result.raw_result)
            print("acknowledged:", result.acknowledged)
            print("matched_count:", result.matched_count)

            nb_line = result.matched_count

        return True, str(nb_line) + " trainings were put on the main page. "

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, False
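
For reference, a minimal standalone illustration of the pymongo call shape that PutLSClassFirst relies on: update_many(filter, update) returns an UpdateResult. The database and collection names below mirror the DEV setup shown earlier; "some_recid" is a placeholder:

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017/")
coll = client["cherifdb_dev"]["myclass"]

# Reset the rank of every training, then promote one partner's trainings.
coll.update_many({}, {"$set": {"display_rank": "20"}})
res = coll.update_many({"partner_owner_recid": "some_recid"},
                       {"$set": {"freeacces": "1", "display_rank": "70", "isalaune": "1"}})
print(res.matched_count, res.modified_count)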
@@ -1 +0,0 @@
pip
@@ -1,29 +0,0 @@
Copyright (c) 2006-2008, Mathieu Fenniak
Some contributions copyright (c) 2007, Ashish Kulkarni <kulkarni.ashish@gmail.com>
Some contributions copyright (c) 2014, Steve Witham <switham_github@mac-guyver.com>

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
@@ -1,36 +0,0 @@
Metadata-Version: 2.1
Name: PyPDF2
Version: 1.26.0
Summary: PDF toolkit
Home-page: http://mstamy2.github.com/PyPDF2
Author: Mathieu Fenniak
Author-email: biziqe@mathieu.fenniak.net
Maintainer: Phaseit, Inc.
Maintainer-email: PyPDF2@phaseit.net
License: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 3
Classifier: Operating System :: OS Independent
Classifier: Topic :: Software Development :: Libraries :: Python Modules


A Pure-Python library built as a PDF toolkit. It is capable of:

- extracting document information (title, author, ...)
- splitting documents page by page
- merging documents page by page
- cropping pages
- merging multiple pages into a single page
- encrypting and decrypting PDF files
- and more!

By being Pure-Python, it should run on any Python platform without any
dependencies on external libraries. It can also work entirely on StringIO
objects rather than file streams, allowing for PDF manipulation in memory.
It is therefore a useful tool for websites that manage or manipulate PDFs.
@@ -1,24 +0,0 @@
PyPDF2-1.26.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyPDF2-1.26.0.dist-info/LICENSE,sha256=qXrCMOXzPvEKU2eoUOsB-R8aCwZONHQsd5TSKUVX9SQ,1605
PyPDF2-1.26.0.dist-info/METADATA,sha256=-zoRYsKMLJ4lSDEmPJnHHoKvwRDGE5SO5FhcyyshZVQ,1247
PyPDF2-1.26.0.dist-info/RECORD,,
PyPDF2-1.26.0.dist-info/WHEEL,sha256=HAnC8L9o-1LKvUME-u3pEKJ5qkuu-E0ZqtcukbQ10Us,98
PyPDF2-1.26.0.dist-info/top_level.txt,sha256=BERWrwqdvKXaVKhpnMbtO6b11qPA-mBt2r9a0VPF-Ow,7
PyPDF2/__init__.py,sha256=ugkP-3fEFZZ2-54PmYpjJ5CISEPD5W8TikZlloOJZ5M,210
PyPDF2/__pycache__/__init__.cpython-37.pyc,,
PyPDF2/__pycache__/_version.cpython-37.pyc,,
PyPDF2/__pycache__/filters.cpython-37.pyc,,
PyPDF2/__pycache__/generic.cpython-37.pyc,,
PyPDF2/__pycache__/merger.cpython-37.pyc,,
PyPDF2/__pycache__/pagerange.cpython-37.pyc,,
PyPDF2/__pycache__/pdf.cpython-37.pyc,,
PyPDF2/__pycache__/utils.cpython-37.pyc,,
PyPDF2/__pycache__/xmp.cpython-37.pyc,,
PyPDF2/_version.py,sha256=R3jtfVjuv42PBofEXH4X4NPetaDQFqmBMrvpxtrdppg,23
PyPDF2/filters.py,sha256=U4KQ7fJX129ePxoff-6-009e9kCWlj8_d2ipnm5QDG4,13167
PyPDF2/generic.py,sha256=o2Dcu9g0uHz9Bntg7V2x_F2MtrZ0Qu0M6Jsdc0TuFWQ,45254
PyPDF2/merger.py,sha256=2Cz4QaB8R-Zm3V5P2rI-QYdqMZlN4geaAtNfrPbcTM4,21387
PyPDF2/pagerange.py,sha256=AEMerbVjzXE55sJ2EYZzBgH1Xt4NiUsHaiycoNaW8Ys,5534
PyPDF2/pdf.py,sha256=1_-pvVGoG1C_ehVMb81swRMejkJ81jh1tRQrxOvGyyM,126327
PyPDF2/utils.py,sha256=-ZQky5qa4gsO0zprA8V_E5sTNRBSa_ungvxvxjdHr64,7833
PyPDF2/xmp.py,sha256=vdjDUAMCqb7-AhkuNaqCanviPHMpuJ-5adY8Kxe5jUc,13639
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: true
Tag: cp37-none-any

@@ -1 +0,0 @@
PyPDF2
@@ -1,5 +0,0 @@
from .pdf import PdfFileReader, PdfFileWriter
from .merger import PdfFileMerger
from .pagerange import PageRange, parse_filename_page_ranges
from ._version import __version__
__all__ = ["pdf", "PdfFileMerger"]
@@ -1 +0,0 @@
__version__ = '1.26.0'
@ -1,362 +0,0 @@
|
|||
# vim: sw=4:expandtab:foldmethod=marker
|
||||
#
|
||||
# Copyright (c) 2006, Mathieu Fenniak
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
# * The name of the author may not be used to endorse or promote products
|
||||
# derived from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
"""
|
||||
Implementation of stream filters for PDF.
|
||||
"""
|
||||
__author__ = "Mathieu Fenniak"
|
||||
__author_email__ = "biziqe@mathieu.fenniak.net"
|
||||
|
||||
from .utils import PdfReadError, ord_, chr_
|
||||
from sys import version_info
|
||||
if version_info < ( 3, 0 ):
|
||||
from cStringIO import StringIO
|
||||
else:
|
||||
from io import StringIO
|
||||
import struct
|
||||
|
||||
try:
|
||||
import zlib
|
||||
|
||||
def decompress(data):
|
||||
return zlib.decompress(data)
|
||||
|
||||
def compress(data):
|
||||
return zlib.compress(data)
|
||||
|
||||
except ImportError:
|
||||
# Unable to import zlib. Attempt to use the System.IO.Compression
|
||||
# library from the .NET framework. (IronPython only)
|
||||
import System
|
||||
from System import IO, Collections, Array
|
||||
|
||||
def _string_to_bytearr(buf):
|
||||
retval = Array.CreateInstance(System.Byte, len(buf))
|
||||
for i in range(len(buf)):
|
||||
retval[i] = ord(buf[i])
|
||||
return retval
|
||||
|
||||
def _bytearr_to_string(bytes):
|
||||
retval = ""
|
||||
for i in range(bytes.Length):
|
||||
retval += chr(bytes[i])
|
||||
return retval
|
||||
|
||||
def _read_bytes(stream):
|
||||
ms = IO.MemoryStream()
|
||||
buf = Array.CreateInstance(System.Byte, 2048)
|
||||
while True:
|
||||
bytes = stream.Read(buf, 0, buf.Length)
|
||||
if bytes == 0:
|
||||
break
|
||||
else:
|
||||
ms.Write(buf, 0, bytes)
|
||||
retval = ms.ToArray()
|
||||
ms.Close()
|
||||
return retval
|
||||
|
||||
def decompress(data):
|
||||
bytes = _string_to_bytearr(data)
|
||||
ms = IO.MemoryStream()
|
||||
ms.Write(bytes, 0, bytes.Length)
|
||||
ms.Position = 0 # fseek 0
|
||||
gz = IO.Compression.DeflateStream(ms, IO.Compression.CompressionMode.Decompress)
|
||||
bytes = _read_bytes(gz)
|
||||
retval = _bytearr_to_string(bytes)
|
||||
gz.Close()
|
||||
return retval
|
||||
|
||||
def compress(data):
|
||||
bytes = _string_to_bytearr(data)
|
||||
ms = IO.MemoryStream()
|
||||
gz = IO.Compression.DeflateStream(ms, IO.Compression.CompressionMode.Compress, True)
|
||||
gz.Write(bytes, 0, bytes.Length)
|
||||
gz.Close()
|
||||
ms.Position = 0 # fseek 0
|
||||
bytes = ms.ToArray()
|
||||
retval = _bytearr_to_string(bytes)
|
||||
ms.Close()
|
||||
return retval
|
||||
|
||||
|
||||
class FlateDecode(object):
|
||||
def decode(data, decodeParms):
|
||||
data = decompress(data)
|
||||
predictor = 1
|
||||
if decodeParms:
|
||||
try:
|
||||
predictor = decodeParms.get("/Predictor", 1)
|
||||
except AttributeError:
|
||||
pass # usually an array with a null object was read
|
||||
|
||||
# predictor 1 == no predictor
|
||||
if predictor != 1:
|
||||
columns = decodeParms["/Columns"]
|
||||
# PNG prediction:
|
||||
if predictor >= 10 and predictor <= 15:
|
||||
output = StringIO()
|
||||
# PNG prediction can vary from row to row
|
||||
rowlength = columns + 1
|
||||
assert len(data) % rowlength == 0
|
||||
prev_rowdata = (0,) * rowlength
|
||||
for row in range(len(data) // rowlength):
|
||||
rowdata = [ord_(x) for x in data[(row*rowlength):((row+1)*rowlength)]]
|
||||
filterByte = rowdata[0]
|
||||
if filterByte == 0:
|
||||
pass
|
||||
elif filterByte == 1:
|
||||
for i in range(2, rowlength):
|
||||
rowdata[i] = (rowdata[i] + rowdata[i-1]) % 256
|
||||
elif filterByte == 2:
|
||||
for i in range(1, rowlength):
|
||||
rowdata[i] = (rowdata[i] + prev_rowdata[i]) % 256
|
||||
else:
|
||||
# unsupported PNG filter
|
||||
raise PdfReadError("Unsupported PNG filter %r" % filterByte)
|
||||
prev_rowdata = rowdata
|
||||
output.write(''.join([chr(x) for x in rowdata[1:]]))
|
||||
data = output.getvalue()
|
||||
else:
|
||||
# unsupported predictor
|
||||
raise PdfReadError("Unsupported flatedecode predictor %r" % predictor)
|
||||
return data
|
||||
decode = staticmethod(decode)
|
||||
|
||||
def encode(data):
|
||||
return compress(data)
|
||||
encode = staticmethod(encode)
|
||||
|
||||
|
||||
class ASCIIHexDecode(object):
|
||||
def decode(data, decodeParms=None):
|
||||
retval = ""
|
||||
char = ""
|
||||
x = 0
|
||||
while True:
|
||||
c = data[x]
|
||||
if c == ">":
|
||||
break
|
||||
elif c.isspace():
|
||||
x += 1
|
||||
continue
|
||||
char += c
|
||||
if len(char) == 2:
|
||||
retval += chr(int(char, base=16))
|
||||
char = ""
|
||||
x += 1
|
||||
assert char == ""
|
||||
return retval
|
||||
decode = staticmethod(decode)
|
||||
|
||||
|
||||
class LZWDecode(object):
|
||||
"""Taken from:
|
||||
http://www.java2s.com/Open-Source/Java-Document/PDF/PDF-Renderer/com/sun/pdfview/decode/LZWDecode.java.htm
|
||||
"""
|
||||
class decoder(object):
|
||||
def __init__(self, data):
|
||||
self.STOP=257
|
||||
self.CLEARDICT=256
|
||||
self.data=data
|
||||
self.bytepos=0
|
||||
self.bitpos=0
|
||||
self.dict=[""]*4096
|
||||
for i in range(256):
|
||||
self.dict[i]=chr(i)
|
||||
self.resetDict()
|
||||
|
||||
def resetDict(self):
|
||||
self.dictlen=258
|
||||
self.bitspercode=9
|
||||
|
||||
def nextCode(self):
|
||||
fillbits=self.bitspercode
|
||||
value=0
|
||||
while fillbits>0 :
|
||||
if self.bytepos >= len(self.data):
|
||||
return -1
|
||||
nextbits=ord(self.data[self.bytepos])
|
||||
bitsfromhere=8-self.bitpos
|
||||
if bitsfromhere>fillbits:
|
||||
bitsfromhere=fillbits
|
||||
value |= (((nextbits >> (8-self.bitpos-bitsfromhere)) &
|
||||
(0xff >> (8-bitsfromhere))) <<
|
||||
(fillbits-bitsfromhere))
|
||||
fillbits -= bitsfromhere
|
||||
self.bitpos += bitsfromhere
|
||||
if self.bitpos >=8:
|
||||
self.bitpos=0
|
||||
self.bytepos = self.bytepos+1
|
||||
return value
|
||||
|
||||
def decode(self):
|
||||
""" algorithm derived from:
|
||||
http://www.rasip.fer.hr/research/compress/algorithms/fund/lz/lzw.html
|
||||
and the PDFReference
|
||||
"""
|
||||
cW = self.CLEARDICT;
|
||||
baos=""
|
||||
while True:
|
||||
pW = cW;
|
||||
cW = self.nextCode();
|
||||
if cW == -1:
|
||||
raise PdfReadError("Missed the stop code in LZWDecode!")
|
||||
if cW == self.STOP:
|
||||
break;
|
||||
elif cW == self.CLEARDICT:
|
||||
self.resetDict();
|
||||
elif pW == self.CLEARDICT:
|
||||
baos+=self.dict[cW]
|
||||
else:
|
||||
if cW < self.dictlen:
|
||||
baos += self.dict[cW]
|
||||
p=self.dict[pW]+self.dict[cW][0]
|
||||
self.dict[self.dictlen]=p
|
||||
self.dictlen+=1
|
||||
else:
|
||||
p=self.dict[pW]+self.dict[pW][0]
|
||||
baos+=p
|
||||
self.dict[self.dictlen] = p;
|
||||
self.dictlen+=1
|
||||
if (self.dictlen >= (1 << self.bitspercode) - 1 and
|
||||
self.bitspercode < 12):
|
||||
self.bitspercode+=1
|
||||
return baos
|
||||
|
||||
@staticmethod
|
||||
def decode(data,decodeParams=None):
|
||||
return LZWDecode.decoder(data).decode()
|
||||
|
||||
|
||||
class ASCII85Decode(object):
|
||||
def decode(data, decodeParms=None):
|
||||
if version_info < ( 3, 0 ):
|
||||
retval = ""
|
||||
group = []
|
||||
x = 0
|
||||
hitEod = False
|
||||
# remove all whitespace from data
|
||||
data = [y for y in data if not (y in ' \n\r\t')]
|
||||
while not hitEod:
|
||||
c = data[x]
|
||||
if len(retval) == 0 and c == "<" and data[x+1] == "~":
|
||||
x += 2
|
||||
continue
|
||||
#elif c.isspace():
|
||||
# x += 1
|
||||
# continue
|
||||
elif c == 'z':
|
||||
assert len(group) == 0
|
||||
retval += '\x00\x00\x00\x00'
|
||||
x += 1
|
||||
continue
|
||||
elif c == "~" and data[x+1] == ">":
|
||||
if len(group) != 0:
|
||||
# cannot have a final group of just 1 char
|
||||
assert len(group) > 1
|
||||
cnt = len(group) - 1
|
||||
group += [ 85, 85, 85 ]
|
||||
hitEod = cnt
|
||||
else:
|
||||
break
|
||||
else:
|
||||
c = ord(c) - 33
|
||||
assert c >= 0 and c < 85
|
||||
group += [ c ]
|
||||
if len(group) >= 5:
|
||||
b = group[0] * (85**4) + \
|
||||
group[1] * (85**3) + \
|
||||
group[2] * (85**2) + \
|
||||
group[3] * 85 + \
|
||||
group[4]
|
||||
assert b < (2**32 - 1)
|
||||
c4 = chr((b >> 0) % 256)
|
||||
c3 = chr((b >> 8) % 256)
|
||||
c2 = chr((b >> 16) % 256)
|
||||
c1 = chr(b >> 24)
|
||||
retval += (c1 + c2 + c3 + c4)
|
||||
if hitEod:
|
||||
retval = retval[:-4+hitEod]
|
||||
group = []
|
||||
x += 1
|
||||
return retval
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
data = data.encode('ascii')
|
||||
n = b = 0
|
||||
out = bytearray()
|
||||
for c in data:
|
||||
if ord('!') <= c and c <= ord('u'):
|
||||
n += 1
|
||||
b = b*85+(c-33)
|
||||
if n == 5:
|
||||
out += struct.pack(b'>L',b)
|
||||
n = b = 0
|
||||
elif c == ord('z'):
|
||||
assert n == 0
|
||||
out += b'\0\0\0\0'
|
||||
elif c == ord('~'):
|
||||
if n:
|
||||
for _ in range(5-n):
|
||||
b = b*85+84
|
||||
out += struct.pack(b'>L',b)[:n-1]
|
||||
break
|
||||
return bytes(out)
|
||||
decode = staticmethod(decode)
|
||||
|
||||
|
||||
def decodeStreamData(stream):
|
||||
from .generic import NameObject
|
||||
filters = stream.get("/Filter", ())
|
||||
if len(filters) and not isinstance(filters[0], NameObject):
|
||||
# we have a single filter instance
|
||||
filters = (filters,)
|
||||
data = stream._data
|
||||
# If there is not data to decode we should not try to decode the data.
|
||||
if data:
|
||||
for filterType in filters:
|
||||
if filterType == "/FlateDecode" or filterType == "/Fl":
|
||||
data = FlateDecode.decode(data, stream.get("/DecodeParms"))
|
||||
elif filterType == "/ASCIIHexDecode" or filterType == "/AHx":
|
||||
data = ASCIIHexDecode.decode(data)
|
||||
elif filterType == "/LZWDecode" or filterType == "/LZW":
|
||||
data = LZWDecode.decode(data, stream.get("/DecodeParms"))
|
||||
elif filterType == "/ASCII85Decode" or filterType == "/A85":
|
||||
data = ASCII85Decode.decode(data)
|
||||
elif filterType == "/Crypt":
|
||||
decodeParams = stream.get("/DecodeParams", {})
|
||||
if "/Name" not in decodeParams and "/Type" not in decodeParams:
|
||||
pass
|
||||
else:
|
||||
raise NotImplementedError("/Crypt filter with /Name or /Type not supported yet")
|
||||
else:
|
||||
# unsupported filter
|
||||
raise NotImplementedError("unsupported filter %s" % filterType)
|
||||
return data

File diff suppressed because it is too large

@@ -1,553 +0,0 @@
# vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2006, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from .generic import *
from .utils import isString, str_
from .pdf import PdfFileReader, PdfFileWriter
from .pagerange import PageRange
from sys import version_info
if version_info < ( 3, 0 ):
    from cStringIO import StringIO
    StreamIO = StringIO
else:
    from io import BytesIO
    from io import FileIO as file
    StreamIO = BytesIO


class _MergedPage(object):
    """
    _MergedPage is used internally by PdfFileMerger to collect necessary
    information on each page that is being merged.
    """
    def __init__(self, pagedata, src, id):
        self.src = src
        self.pagedata = pagedata
        self.out_pagedata = None
        self.id = id


class PdfFileMerger(object):
    """
    Initializes a PdfFileMerger object. PdfFileMerger merges multiple PDFs
    into a single PDF. It can concatenate, slice, insert, or any combination
    of the above.

    See the functions :meth:`merge()<merge>` (or :meth:`append()<append>`)
    and :meth:`write()<write>` for usage information.

    :param bool strict: Determines whether user should be warned of all
        problems and also causes some correctable problems to be fatal.
        Defaults to ``True``.
    """

    def __init__(self, strict=True):
        self.inputs = []
        self.pages = []
        self.output = PdfFileWriter()
        self.bookmarks = []
        self.named_dests = []
        self.id_count = 0
        self.strict = strict

    def merge(self, position, fileobj, bookmark=None, pages=None, import_bookmarks=True):
        """
        Merges the pages from the given file into the output file at the
        specified page number.

        :param int position: The *page number* to insert this file. File will
            be inserted after the given number.

        :param fileobj: A File Object or an object that supports the standard read
            and seek methods similar to a File Object. Could also be a
            string representing a path to a PDF file.

        :param str bookmark: Optionally, you may specify a bookmark to be applied at
            the beginning of the included file by supplying the text of the bookmark.

        :param pages: can be a :ref:`Page Range <page-range>` or a ``(start, stop[, step])`` tuple
            to merge only the specified range of pages from the source
            document into the output document.

        :param bool import_bookmarks: You may prevent the source document's bookmarks
            from being imported by specifying this as ``False``.
        """

        # This parameter is passed to self.inputs.append and means
        # that the stream used was created in this method.
        my_file = False

        # If the fileobj parameter is a string, assume it is a path
        # and create a file object at that location. If it is a file,
        # copy the file's contents into a BytesIO (or StreamIO) stream object; if
        # it is a PdfFileReader, copy that reader's stream into a
        # BytesIO (or StreamIO) stream.
        # If fileobj is none of the above types, it is not modified
        decryption_key = None
        if isString(fileobj):
            fileobj = file(fileobj, 'rb')
            my_file = True
        elif isinstance(fileobj, file):
            fileobj.seek(0)
            filecontent = fileobj.read()
            fileobj = StreamIO(filecontent)
            my_file = True
        elif isinstance(fileobj, PdfFileReader):
            orig_tell = fileobj.stream.tell()
            fileobj.stream.seek(0)
            filecontent = StreamIO(fileobj.stream.read())
            fileobj.stream.seek(orig_tell)  # reset the stream to its original location
            fileobj = filecontent
            if hasattr(fileobj, '_decryption_key'):
                decryption_key = fileobj._decryption_key
            my_file = True

        # Create a new PdfFileReader instance using the stream
        # (either file or BytesIO or StringIO) created above
        pdfr = PdfFileReader(fileobj, strict=self.strict)
        if decryption_key is not None:
            pdfr._decryption_key = decryption_key

        # Find the range of pages to merge.
        if pages == None:
            pages = (0, pdfr.getNumPages())
        elif isinstance(pages, PageRange):
            pages = pages.indices(pdfr.getNumPages())
        elif not isinstance(pages, tuple):
            raise TypeError('"pages" must be a tuple of (start, stop[, step])')

        srcpages = []
        if bookmark:
            bookmark = Bookmark(TextStringObject(bookmark), NumberObject(self.id_count), NameObject('/Fit'))

        outline = []
        if import_bookmarks:
            outline = pdfr.getOutlines()
            outline = self._trim_outline(pdfr, outline, pages)

        if bookmark:
            self.bookmarks += [bookmark, outline]
        else:
            self.bookmarks += outline

        dests = pdfr.namedDestinations
        dests = self._trim_dests(pdfr, dests, pages)
        self.named_dests += dests

        # Gather all the pages that are going to be merged
        for i in range(*pages):
            pg = pdfr.getPage(i)

            id = self.id_count
            self.id_count += 1

            mp = _MergedPage(pg, pdfr, id)

            srcpages.append(mp)

        self._associate_dests_to_pages(srcpages)
        self._associate_bookmarks_to_pages(srcpages)

        # Slice to insert the pages at the specified position
        self.pages[position:position] = srcpages

        # Keep track of our input files so we can close them later
        self.inputs.append((fileobj, pdfr, my_file))

    def append(self, fileobj, bookmark=None, pages=None, import_bookmarks=True):
        """
        Identical to the :meth:`merge()<merge>` method, but assumes you want to concatenate
        all pages onto the end of the file instead of specifying a position.

        :param fileobj: A File Object or an object that supports the standard read
            and seek methods similar to a File Object. Could also be a
            string representing a path to a PDF file.

        :param str bookmark: Optionally, you may specify a bookmark to be applied at
            the beginning of the included file by supplying the text of the bookmark.

        :param pages: can be a :ref:`Page Range <page-range>` or a ``(start, stop[, step])`` tuple
            to merge only the specified range of pages from the source
            document into the output document.

        :param bool import_bookmarks: You may prevent the source document's bookmarks
            from being imported by specifying this as ``False``.
        """

        self.merge(len(self.pages), fileobj, bookmark, pages, import_bookmarks)

    def write(self, fileobj):
        """
        Writes all data that has been merged to the given output file.

        :param fileobj: Output file. Can be a filename or any kind of
            file-like object.
        """
        my_file = False
        if isString(fileobj):
            fileobj = file(fileobj, 'wb')
            my_file = True

        # Add pages to the PdfFileWriter
        # The commented out line below was replaced with the two lines below it to allow PdfFileMerger to work with PyPdf 1.13
        for page in self.pages:
            self.output.addPage(page.pagedata)
            page.out_pagedata = self.output.getReference(self.output._pages.getObject()["/Kids"][-1].getObject())
            #idnum = self.output._objects.index(self.output._pages.getObject()["/Kids"][-1].getObject()) + 1
            #page.out_pagedata = IndirectObject(idnum, 0, self.output)

        # Once all pages are added, create bookmarks to point at those pages
        self._write_dests()
        self._write_bookmarks()

        # Write the output to the file
        self.output.write(fileobj)

        if my_file:
            fileobj.close()

    def close(self):
        """
        Shuts all file descriptors (input and output) and clears all memory
        usage.
        """
        self.pages = []
        for fo, pdfr, mine in self.inputs:
            if mine:
                fo.close()

        self.inputs = []
        self.output = None

    def addMetadata(self, infos):
        """
        Add custom metadata to the output.

        :param dict infos: a Python dictionary where each key is a field
            and each value is your new metadata.
            Example: ``{u'/Title': u'My title'}``
        """
        self.output.addMetadata(infos)

    def setPageLayout(self, layout):
        """
        Set the page layout

        :param str layout: The page layout to be used

        Valid layouts are:
             /NoLayout        Layout explicitly not specified
             /SinglePage      Show one page at a time
             /OneColumn       Show one column at a time
             /TwoColumnLeft   Show pages in two columns, odd-numbered pages on the left
             /TwoColumnRight  Show pages in two columns, odd-numbered pages on the right
             /TwoPageLeft     Show two pages at a time, odd-numbered pages on the left
             /TwoPageRight    Show two pages at a time, odd-numbered pages on the right
        """
        self.output.setPageLayout(layout)

    def setPageMode(self, mode):
        """
        Set the page mode.

        :param str mode: The page mode to use.

        Valid modes are:
            /UseNone         Do not show outlines or thumbnails panels
            /UseOutlines     Show outlines (aka bookmarks) panel
            /UseThumbs       Show page thumbnails panel
            /FullScreen      Fullscreen view
            /UseOC           Show Optional Content Group (OCG) panel
            /UseAttachments  Show attachments panel
        """
        self.output.setPageMode(mode)

    def _trim_dests(self, pdf, dests, pages):
        """
        Removes any named destinations that are not a part of the specified
        page set.
        """
        new_dests = []
        prev_header_added = True
        for k, o in list(dests.items()):
            for j in range(*pages):
                if pdf.getPage(j).getObject() == o['/Page'].getObject():
                    o[NameObject('/Page')] = o['/Page'].getObject()
                    assert str_(k) == str_(o['/Title'])
                    new_dests.append(o)
                    break
        return new_dests

    def _trim_outline(self, pdf, outline, pages):
        """
        Removes any outline/bookmark entries that are not a part of the
        specified page set.
        """
        new_outline = []
        prev_header_added = True
        for i, o in enumerate(outline):
            if isinstance(o, list):
                sub = self._trim_outline(pdf, o, pages)
                if sub:
                    if not prev_header_added:
                        new_outline.append(outline[i-1])
                    new_outline.append(sub)
            else:
                prev_header_added = False
                for j in range(*pages):
                    if pdf.getPage(j).getObject() == o['/Page'].getObject():
                        o[NameObject('/Page')] = o['/Page'].getObject()
                        new_outline.append(o)
                        prev_header_added = True
                        break
        return new_outline

    def _write_dests(self):
        dests = self.named_dests

        for v in dests:
            pageno = None
            pdf = None
            if '/Page' in v:
                for i, p in enumerate(self.pages):
                    if p.id == v['/Page']:
                        v[NameObject('/Page')] = p.out_pagedata
                        pageno = i
                        pdf = p.src
                        break
            if pageno != None:
                self.output.addNamedDestinationObject(v)

    def _write_bookmarks(self, bookmarks=None, parent=None):

        if bookmarks == None:
            bookmarks = self.bookmarks

        last_added = None
        for b in bookmarks:
            if isinstance(b, list):
                self._write_bookmarks(b, last_added)
                continue

            pageno = None
            pdf = None
            if '/Page' in b:
                for i, p in enumerate(self.pages):
                    if p.id == b['/Page']:
                        #b[NameObject('/Page')] = p.out_pagedata
                        args = [NumberObject(p.id), NameObject(b['/Type'])]
                        #nothing more to add
                        #if b['/Type'] == '/Fit' or b['/Type'] == '/FitB'
                        if b['/Type'] == '/FitH' or b['/Type'] == '/FitBH':
                            if '/Top' in b and not isinstance(b['/Top'], NullObject):
                                args.append(FloatObject(b['/Top']))
                            else:
                                args.append(FloatObject(0))
                            del b['/Top']
                        elif b['/Type'] == '/FitV' or b['/Type'] == '/FitBV':
                            if '/Left' in b and not isinstance(b['/Left'], NullObject):
                                args.append(FloatObject(b['/Left']))
                            else:
                                args.append(FloatObject(0))
                            del b['/Left']
                        elif b['/Type'] == '/XYZ':
                            if '/Left' in b and not isinstance(b['/Left'], NullObject):
                                args.append(FloatObject(b['/Left']))
                            else:
                                args.append(FloatObject(0))
                            if '/Top' in b and not isinstance(b['/Top'], NullObject):
                                args.append(FloatObject(b['/Top']))
                            else:
                                args.append(FloatObject(0))
                            if '/Zoom' in b and not isinstance(b['/Zoom'], NullObject):
                                args.append(FloatObject(b['/Zoom']))
                            else:
                                args.append(FloatObject(0))
                            del b['/Top'], b['/Zoom'], b['/Left']
                        elif b['/Type'] == '/FitR':
                            if '/Left' in b and not isinstance(b['/Left'], NullObject):
                                args.append(FloatObject(b['/Left']))
                            else:
                                args.append(FloatObject(0))
                            if '/Bottom' in b and not isinstance(b['/Bottom'], NullObject):
                                args.append(FloatObject(b['/Bottom']))
                            else:
                                args.append(FloatObject(0))
                            if '/Right' in b and not isinstance(b['/Right'], NullObject):
                                args.append(FloatObject(b['/Right']))
                            else:
                                args.append(FloatObject(0))
                            if '/Top' in b and not isinstance(b['/Top'], NullObject):
                                args.append(FloatObject(b['/Top']))
                            else:
                                args.append(FloatObject(0))
                            del b['/Left'], b['/Right'], b['/Bottom'], b['/Top']

                        b[NameObject('/A')] = DictionaryObject({NameObject('/S'): NameObject('/GoTo'), NameObject('/D'): ArrayObject(args)})

                        pageno = i
                        pdf = p.src
                        break
            if pageno != None:
                del b['/Page'], b['/Type']
                last_added = self.output.addBookmarkDict(b, parent)

    def _associate_dests_to_pages(self, pages):
        for nd in self.named_dests:
            pageno = None
            np = nd['/Page']

            if isinstance(np, NumberObject):
                continue

            for p in pages:
                if np.getObject() == p.pagedata.getObject():
                    pageno = p.id

            if pageno != None:
                nd[NameObject('/Page')] = NumberObject(pageno)
            else:
                raise ValueError("Unresolved named destination '%s'" % (nd['/Title'],))

    def _associate_bookmarks_to_pages(self, pages, bookmarks=None):
        if bookmarks == None:
            bookmarks = self.bookmarks

        for b in bookmarks:
            if isinstance(b, list):
                self._associate_bookmarks_to_pages(pages, b)
                continue

            pageno = None
            bp = b['/Page']

            if isinstance(bp, NumberObject):
                continue

            for p in pages:
                if bp.getObject() == p.pagedata.getObject():
                    pageno = p.id

            if pageno != None:
                b[NameObject('/Page')] = NumberObject(pageno)
            else:
                raise ValueError("Unresolved bookmark '%s'" % (b['/Title'],))

    def findBookmark(self, bookmark, root=None):
        if root == None:
            root = self.bookmarks

        for i, b in enumerate(root):
            if isinstance(b, list):
                res = self.findBookmark(bookmark, b)
                if res:
                    return [i] + res
            elif b == bookmark or b['/Title'] == bookmark:
                return [i]

        return None

    def addBookmark(self, title, pagenum, parent=None):
        """
        Add a bookmark to this PDF file.

        :param str title: Title to use for this bookmark.
        :param int pagenum: Page number this bookmark will point to.
        :param parent: A reference to a parent bookmark to create nested
            bookmarks.
        """
        if parent == None:
            iloc = [len(self.bookmarks)-1]
        elif isinstance(parent, list):
            iloc = parent
        else:
            iloc = self.findBookmark(parent)

        dest = Bookmark(TextStringObject(title), NumberObject(pagenum), NameObject('/FitH'), NumberObject(826))

        if parent == None:
            self.bookmarks.append(dest)
        else:
            bmparent = self.bookmarks
            for i in iloc[:-1]:
                bmparent = bmparent[i]
            npos = iloc[-1]+1
            if npos < len(bmparent) and isinstance(bmparent[npos], list):
                bmparent[npos].append(dest)
            else:
                bmparent.insert(npos, [dest])
        return dest

    def addNamedDestination(self, title, pagenum):
        """
        Add a destination to the output.

        :param str title: Title to use
        :param int pagenum: Page number this destination points at.
        """

        dest = Destination(TextStringObject(title), NumberObject(pagenum), NameObject('/FitH'), NumberObject(826))
        self.named_dests.append(dest)


class OutlinesObject(list):
    def __init__(self, pdf, tree, parent=None):
        list.__init__(self)
        self.tree = tree
        self.pdf = pdf
        self.parent = parent

    def remove(self, index):
        obj = self[index]
        del self[index]
        self.tree.removeChild(obj)

    def add(self, title, pagenum):
        pageRef = self.pdf.getObject(self.pdf._pages)['/Kids'][pagenum]
        action = DictionaryObject()
        action.update({
            NameObject('/D') : ArrayObject([pageRef, NameObject('/FitH'), NumberObject(826)]),
            NameObject('/S') : NameObject('/GoTo')
        })
        actionRef = self.pdf._addObject(action)
        bookmark = TreeObject()

        bookmark.update({
            NameObject('/A'): actionRef,
            NameObject('/Title'): createStringObject(title),
        })

        self.pdf._addObject(bookmark)

        self.tree.addChild(bookmark)

    def removeAll(self):
        for child in [x for x in self.tree.children()]:
            self.tree.removeChild(child)
            self.pop()
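
# Illustrative usage sketch (assumption, not from the original file; the
# filenames are hypothetical): a typical PdfFileMerger workflow concatenates
# documents, optionally inserts one at a position, then writes the result.
#
#     merger = PdfFileMerger(strict=False)
#     merger.append('cover.pdf', bookmark='Cover')
#     merger.append('body.pdf', pages=PageRange(':10'))
#     merger.merge(1, 'errata.pdf')   # insert right after the first page
#     merger.write('combined.pdf')
#     merger.close()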

@@ -1,152 +0,0 @@
#!/usr/bin/env python
"""
Representation and utils for ranges of PDF file pages.

Copyright (c) 2014, Steve Witham <switham_github@mac-guyver.com>.
All rights reserved. This software is available under a BSD license;
see https://github.com/mstamy2/PyPDF2/blob/master/LICENSE
"""

import re
from .utils import isString

_INT_RE = r"(0|-?[1-9]\d*)"  # A decimal int, don't allow "-0".
PAGE_RANGE_RE = "^({int}|({int}?(:{int}?(:{int}?)?)))$".format(int=_INT_RE)
# groups:         12     34     5 6     7 8


class ParseError(Exception):
    pass


PAGE_RANGE_HELP = """Remember, page indices start with zero.
        Page range expression examples:
            :     all pages.                   -1    last page.
            22    just the 23rd page.          :-1   all but the last page.
            0:3   the first three pages.       -2    second-to-last page.
            :3    the first three pages.       -2:   last two pages.
            5:    from the sixth page onward.  -3:-1 third & second to last.
        The third, "stride" or "step" number is also recognized.
            ::2       0 2 4 ... to the end.    3:0:-1    3 2 1 but not 0.
            1:10:2    1 3 5 7 9                2::-1     2 1 0.
            ::-1      all pages in reverse order.
"""


class PageRange(object):
    """
    A slice-like representation of a range of page indices,
    i.e. page numbers, only starting at zero.
    The syntax is like what you would put between brackets [ ].
    The slice is one of the few Python types that can't be subclassed,
    but this class converts to and from slices, and allows similar use.
      o  PageRange(str) parses a string representing a page range.
      o  PageRange(slice) directly "imports" a slice.
      o  to_slice() gives the equivalent slice.
      o  str() and repr() allow printing.
      o  indices(n) is like slice.indices(n).
    """

    def __init__(self, arg):
        """
        Initialize with either a slice -- giving the equivalent page range,
        or a PageRange object -- making a copy,
        or a string like
            "int", "[int]:[int]" or "[int]:[int]:[int]",
            where the brackets indicate optional ints.
        {page_range_help}
        Note the difference between this notation and arguments to slice():
            slice(3) means the first three pages;
            PageRange("3") means the range of only the fourth page.
            However PageRange(slice(3)) means the first three pages.
        """
        if isinstance(arg, slice):
            self._slice = arg
            return

        if isinstance(arg, PageRange):
            self._slice = arg.to_slice()
            return

        m = isString(arg) and re.match(PAGE_RANGE_RE, arg)
        if not m:
            raise ParseError(arg)
        elif m.group(2):
            # Special case: just an int means a range of one page.
            start = int(m.group(2))
            stop = start + 1 if start != -1 else None
            self._slice = slice(start, stop)
        else:
            self._slice = slice(*[int(g) if g else None
                                  for g in m.group(4, 6, 8)])

    # Just formatting this when there is __doc__ for __init__
    if __init__.__doc__:
        __init__.__doc__ = __init__.__doc__.format(page_range_help=PAGE_RANGE_HELP)

    @staticmethod
    def valid(input):
        """ True if input is a valid initializer for a PageRange. """
        return isinstance(input, slice) or \
               isinstance(input, PageRange) or \
               (isString(input)
                and bool(re.match(PAGE_RANGE_RE, input)))

    def to_slice(self):
        """ Return the slice equivalent of this page range. """
        return self._slice

    def __str__(self):
        """ A string like "1:2:3". """
        s = self._slice
        if s.step == None:
            if s.start != None and s.stop == s.start + 1:
                return str(s.start)

            indices = s.start, s.stop
        else:
            indices = s.start, s.stop, s.step
        return ':'.join("" if i == None else str(i) for i in indices)

    def __repr__(self):
        """ A string like "PageRange('1:2:3')". """
        return "PageRange(" + repr(str(self)) + ")"

    def indices(self, n):
        """
        n is the length of the list of pages to choose from.
        Returns arguments for range().  See help(slice.indices).
        """
        return self._slice.indices(n)
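
# Illustrative examples (not part of the original file) of the PageRange
# notation described above:
#
#     PageRange(":3").to_slice()      # slice(None, 3, None) -> first 3 pages
#     PageRange("-1").indices(10)     # (9, 10, 1)           -> just the last page
#     str(PageRange(slice(0, 5, 2)))  # '0:5:2'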

PAGE_RANGE_ALL = PageRange(":")  # The range of all pages.


def parse_filename_page_ranges(args):
    """
    Given a list of filenames and page ranges, return a list of
    (filename, page_range) pairs.
    First arg must be a filename; other args are filenames, page-range
    expressions, slice objects, or PageRange objects.
    A filename not followed by a page range indicates all pages of the file.
    """
    pairs = []
    pdf_filename = None
    did_page_range = False
    for arg in args + [None]:
        if PageRange.valid(arg):
            if not pdf_filename:
                raise ValueError("The first argument must be a filename, "
                                 "not a page range.")

            pairs.append( (pdf_filename, PageRange(arg)) )
            did_page_range = True
        else:
            # New filename or end of list--do all of the previous file?
            if pdf_filename and not did_page_range:
                pairs.append( (pdf_filename, PAGE_RANGE_ALL) )

            pdf_filename = arg
            did_page_range = False
    return pairs
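
# Illustrative example (not part of the original file):
#
#     parse_filename_page_ranges(['a.pdf', '0:3', 'b.pdf'])
#     # -> [('a.pdf', PageRange('0:3')), ('b.pdf', PAGE_RANGE_ALL)]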

File diff suppressed because it is too large

@@ -1,295 +0,0 @@
# Copyright (c) 2006, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""
Utility functions for PDF library.
"""
__author__ = "Mathieu Fenniak"
__author_email__ = "biziqe@mathieu.fenniak.net"


import sys

try:
    import __builtin__ as builtins
except ImportError:  # Py3
    import builtins


xrange_fn = getattr(builtins, "xrange", range)
_basestring = getattr(builtins, "basestring", str)

bytes_type = type(bytes())  # Works the same in Python 2.X and 3.X
string_type = getattr(builtins, "unicode", str)
int_types = (int, long) if sys.version_info[0] < 3 else (int,)


# Make basic type tests more consistent
def isString(s):
    """Test if arg is a string. Compatible with Python 2 and 3."""
    return isinstance(s, _basestring)


def isInt(n):
    """Test if arg is an int. Compatible with Python 2 and 3."""
    return isinstance(n, int_types)


def isBytes(b):
    """Test if arg is a bytes instance. Compatible with Python 2 and 3."""
    return isinstance(b, bytes_type)


# custom implementation of warnings.formatwarning
def formatWarning(message, category, filename, lineno, line=None):
    file = filename.replace("/", "\\").rsplit("\\", 1)[1]  # find the file name
    return "%s: %s [%s:%s]\n" % (category.__name__, message, file, lineno)


def readUntilWhitespace(stream, maxchars=None):
    """
    Reads non-whitespace characters and returns them.
    Stops upon encountering whitespace or when maxchars is reached.
    """
    txt = b_("")
    while True:
        tok = stream.read(1)
        if tok.isspace() or not tok:
            break
        txt += tok
        if len(txt) == maxchars:
            break
    return txt


def readNonWhitespace(stream):
    """
    Finds and reads the next non-whitespace character (ignores whitespace).
    """
    tok = WHITESPACES[0]
    while tok in WHITESPACES:
        tok = stream.read(1)
    return tok


def skipOverWhitespace(stream):
    """
    Similar to readNonWhitespace, but returns a Boolean if more than
    one whitespace character was read.
    """
    tok = WHITESPACES[0]
    cnt = 0
    while tok in WHITESPACES:
        tok = stream.read(1)
        cnt += 1
    return (cnt > 1)


def skipOverComment(stream):
    tok = stream.read(1)
    stream.seek(-1, 1)
    if tok == b_('%'):
        while tok not in (b_('\n'), b_('\r')):
            tok = stream.read(1)


def readUntilRegex(stream, regex, ignore_eof=False):
    """
    Reads until the regular expression pattern matches (the match itself is
    not consumed). Raises PdfStreamError on premature end-of-file.
    :param bool ignore_eof: If true, ignore end-of-line and return immediately
    """
    name = b_('')
    while True:
        tok = stream.read(16)
        if not tok:
            # stream has truncated prematurely
            if ignore_eof == True:
                return name
            else:
                raise PdfStreamError("Stream has ended unexpectedly")
        m = regex.search(tok)
        if m is not None:
            name += tok[:m.start()]
            stream.seek(m.start()-len(tok), 1)
            break
        name += tok
    return name


class ConvertFunctionsToVirtualList(object):
    def __init__(self, lengthFunction, getFunction):
        self.lengthFunction = lengthFunction
        self.getFunction = getFunction

    def __len__(self):
        return self.lengthFunction()

    def __getitem__(self, index):
        if isinstance(index, slice):
            indices = xrange_fn(*index.indices(len(self)))
            cls = type(self)
            return cls(indices.__len__, lambda idx: self[indices[idx]])
        if not isInt(index):
            raise TypeError("sequence indices must be integers")
        len_self = len(self)
        if index < 0:
            # support negative indexes
            index = len_self + index
        if index < 0 or index >= len_self:
            raise IndexError("sequence index out of range")
        return self.getFunction(index)


def RC4_encrypt(key, plaintext):
    S = [i for i in range(256)]
    j = 0
    for i in range(256):
        j = (j + S[i] + ord_(key[i % len(key)])) % 256
        S[i], S[j] = S[j], S[i]
    i, j = 0, 0
    retval = b_("")
    for x in range(len(plaintext)):
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        t = S[(S[i] + S[j]) % 256]
        retval += b_(chr(ord_(plaintext[x]) ^ t))
    return retval
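
# Illustrative property check (not part of the original file): RC4 is a
# symmetric stream cipher, so encrypting twice with the same key round-trips.
#
#     msg = b_("Attack at dawn")
#     assert RC4_encrypt(b_("Secret"), RC4_encrypt(b_("Secret"), msg)) == msg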


def matrixMultiply(a, b):
    return [[sum([float(i)*float(j)
                  for i, j in zip(row, col)]
                ) for col in zip(*b)]
            for row in a]
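
# Illustrative example (not part of the original file): multiplying two 2x2
# matrices, as used when composing PDF transformation matrices.
#
#     matrixMultiply([[1, 2], [3, 4]], [[5, 6], [7, 8]])
#     # -> [[19.0, 22.0], [43.0, 50.0]]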


def markLocation(stream):
    """Creates text file showing current location in context."""
    # Mainly for debugging
    RADIUS = 5000
    stream.seek(-RADIUS, 1)
    outputDoc = open('PyPDF2_pdfLocation.txt', 'w')
    outputDoc.write(stream.read(RADIUS))
    outputDoc.write('HERE')
    outputDoc.write(stream.read(RADIUS))
    outputDoc.close()
    stream.seek(-RADIUS, 1)


class PyPdfError(Exception):
    pass


class PdfReadError(PyPdfError):
    pass


class PageSizeNotDefinedError(PyPdfError):
    pass


class PdfReadWarning(UserWarning):
    pass


class PdfStreamError(PdfReadError):
    pass


if sys.version_info[0] < 3:
    def b_(s):
        return s
else:
    B_CACHE = {}

    def b_(s):
        bc = B_CACHE
        if s in bc:
            return bc[s]
        if type(s) == bytes:
            return s
        else:
            r = s.encode('latin-1')
            if len(s) < 2:
                bc[s] = r
            return r


def u_(s):
    if sys.version_info[0] < 3:
        return unicode(s, 'unicode_escape')
    else:
        return s


def str_(b):
    if sys.version_info[0] < 3:
        return b
    else:
        if type(b) == bytes:
            return b.decode('latin-1')
        else:
            return b


def ord_(b):
    if sys.version_info[0] < 3 or type(b) == str:
        return ord(b)
    else:
        return b


def chr_(c):
    if sys.version_info[0] < 3:
        return c
    else:
        return chr(c)


def barray(b):
    if sys.version_info[0] < 3:
        return b
    else:
        return bytearray(b)


def hexencode(b):
    if sys.version_info[0] < 3:
        return b.encode('hex')
    else:
        import codecs
        coder = codecs.getencoder('hex_codec')
        return coder(b)[0]


def hexStr(num):
    return hex(num).replace('L', '')


WHITESPACES = [b_(x) for x in [' ', '\n', '\r', '\t', '\x00']]

@@ -1,358 +0,0 @@
import re
import datetime
import decimal
from .generic import PdfObject
from xml.dom import getDOMImplementation
from xml.dom.minidom import parseString
from .utils import u_

RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
DC_NAMESPACE = "http://purl.org/dc/elements/1.1/"
XMP_NAMESPACE = "http://ns.adobe.com/xap/1.0/"
PDF_NAMESPACE = "http://ns.adobe.com/pdf/1.3/"
XMPMM_NAMESPACE = "http://ns.adobe.com/xap/1.0/mm/"

# What is the PDFX namespace, you might ask? I might ask that too. It's
# a completely undocumented namespace used to place "custom metadata"
# properties, which are arbitrary metadata properties with no semantic or
# documented meaning. Elements in the namespace are key/value-style storage,
# where the element name is the key and the content is the value. The keys
# are transformed into valid XML identifiers by substituting an invalid
# identifier character with \u2182 followed by the unicode hex ID of the
# original character. A key like "my car" is therefore "my\u21820020car".
#
# \u2182, in case you're wondering, is the unicode character
# \u{ROMAN NUMERAL TEN THOUSAND}, a straightforward and obvious choice for
# escaping characters.
#
# Intentional users of the pdfx namespace should be shot on sight. A
# custom data schema and sensical XML elements could be used instead, as is
# suggested by Adobe's own documentation on XMP (under "Extensibility of
# Schemas").
#
# Information presented here on the /pdfx/ schema is a result of limited
# reverse engineering, and does not constitute a full specification.
PDFX_NAMESPACE = "http://ns.adobe.com/pdfx/1.3/"
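
# Illustrative sketch (not part of the original file) of the pdfx key
# unescaping described above, mirroring the loop in custom_properties below:
#
#     key = u_("my\u21820020car")
#     idx = key.find(u_("\u2182"))
#     key = key[:idx] + chr(int(key[idx+1:idx+5], base=16)) + key[idx+5:]
#     # key == "my car"   (0x0020 is the space character)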

iso8601 = re.compile("""
        (?P<year>[0-9]{4})
        (-
            (?P<month>[0-9]{2})
            (-
                (?P<day>[0-9]+)
                (T
                    (?P<hour>[0-9]{2}):
                    (?P<minute>[0-9]{2})
                    (:(?P<second>[0-9]{2}(.[0-9]+)?))?
                    (?P<tzd>Z|[-+][0-9]{2}:[0-9]{2})
                )?
            )?
        )?
        """, re.VERBOSE)


class XmpInformation(PdfObject):
    """
    An object that represents Adobe XMP metadata.
    Usually accessed by :meth:`getXmpMetadata()<PyPDF2.PdfFileReader.getXmpMetadata>`
    """

    def __init__(self, stream):
        self.stream = stream
        docRoot = parseString(self.stream.getData())
        self.rdfRoot = docRoot.getElementsByTagNameNS(RDF_NAMESPACE, "RDF")[0]
        self.cache = {}

    def writeToStream(self, stream, encryption_key):
        self.stream.writeToStream(stream, encryption_key)

    def getElement(self, aboutUri, namespace, name):
        for desc in self.rdfRoot.getElementsByTagNameNS(RDF_NAMESPACE, "Description"):
            if desc.getAttributeNS(RDF_NAMESPACE, "about") == aboutUri:
                attr = desc.getAttributeNodeNS(namespace, name)
                if attr != None:
                    yield attr
                for element in desc.getElementsByTagNameNS(namespace, name):
                    yield element

    def getNodesInNamespace(self, aboutUri, namespace):
        for desc in self.rdfRoot.getElementsByTagNameNS(RDF_NAMESPACE, "Description"):
            if desc.getAttributeNS(RDF_NAMESPACE, "about") == aboutUri:
                for i in range(desc.attributes.length):
                    attr = desc.attributes.item(i)
                    if attr.namespaceURI == namespace:
                        yield attr
                for child in desc.childNodes:
                    if child.namespaceURI == namespace:
                        yield child

    def _getText(self, element):
        text = ""
        for child in element.childNodes:
            if child.nodeType == child.TEXT_NODE:
                text += child.data
        return text

    def _converter_string(value):
        return value

    def _converter_date(value):
        m = iso8601.match(value)
        year = int(m.group("year"))
        month = int(m.group("month") or "1")
        day = int(m.group("day") or "1")
        hour = int(m.group("hour") or "0")
        minute = int(m.group("minute") or "0")
        second = decimal.Decimal(m.group("second") or "0")
        seconds = second.to_integral(decimal.ROUND_FLOOR)
        milliseconds = (second - seconds) * 1000000
        tzd = m.group("tzd") or "Z"
        dt = datetime.datetime(year, month, day, hour, minute, seconds, milliseconds)
        if tzd != "Z":
            tzd_hours, tzd_minutes = [int(x) for x in tzd.split(":")]
            tzd_hours *= -1
            if tzd_hours < 0:
                tzd_minutes *= -1
            dt = dt + datetime.timedelta(hours=tzd_hours, minutes=tzd_minutes)
        return dt
    _test_converter_date = staticmethod(_converter_date)

    def _getter_bag(namespace, name, converter):
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            retval = []
            for element in self.getElement("", namespace, name):
                bags = element.getElementsByTagNameNS(RDF_NAMESPACE, "Bag")
                if len(bags):
                    for bag in bags:
                        for item in bag.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval.append(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_seq(namespace, name, converter):
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            retval = []
            for element in self.getElement("", namespace, name):
                seqs = element.getElementsByTagNameNS(RDF_NAMESPACE, "Seq")
                if len(seqs):
                    for seq in seqs:
                        for item in seq.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval.append(value)
                else:
                    value = converter(self._getText(element))
                    retval.append(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_langalt(namespace, name, converter):
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            retval = {}
            for element in self.getElement("", namespace, name):
                alts = element.getElementsByTagNameNS(RDF_NAMESPACE, "Alt")
                if len(alts):
                    for alt in alts:
                        for item in alt.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval[item.getAttribute("xml:lang")] = value
                else:
                    retval["x-default"] = converter(self._getText(element))
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_single(namespace, name, converter):
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            value = None
            for element in self.getElement("", namespace, name):
                if element.nodeType == element.ATTRIBUTE_NODE:
                    value = element.nodeValue
                else:
                    value = self._getText(element)
                break
            if value != None:
                value = converter(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = value
            return value
        return get

    dc_contributor = property(_getter_bag(DC_NAMESPACE, "contributor", _converter_string))
    """
    Contributors to the resource (other than the authors). An unsorted
    array of names.
    """

    dc_coverage = property(_getter_single(DC_NAMESPACE, "coverage", _converter_string))
    """
    Text describing the extent or scope of the resource.
    """

    dc_creator = property(_getter_seq(DC_NAMESPACE, "creator", _converter_string))
    """
    A sorted array of names of the authors of the resource, listed in order
    of precedence.
    """

    dc_date = property(_getter_seq(DC_NAMESPACE, "date", _converter_date))
    """
    A sorted array of dates (datetime.datetime instances) of significance to
    the resource. The dates and times are in UTC.
    """

    dc_description = property(_getter_langalt(DC_NAMESPACE, "description", _converter_string))
    """
    A language-keyed dictionary of textual descriptions of the content of the
    resource.
    """

    dc_format = property(_getter_single(DC_NAMESPACE, "format", _converter_string))
    """
    The mime-type of the resource.
    """

    dc_identifier = property(_getter_single(DC_NAMESPACE, "identifier", _converter_string))
    """
    Unique identifier of the resource.
    """

    dc_language = property(_getter_bag(DC_NAMESPACE, "language", _converter_string))
    """
    An unordered array specifying the languages used in the resource.
    """

    dc_publisher = property(_getter_bag(DC_NAMESPACE, "publisher", _converter_string))
    """
    An unordered array of publisher names.
    """

    dc_relation = property(_getter_bag(DC_NAMESPACE, "relation", _converter_string))
    """
    An unordered array of text descriptions of relationships to other
    documents.
    """

    dc_rights = property(_getter_langalt(DC_NAMESPACE, "rights", _converter_string))
    """
    A language-keyed dictionary of textual descriptions of the rights the
    user has to this resource.
    """

    dc_source = property(_getter_single(DC_NAMESPACE, "source", _converter_string))
    """
    Unique identifier of the work from which this resource was derived.
    """

    dc_subject = property(_getter_bag(DC_NAMESPACE, "subject", _converter_string))
    """
    An unordered array of descriptive phrases or keywords that specify the
    topic of the content of the resource.
    """

    dc_title = property(_getter_langalt(DC_NAMESPACE, "title", _converter_string))
    """
    A language-keyed dictionary of the title of the resource.
    """

    dc_type = property(_getter_bag(DC_NAMESPACE, "type", _converter_string))
    """
    An unordered array of textual descriptions of the document type.
    """

    pdf_keywords = property(_getter_single(PDF_NAMESPACE, "Keywords", _converter_string))
    """
    An unformatted text string representing document keywords.
    """

    pdf_pdfversion = property(_getter_single(PDF_NAMESPACE, "PDFVersion", _converter_string))
    """
    The PDF file version, for example 1.0, 1.3.
    """

    pdf_producer = property(_getter_single(PDF_NAMESPACE, "Producer", _converter_string))
    """
    The name of the tool that created the PDF document.
    """

    xmp_createDate = property(_getter_single(XMP_NAMESPACE, "CreateDate", _converter_date))
    """
    The date and time the resource was originally created. The date and
    time are returned as a UTC datetime.datetime object.
    """

    xmp_modifyDate = property(_getter_single(XMP_NAMESPACE, "ModifyDate", _converter_date))
    """
    The date and time the resource was last modified. The date and time
    are returned as a UTC datetime.datetime object.
    """

    xmp_metadataDate = property(_getter_single(XMP_NAMESPACE, "MetadataDate", _converter_date))
    """
    The date and time that any metadata for this resource was last
    changed. The date and time are returned as a UTC datetime.datetime
    object.
    """

    xmp_creatorTool = property(_getter_single(XMP_NAMESPACE, "CreatorTool", _converter_string))
    """
    The name of the first known tool used to create the resource.
    """

    xmpmm_documentId = property(_getter_single(XMPMM_NAMESPACE, "DocumentID", _converter_string))
    """
    The common identifier for all versions and renditions of this resource.
    """

    xmpmm_instanceId = property(_getter_single(XMPMM_NAMESPACE, "InstanceID", _converter_string))
    """
    An identifier for a specific incarnation of a document, updated each
    time a file is saved.
    """

    def custom_properties(self):
        if not hasattr(self, "_custom_properties"):
            self._custom_properties = {}
            for node in self.getNodesInNamespace("", PDFX_NAMESPACE):
                key = node.localName
                while True:
                    # see documentation about PDFX_NAMESPACE earlier in file
                    idx = key.find(u_("\u2182"))
                    if idx == -1:
                        break
                    key = key[:idx] + chr(int(key[idx+1:idx+5], base=16)) + key[idx+5:]
                if node.nodeType == node.ATTRIBUTE_NODE:
                    value = node.nodeValue
                else:
                    value = self._getText(node)
                self._custom_properties[key] = value
        return self._custom_properties

    custom_properties = property(custom_properties)
    """
    Retrieves custom metadata properties defined in the undocumented pdfx
    metadata schema.

    :return: a dictionary of key/value items for custom metadata properties.
    :rtype: dict
    """

@@ -1,2 +0,0 @@
./setuptools-40.8.0-py3.7.egg
./pip-19.0.3-py3.7.egg

@@ -1,5 +0,0 @@
#!/usr/bin/env python
__version__ = '20191125'

if __name__ == '__main__':
    print(__version__)

@@ -1,54 +0,0 @@
#!/usr/bin/env python

""" Python implementation of Arcfour encryption algorithm.

This code is in the public domain.

"""


##  Arcfour
##
class Arcfour:

    """
    >>> Arcfour(b'Key').process(b'Plaintext').hex()
    'bbf316e8d940af0ad3'
    >>> Arcfour(b'Wiki').process(b'pedia').hex()
    '1021bf0420'
    >>> Arcfour(b'Secret').process(b'Attack at dawn').hex()
    '45a01f645fc35b383552544b9bf5'
    """

    def __init__(self, key):
        s = list(range(256))
        j = 0
        klen = len(key)
        for i in range(256):
            j = (j + s[i] + key[i % klen]) % 256
            (s[i], s[j]) = (s[j], s[i])
        self.s = s
        (self.i, self.j) = (0, 0)
        return

    def process(self, data):
        (i, j) = (self.i, self.j)
        s = self.s
        r = []
        for c in data:
            i = (i+1) % 256
            j = (j+s[i]) % 256
            (s[i], s[j]) = (s[j], s[i])
            k = s[(s[i]+s[j]) % 256]
            r.append(c ^ k)
        (self.i, self.j) = (i, j)
        return bytes(r)

    encrypt = decrypt = process

new = Arcfour

# test
if __name__ == '__main__':
    import doctest
    print('pdfminer.arcfour:', doctest.testmod())

@@ -1,84 +0,0 @@
#!/usr/bin/env python

""" Python implementation of ASCII85/ASCIIHex decoder (Adobe version).

This code is in the public domain.

"""

import re
import struct


# ascii85decode(data)
def ascii85decode(data):
    """
    In ASCII85 encoding, every four bytes are encoded with five ASCII
    letters, using 85 different types of characters (as 256**4 < 85**5).
    When the length of the original bytes is not a multiple of 4, a special
    rule is used for rounding up.

    Adobe's ASCII85 implementation is slightly different from
    the original in handling the last characters.

    The sample string is taken from:
    http://en.wikipedia.org/w/index.php?title=Ascii85

    >>> ascii85decode(b'9jqo^BlbD-BleB1DJ+*+F(f,q')
    b'Man is distinguished'
    >>> ascii85decode(b'E,9)oF*2M7/c~>')
    b'pleasure.'
    """
    n = b = 0
    out = b''
    for c in data:
        if 33 <= c and c <= 117:  # b'!' <= c and c <= b'u'
            n += 1
            b = b*85+(c-33)
            if n == 5:
                out += struct.pack('>L', b)
                n = b = 0
        elif c == 122:  # b'z'
            assert n == 0
            out += b'\0\0\0\0'
        elif c == 126:  # b'~'
            if n:
                for _ in range(5-n):
                    b = b*85+84
                out += struct.pack('>L', b)[:n-1]
            break
    return out


# asciihexdecode(data)
hex_re = re.compile(r'([a-f\d]{2})', re.IGNORECASE)
trail_re = re.compile(r'^(?:[a-f\d]{2}|\s)*([a-f\d])[\s>]*$', re.IGNORECASE)


def asciihexdecode(data):
    """
    ASCIIHexDecode filter: PDFReference v1.4 section 3.3.1
    For each pair of ASCII hexadecimal digits (0-9 and A-F or a-f), the
    ASCIIHexDecode filter produces one byte of binary data. All white-space
    characters are ignored. A right angle bracket character (>) indicates
    EOD. Any other characters will cause an error. If the filter encounters
    the EOD marker after reading an odd number of hexadecimal digits, it
    will behave as if a 0 followed the last digit.

    >>> asciihexdecode(b'61 62 2e6364 65')
    b'ab.cde'
    >>> asciihexdecode(b'61 62 2e6364 657>')
    b'ab.cdep'
    >>> asciihexdecode(b'7>')
    b'p'
    """
    data = data.decode('latin1')
    out = [ int(hx,16) for hx in hex_re.findall(data) ]
    m = trail_re.search(data)
    if m:
        out.append(int(m.group(1),16) << 4)
    return bytes(out)


if __name__ == '__main__':
    import doctest
    print('pdfminer.ascii85', doctest.testmod())

@@ -1,602 +0,0 @@
#!/usr/bin/env python

""" Adobe character mapping (CMap) support.

CMaps provide the mapping between character codes and Unicode
code-points to character ids (CIDs).

More information is available on the Adobe website:

  http://opensource.adobe.com/wiki/display/cmap/CMap+Resources

"""

import sys
import os
import os.path
import gzip
import codecs
import marshal
import struct
import logging
from .psparser import PSStackParser
from .psparser import PSSyntaxError
from .psparser import PSEOF
from .psparser import PSLiteral
from .psparser import literal_name
from .psparser import KWD
from .encodingdb import name2unicode
from .utils import choplist
from .utils import nunpack


class CMapError(Exception):
    pass


##  CMapBase
##
class CMapBase:

    debug = 0

    def __init__(self, **kwargs):
        self.attrs = kwargs.copy()
        return

    def is_vertical(self):
        return self.attrs.get('WMode', 0) != 0

    def set_attr(self, k, v):
        self.attrs[k] = v
        return

    def add_code2cid(self, code, cid):
        return

    def add_cid2unichr(self, cid, code):
        return

    def use_cmap(self, cmap):
        return


##  CMap
##
class CMap(CMapBase):

    def __init__(self, **kwargs):
        CMapBase.__init__(self, **kwargs)
        self.code2cid = {}
        return

    def __repr__(self):
        return '<CMap: %s>' % self.attrs.get('CMapName')

    def use_cmap(self, cmap):
        assert isinstance(cmap, CMap)

        def copy(dst, src):
            for (k, v) in src.items():
                if isinstance(v, dict):
                    d = {}
                    dst[k] = d
                    copy(d, v)
                else:
                    dst[k] = v
        copy(self.code2cid, cmap.code2cid)
        return

    def decode(self, code):
        if self.debug:
            logging.debug('decode: %r, %r' % (self, code))
        d = self.code2cid
        for c in code:
            if c in d:
                d = d[c]
                if isinstance(d, int):
                    yield d
                    d = self.code2cid
            else:
                d = self.code2cid
        return

    def dump(self, out=sys.stdout, code2cid=None, code=None):
        if code2cid is None:
            code2cid = self.code2cid
            code = ()
        for (k, v) in sorted(code2cid.items()):
            c = code+(k,)
            if isinstance(v, int):
                out.write('code %r = cid %d\n' % (c, v))
            else:
                self.dump(out=out, code2cid=v, code=c)
        return


##  IdentityCMap
##
class IdentityCMap(CMapBase):

    def decode(self, code):
        n = len(code)//2
        if n:
            return struct.unpack('>%dH' % n, code)
        else:
            return ()
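
# Illustrative note (not part of the original file): Identity-H/V simply
# reinterpret the code bytes as big-endian 16-bit CIDs, e.g.
#
#     IdentityCMap().decode(b'\x00A\x00B')   # -> (65, 66)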
|
||||
|
||||
|
||||
## UnicodeMap
|
||||
##
|
||||
class UnicodeMap(CMapBase):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
CMapBase.__init__(self, **kwargs)
|
||||
self.cid2unichr = {}
|
||||
return
|
||||
|
||||
def __repr__(self):
|
||||
return '<UnicodeMap: %s>' % self.attrs.get('CMapName')
|
||||
|
||||
def get_unichr(self, cid):
|
||||
if self.debug:
|
||||
logging.debug('get_unichr: %r, %r' % (self, cid))
|
||||
return self.cid2unichr[cid]
|
||||
|
||||
def dump(self, out=sys.stdout):
|
||||
for (k, v) in sorted(self.cid2unichr.items()):
|
||||
out.write('cid %d = unicode %r\n' % (k, v))
|
||||
return
|
||||
|
||||
|
||||
## FileCMap
|
||||
##
|
||||
class FileCMap(CMap):
|
||||
|
||||
def add_code2cid(self, code, cid):
|
||||
assert isinstance(code, bytes) and isinstance(cid, int)
|
||||
d = self.code2cid
|
||||
for c in code[:-1]:
|
||||
c = ord(c)
|
||||
if c in d:
|
||||
d = d[c]
|
||||
else:
|
||||
t = {}
|
||||
d[c] = t
|
||||
d = t
|
||||
c = ord(code[-1])
|
||||
d[c] = cid
|
||||
return
|
||||
|
||||
|
||||
## FileUnicodeMap
|
||||
##
|
||||
class FileUnicodeMap(UnicodeMap):
|
||||
|
||||
def add_cid2unichr(self, cid, code):
|
||||
assert isinstance(cid, int)
|
||||
if isinstance(code, PSLiteral):
|
||||
# Interpret as an Adobe glyph name.
|
||||
self.cid2unichr[cid] = name2unicode(code.name)
|
||||
elif isinstance(code, bytes):
|
||||
# Interpret as UTF-16BE.
|
||||
self.cid2unichr[cid] = code.decode('UTF-16BE', 'ignore')
|
||||
elif isinstance(code, int):
|
||||
self.cid2unichr[cid] = chr(code)
|
||||
else:
|
||||
raise TypeError(code)
|
||||
return
|
||||
|
||||
|
||||
## PyCMap
|
||||
##
|
||||
class PyCMap(CMap):
|
||||
|
||||
def __init__(self, name, module):
|
||||
CMap.__init__(self, CMapName=name)
|
||||
self.code2cid = module.CODE2CID
|
||||
if module.IS_VERTICAL:
|
||||
self.attrs['WMode'] = 1
|
||||
return
|
||||
|
||||
|
||||
## PyUnicodeMap
##
class PyUnicodeMap(UnicodeMap):

    def __init__(self, name, module, vertical):
        UnicodeMap.__init__(self, CMapName=name)
        if vertical:
            self.cid2unichr = module.CID2UNICHR_V
            self.attrs['WMode'] = 1
        else:
            self.cid2unichr = module.CID2UNICHR_H
        return

## CMapDB
##
class CMapDB:

    _cmap_cache = {}
    _umap_cache = {}

    class CMapNotFound(CMapError):
        pass

    @classmethod
    def _load_data(klass, name):
        filename = '%s.marshal.gz' % name
        logging.info('loading: %r' % name)
        cmap_paths = (os.environ.get('CMAP_PATH', '/usr/share/pdfminer/'),
                      os.path.join(os.path.dirname(__file__), 'cmap'),)
        for directory in cmap_paths:
            path = os.path.join(directory, filename)
            if os.path.exists(path):
                gzfile = gzip.open(path)
                try:
                    return type(str(name), (), marshal.loads(gzfile.read()))
                finally:
                    gzfile.close()
        else:
            # for-else: only reached when no candidate path existed.
            raise CMapDB.CMapNotFound(name)

    @classmethod
    def get_cmap(klass, name):
        if name == 'Identity-H':
            return IdentityCMap(WMode=0)
        elif name == 'Identity-V':
            return IdentityCMap(WMode=1)
        try:
            return klass._cmap_cache[name]
        except KeyError:
            pass
        data = klass._load_data(name)
        klass._cmap_cache[name] = cmap = PyCMap(name, data)
        return cmap

    @classmethod
    def get_unicode_map(klass, name, vertical=False):
        try:
            return klass._umap_cache[name][vertical]
        except KeyError:
            pass
        data = klass._load_data('to-unicode-%s' % name)
        klass._umap_cache[name] = umaps = [PyUnicodeMap(name, data, v)
                                           for v in (False, True)]
        return umaps[vertical]

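# Usage sketch (assumes the corresponding .marshal.gz resources are
# installed; the CMap name below is one of Adobe's predefined ones):
#
#   cmap = CMapDB.get_cmap('Adobe-Japan1-2')
#   umap = CMapDB.get_unicode_map('Adobe-Japan1')
#   for cid in cmap.decode(raw_string_bytes):
#       print(umap.get_unichr(cid))
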
## CMapParser
##
class CMapParser(PSStackParser):

    def __init__(self, cmap, fp):
        PSStackParser.__init__(self, fp)
        self.cmap = cmap
        # some ToUnicode maps don't have "begincmap" keyword.
        self._in_cmap = True
        return

    def run(self):
        try:
            self.nextobject()
        except PSEOF:
            pass
        return

    KEYWORD_BEGINCMAP = KWD(b'begincmap')
    KEYWORD_ENDCMAP = KWD(b'endcmap')
    KEYWORD_USECMAP = KWD(b'usecmap')
    KEYWORD_DEF = KWD(b'def')
    KEYWORD_BEGINCODESPACERANGE = KWD(b'begincodespacerange')
    KEYWORD_ENDCODESPACERANGE = KWD(b'endcodespacerange')
    KEYWORD_BEGINCIDRANGE = KWD(b'begincidrange')
    KEYWORD_ENDCIDRANGE = KWD(b'endcidrange')
    KEYWORD_BEGINCIDCHAR = KWD(b'begincidchar')
    KEYWORD_ENDCIDCHAR = KWD(b'endcidchar')
    KEYWORD_BEGINBFRANGE = KWD(b'beginbfrange')
    KEYWORD_ENDBFRANGE = KWD(b'endbfrange')
    KEYWORD_BEGINBFCHAR = KWD(b'beginbfchar')
    KEYWORD_ENDBFCHAR = KWD(b'endbfchar')
    KEYWORD_BEGINNOTDEFRANGE = KWD(b'beginnotdefrange')
    KEYWORD_ENDNOTDEFRANGE = KWD(b'endnotdefrange')

    def do_keyword(self, pos, token):
        if token is self.KEYWORD_BEGINCMAP:
            self._in_cmap = True
            self.popall()
            return
        elif token is self.KEYWORD_ENDCMAP:
            self._in_cmap = False
            return
        if not self._in_cmap:
            return
        #
        if token is self.KEYWORD_DEF:
            try:
                ((_, k), (_, v)) = self.pop(2)
                self.cmap.set_attr(literal_name(k), v)
            except PSSyntaxError:
                pass
            return

        if token is self.KEYWORD_USECMAP:
            try:
                ((_, cmapname),) = self.pop(1)
                self.cmap.use_cmap(CMapDB.get_cmap(literal_name(cmapname)))
            except PSSyntaxError:
                pass
            except CMapDB.CMapNotFound:
                pass
            return

        if token is self.KEYWORD_BEGINCODESPACERANGE:
            self.popall()
            return
        if token is self.KEYWORD_ENDCODESPACERANGE:
            self.popall()
            return

        if token is self.KEYWORD_BEGINCIDRANGE:
            self.popall()
            return
        if token is self.KEYWORD_ENDCIDRANGE:
            objs = [obj for (__, obj) in self.popall()]
            for (s, e, cid) in choplist(3, objs):
                if (not isinstance(s, bytes) or not isinstance(e, bytes) or
                        not isinstance(cid, int) or len(s) != len(e)):
                    continue
                sprefix = s[:-4]
                eprefix = e[:-4]
                if sprefix != eprefix:
                    continue
                svar = s[-4:]
                evar = e[-4:]
                s1 = nunpack(svar)
                e1 = nunpack(evar)
                vlen = len(svar)
                #assert s1 <= e1
                for i in range(e1-s1+1):
                    x = sprefix+struct.pack('>L', s1+i)[-vlen:]
                    self.cmap.add_code2cid(x, cid+i)
            return

        if token is self.KEYWORD_BEGINCIDCHAR:
            self.popall()
            return
        if token is self.KEYWORD_ENDCIDCHAR:
            objs = [obj for (__, obj) in self.popall()]
            for (cid, code) in choplist(2, objs):
                if isinstance(code, bytes) and isinstance(cid, bytes):
                    self.cmap.add_code2cid(code, nunpack(cid))
            return

        if token is self.KEYWORD_BEGINBFRANGE:
            self.popall()
            return
        if token is self.KEYWORD_ENDBFRANGE:
            objs = [obj for (__, obj) in self.popall()]
            for (s, e, code) in choplist(3, objs):
                if (not isinstance(s, bytes) or not isinstance(e, bytes) or
                        len(s) != len(e)):
                    continue
                s1 = nunpack(s)
                e1 = nunpack(e)
                #assert s1 <= e1
                if isinstance(code, list):
                    for i in range(e1-s1+1):
                        self.cmap.add_cid2unichr(s1+i, code[i])
                else:
                    var = code[-4:]
                    base = nunpack(var)
                    prefix = code[:-4]
                    vlen = len(var)
                    for i in range(e1-s1+1):
                        x = prefix+struct.pack('>L', base+i)[-vlen:]
                        self.cmap.add_cid2unichr(s1+i, x)
            return

        if token is self.KEYWORD_BEGINBFCHAR:
            self.popall()
            return
        if token is self.KEYWORD_ENDBFCHAR:
            objs = [obj for (__, obj) in self.popall()]
            for (cid, code) in choplist(2, objs):
                if isinstance(cid, bytes) and isinstance(code, bytes):
                    self.cmap.add_cid2unichr(nunpack(cid), code)
            return

        if token is self.KEYWORD_BEGINNOTDEFRANGE:
            self.popall()
            return
        if token is self.KEYWORD_ENDNOTDEFRANGE:
            self.popall()
            return

        self.push((pos, token))
        return

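# A short sketch of feeding an inline ToUnicode-style snippet to the
# parser (hypothetical data; real streams come from PDF objects):
#
#   from io import BytesIO
#   data = b'1 beginbfchar <0003> <0041> endbfchar'
#   umap = FileUnicodeMap()
#   CMapParser(umap, BytesIO(data)).run()
#   assert umap.get_unichr(3) == 'A'
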
## CMapConverter
##
class CMapConverter:

    def __init__(self, enc2codec={}):
        self.enc2codec = enc2codec
        self.code2cid = {}      # {'cmapname': ...}
        self.is_vertical = {}
        self.cid2unichr_h = {}  # {cid: unichr}
        self.cid2unichr_v = {}  # {cid: unichr}
        return

    def get_encs(self):
        return self.code2cid.keys()

    def get_maps(self, enc):
        if enc.endswith('-H'):
            (hmapenc, vmapenc) = (enc, None)
        elif enc == 'H':
            (hmapenc, vmapenc) = ('H', 'V')
        else:
            (hmapenc, vmapenc) = (enc+'-H', enc+'-V')
        if hmapenc in self.code2cid:
            hmap = self.code2cid[hmapenc]
        else:
            hmap = {}
            self.code2cid[hmapenc] = hmap
        vmap = None
        if vmapenc:
            self.is_vertical[vmapenc] = True
            if vmapenc in self.code2cid:
                vmap = self.code2cid[vmapenc]
            else:
                vmap = {}
                self.code2cid[vmapenc] = vmap
        return (hmap, vmap)

    def load(self, fp):
        encs = None
        for line in fp:
            (line, _, _) = line.strip().partition('#')
            if not line:
                continue
            values = line.split('\t')
            if encs is None:
                assert values[0] == 'CID'
                encs = values
                continue

            def put(dmap, code, cid, force=False):
                for b in code[:-1]:
                    if b in dmap:
                        dmap = dmap[b]
                    else:
                        d = {}
                        dmap[b] = d
                        dmap = d
                b = code[-1]
                if force or ((b not in dmap) or dmap[b] == cid):
                    dmap[b] = cid
                return

            def add(unimap, enc, code):
                try:
                    codec = self.enc2codec[enc]
                    c = code.decode(codec, 'strict')
                    if len(c) == 1:
                        if c not in unimap:
                            unimap[c] = 0
                        unimap[c] += 1
                except KeyError:
                    pass
                except UnicodeError:
                    pass
                return

            def pick(unimap):
                chars = sorted(
                    unimap.items(),
                    key=(lambda x: (x[1], -ord(x[0]))), reverse=True)
                (c, _) = chars[0]
                return c

            cid = int(values[0])
            unimap_h = {}
            unimap_v = {}
            for (enc, value) in zip(encs, values):
                if enc == 'CID':
                    continue
                if value == '*':
                    continue

                # hcodes, vcodes: encoded bytes for each writing mode.
                hcodes = []
                vcodes = []
                for code in value.split(','):
                    vertical = code.endswith('v')
                    if vertical:
                        code = code[:-1]
                    try:
                        code = codecs.decode(code, 'hex')
                    except Exception:
                        code = bytes([int(code, 16)])
                    if vertical:
                        vcodes.append(code)
                        add(unimap_v, enc, code)
                    else:
                        hcodes.append(code)
                        add(unimap_h, enc, code)
                # add cid to each map.
                (hmap, vmap) = self.get_maps(enc)
                if vcodes:
                    assert vmap is not None
                    for code in vcodes:
                        put(vmap, code, cid, True)
                    for code in hcodes:
                        put(hmap, code, cid, True)
                else:
                    for code in hcodes:
                        put(hmap, code, cid)
                        put(vmap, code, cid)

            # Determine the "most popular" candidate.
            if unimap_h:
                self.cid2unichr_h[cid] = pick(unimap_h)
            if unimap_v or unimap_h:
                self.cid2unichr_v[cid] = pick(unimap_v or unimap_h)

        return

    def dump_cmap(self, fp, enc):
        data = dict(
            IS_VERTICAL=self.is_vertical.get(enc, False),
            CODE2CID=self.code2cid.get(enc),
        )
        fp.write(marshal.dumps(data))
        return

    def dump_unicodemap(self, fp):
        data = dict(
            CID2UNICHR_H=self.cid2unichr_h,
            CID2UNICHR_V=self.cid2unichr_v,
        )
        fp.write(marshal.dumps(data))
        return

# convert_cmap
def convert_cmap(outdir, regname, enc2codec, paths):
    converter = CMapConverter(enc2codec)

    for path in paths:
        print('reading: %r...' % path)
        with open(path) as fp:
            converter.load(fp)

    files = []
    for enc in converter.get_encs():
        fname = '%s.marshal.gz' % enc
        path = os.path.join(outdir, fname)
        print('writing: %r...' % path)
        with gzip.open(path, 'wb') as fp:
            converter.dump_cmap(fp, enc)
        files.append(path)

    fname = 'to-unicode-%s.marshal.gz' % regname
    path = os.path.join(outdir, fname)
    print('writing: %r...' % path)
    with gzip.open(path, 'wb') as fp:
        converter.dump_unicodemap(fp)
    files.append(path)
    return files

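# Hypothetical invocation (the output directory, registry name, codec
# mapping and input paths are all placeholders):
#
#   convert_cmap('cmap', 'Adobe-Japan1',
#                {'RKSJ': 'cp932', 'EUC': 'euc-jp'},
#                ['Adobe-Japan1/cid2code.txt'])
#
# Each encoding column of cid2code.txt becomes one <enc>.marshal.gz
# file, plus a single to-unicode-<regname>.marshal.gz for the registry.
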
# test
def main(argv):
    args = argv[1:]
    for fname in args:
        with open(fname, 'rb') as fp:
            cmap = FileUnicodeMap()
            #cmap = FileCMap()
            CMapParser(cmap, fp).run()
            cmap.dump()
    return

if __name__ == '__main__':
    sys.exit(main(sys.argv))

@@ -1,501 +0,0 @@
#!/usr/bin/env python
import logging
import re
from .pdfdevice import PDFTextDevice
from .pdffont import PDFUnicodeNotDefined
from .layout import LTContainer
from .layout import LTPage
from .layout import LTText
from .layout import LTLine
from .layout import LTRect
from .layout import LTCurve
from .layout import LTFigure
from .layout import LTImage
from .layout import LTChar
from .layout import LTTextLine
from .layout import LTTextBox
from .layout import LTTextBoxVertical
from .layout import LTTextGroup
from .utils import apply_matrix_pt
from .utils import mult_matrix
from .utils import q
from .utils import bbox2str

## PDFLayoutAnalyzer
##
class PDFLayoutAnalyzer(PDFTextDevice):

    def __init__(self, rsrcmgr, pageno=1, laparams=None):
        PDFTextDevice.__init__(self, rsrcmgr)
        self.pageno = pageno
        self.laparams = laparams
        self._stack = []
        return

    def begin_page(self, page, ctm):
        (x0, y0, x1, y1) = page.mediabox
        (x0, y0) = apply_matrix_pt(ctm, (x0, y0))
        (x1, y1) = apply_matrix_pt(ctm, (x1, y1))
        mediabox = (0, 0, abs(x0-x1), abs(y0-y1))
        self.cur_item = LTPage(self.pageno, mediabox)
        return

    def end_page(self, page):
        assert not self._stack
        assert isinstance(self.cur_item, LTPage)
        if self.laparams is not None:
            self.cur_item.analyze(self.laparams)
        self.pageno += 1
        self.receive_layout(self.cur_item)
        return

    def begin_figure(self, name, bbox, matrix):
        self._stack.append(self.cur_item)
        self.cur_item = LTFigure(name, bbox, mult_matrix(matrix, self.ctm))
        return

    def end_figure(self, _):
        fig = self.cur_item
        assert isinstance(self.cur_item, LTFigure)
        self.cur_item = self._stack.pop()
        self.cur_item.add(fig)
        return

    def render_image(self, name, stream):
        assert isinstance(self.cur_item, LTFigure)
        item = LTImage(name, stream,
                       (self.cur_item.x0, self.cur_item.y0,
                        self.cur_item.x1, self.cur_item.y1))
        self.cur_item.add(item)
        return

    def paint_path(self, gstate, stroke, fill, evenodd, path):
        shape = ''.join(x[0] for x in path)
        if shape == 'ml':
            # horizontal/vertical line
            (_, x0, y0) = path[0]
            (_, x1, y1) = path[1]
            (x0, y0) = apply_matrix_pt(self.ctm, (x0, y0))
            (x1, y1) = apply_matrix_pt(self.ctm, (x1, y1))
            if x0 == x1 or y0 == y1:
                self.cur_item.add(LTLine(gstate.linewidth, (x0, y0), (x1, y1)))
                return
        if shape == 'mlllh':
            # rectangle
            (_, x0, y0) = path[0]
            (_, x1, y1) = path[1]
            (_, x2, y2) = path[2]
            (_, x3, y3) = path[3]
            (x0, y0) = apply_matrix_pt(self.ctm, (x0, y0))
            (x1, y1) = apply_matrix_pt(self.ctm, (x1, y1))
            (x2, y2) = apply_matrix_pt(self.ctm, (x2, y2))
            (x3, y3) = apply_matrix_pt(self.ctm, (x3, y3))
            if ((x0 == x1 and y1 == y2 and x2 == x3 and y3 == y0) or
                    (y0 == y1 and x1 == x2 and y2 == y3 and x3 == x0)):
                self.cur_item.add(LTRect(gstate.linewidth, (x0, y0, x2, y2)))
                return
        # other shapes
        pts = []
        for p in path:
            for i in range(1, len(p), 2):
                pts.append(apply_matrix_pt(self.ctm, (p[i], p[i+1])))
        self.cur_item.add(LTCurve(gstate.linewidth, pts))
        return

    def render_char(self, matrix, font, fontsize, scaling, rise, cid):
        try:
            text = font.to_unichr(cid)
            assert isinstance(text, str), text
        except PDFUnicodeNotDefined:
            text = self.handle_undefined_char(font, cid)
        textwidth = font.char_width(cid)
        textdisp = font.char_disp(cid)
        item = LTChar(matrix, font, fontsize, scaling, rise, text,
                      textwidth, textdisp)
        self.cur_item.add(item)
        return item.adv

    def handle_undefined_char(self, font, cid):
        logging.info('undefined: %r, %r' % (font, cid))
        return f'(cid:{cid})'

    def receive_layout(self, ltpage):
        return

## PDFPageAggregator
##
class PDFPageAggregator(PDFLayoutAnalyzer):

    def __init__(self, rsrcmgr, pageno=1, laparams=None):
        PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
                                   laparams=laparams)
        self.result = None
        return

    def receive_layout(self, ltpage):
        self.result = ltpage
        return

    def get_result(self):
        return self.result

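# A minimal usage sketch (import paths assume the usual pdfminer layout;
# 'doc.pdf' is a placeholder):
#
#   from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
#   from pdfminer.pdfpage import PDFPage
#   from pdfminer.layout import LAParams
#   rsrcmgr = PDFResourceManager()
#   device = PDFPageAggregator(rsrcmgr, laparams=LAParams())
#   interpreter = PDFPageInterpreter(rsrcmgr, device)
#   with open('doc.pdf', 'rb') as fp:
#       for page in PDFPage.get_pages(fp):
#           interpreter.process_page(page)
#           ltpage = device.get_result()  # an LTPage tree
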
## PDFConverter
##
class PDFConverter(PDFLayoutAnalyzer):

    def __init__(self, rsrcmgr, outfp, pageno=1, laparams=None):
        PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
                                   laparams=laparams)
        self.outfp = outfp
        return

## TextConverter
##
class TextConverter(PDFConverter):

    def __init__(self, rsrcmgr, outfp, pageno=1, laparams=None,
                 showpageno=False, imagewriter=None):
        PDFConverter.__init__(self, rsrcmgr, outfp, pageno=pageno,
                              laparams=laparams)
        self.showpageno = showpageno
        self.imagewriter = imagewriter
        return

    def write_text(self, text):
        self.outfp.write(text)
        return

    def receive_layout(self, ltpage):
        def render(item):
            if isinstance(item, LTContainer):
                for child in item:
                    render(child)
            elif isinstance(item, LTText):
                self.write_text(item.get_text())
            if isinstance(item, LTTextBox):
                self.write_text('\n')
            elif isinstance(item, LTImage):
                if self.imagewriter is not None:
                    self.imagewriter.export_image(item)
        if self.showpageno:
            self.write_text('Page %s\n' % ltpage.pageid)
        render(ltpage)
        self.write_text('\f')
        return

    # Some dummy functions to save memory/CPU when all that is wanted
    # is text.  This stops all the image and drawing output from being
    # recorded and taking up RAM.
    def render_image(self, name, stream):
        if self.imagewriter is None:
            return
        PDFConverter.render_image(self, name, stream)
        return

    def paint_path(self, gstate, stroke, fill, evenodd, path):
        return

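# Sketch of collecting plain text in memory with TextConverter (driven
# by the same interpreter loop shown for PDFPageAggregator above):
#
#   import io
#   out = io.StringIO()
#   device = TextConverter(rsrcmgr, out, laparams=LAParams())
#   ...process pages...
#   text = out.getvalue()
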
## HTMLConverter
##
class HTMLConverter(PDFConverter):

    RECT_COLORS = {
        #'char': 'green',
        'figure': 'yellow',
        'textline': 'magenta',
        'textbox': 'cyan',
        'textgroup': 'red',
        'curve': 'black',
        'page': 'gray',
    }

    TEXT_COLORS = {
        'textbox': 'blue',
        'char': 'black',
    }

    def __init__(self, rsrcmgr, outfp, pageno=1, laparams=None,
                 scale=1, fontscale=1.0, layoutmode='normal', showpageno=True,
                 pagemargin=50, imagewriter=None, debug=0,
                 rect_colors={'curve': 'black', 'page': 'gray'},
                 text_colors={'char': 'black'}):
        PDFConverter.__init__(self, rsrcmgr, outfp, pageno=pageno,
                              laparams=laparams)
        self.scale = scale
        self.fontscale = fontscale
        self.layoutmode = layoutmode
        self.showpageno = showpageno
        self.pagemargin = pagemargin
        self.imagewriter = imagewriter
        self.rect_colors = rect_colors
        self.text_colors = text_colors
        if debug:
            self.rect_colors.update(self.RECT_COLORS)
            self.text_colors.update(self.TEXT_COLORS)
        self._yoffset = self.pagemargin
        self._font = None
        self._fontstack = []
        self.write_header()
        return

    def write(self, text):
        self.outfp.write(text)
        return

    def write_header(self):
        self.write('<html><head>\n')
        self.write('<meta http-equiv="Content-Type" content="text/html; '
                   'charset=utf-8">\n')
        self.write('</head><body>\n')
        return

    def write_footer(self):
        self.write('<div style="position:absolute; top:0px;">Page: %s</div>\n' %
                   ', '.join('<a href="#%s">%s</a>' % (i, i)
                             for i in range(1, self.pageno)))
        self.write('</body></html>\n')
        return

    def write_text(self, text):
        self.write(q(text))
        return

    def place_rect(self, color, borderwidth, x, y, w, h):
        color = self.rect_colors.get(color)
        if color is not None:
            self.write('<span style="position:absolute; border: %s %dpx solid; '
                       'left:%dpx; top:%dpx; width:%dpx; height:%dpx;"></span>\n' %
                       (color, borderwidth,
                        x*self.scale, (self._yoffset-y)*self.scale,
                        w*self.scale, h*self.scale))
        return

    def place_border(self, color, borderwidth, item):
        self.place_rect(color, borderwidth, item.x0, item.y1,
                        item.width, item.height)
        return

    def place_image(self, item, borderwidth, x, y, w, h):
        if self.imagewriter is not None:
            name = self.imagewriter.export_image(item)
            self.write('<img src="%s" border="%d" style="position:absolute; '
                       'left:%dpx; top:%dpx;" width="%d" height="%d" />\n' %
                       (q(name), borderwidth,
                        x*self.scale, (self._yoffset-y)*self.scale,
                        w*self.scale, h*self.scale))
        return

    def place_text(self, color, text, x, y, size):
        color = self.text_colors.get(color)
        if color is not None:
            self.write('<span style="position:absolute; color:%s; left:%dpx; '
                       'top:%dpx; font-size:%dpx;">' %
                       (color, x*self.scale, (self._yoffset-y)*self.scale,
                        size*self.scale*self.fontscale))
            self.write_text(text)
            self.write('</span>\n')
        return

    def begin_div(self, color, borderwidth, x, y, w, h, writing_mode=False):
        self._fontstack.append(self._font)
        self._font = None
        self.write('<div style="position:absolute; border: %s %dpx solid; '
                   'writing-mode:%s; left:%dpx; top:%dpx; width:%dpx; '
                   'height:%dpx;">' %
                   (color, borderwidth, writing_mode,
                    x*self.scale, (self._yoffset-y)*self.scale,
                    w*self.scale, h*self.scale))
        return

    def end_div(self, color):
        if self._font is not None:
            self.write('</span>')
        self._font = self._fontstack.pop()
        self.write('</div>')
        return

    def put_text(self, text, fontname, fontsize):
        font = (fontname, fontsize)
        if font != self._font:
            if self._font is not None:
                self.write('</span>')
            self.write('<span style="font-family: %s; font-size:%dpx">' %
                       (q(fontname), fontsize * self.scale * self.fontscale))
            self._font = font
        self.write_text(text)
        return

    def put_newline(self):
        self.write('<br>')
        return

    def receive_layout(self, ltpage):
        def show_group(item):
            if isinstance(item, LTTextGroup):
                self.place_border('textgroup', 1, item)
                for child in item:
                    show_group(child)
            return

        def render(item):
            if isinstance(item, LTPage):
                self._yoffset += item.y1
                self.place_border('page', 1, item)
                if self.showpageno:
                    self.write('<div style="position:absolute; top:%dpx;">' %
                               ((self._yoffset-item.y1)*self.scale))
                    self.write('<a name="%s">Page %s</a></div>\n' %
                               (item.pageid, item.pageid))
                for child in item:
                    render(child)
                if item.groups is not None:
                    for group in item.groups:
                        show_group(group)
            elif isinstance(item, LTCurve):
                self.place_border('curve', 1, item)
            elif isinstance(item, LTFigure):
                self.begin_div('figure', 1, item.x0, item.y1,
                               item.width, item.height)
                for child in item:
                    render(child)
                self.end_div('figure')
            elif isinstance(item, LTImage):
                self.place_image(item, 1, item.x0, item.y1,
                                 item.width, item.height)
            else:
                if self.layoutmode == 'exact':
                    if isinstance(item, LTTextLine):
                        self.place_border('textline', 1, item)
                        for child in item:
                            render(child)
                    elif isinstance(item, LTTextBox):
                        self.place_border('textbox', 1, item)
                        self.place_text('textbox', str(item.index+1),
                                        item.x0, item.y1, 20)
                        for child in item:
                            render(child)
                    elif isinstance(item, LTChar):
                        self.place_border('char', 1, item)
                        self.place_text('char', item.get_text(),
                                        item.x0, item.y1, item.size)
                else:
                    if isinstance(item, LTTextLine):
                        for child in item:
                            render(child)
                        if self.layoutmode != 'loose':
                            self.put_newline()
                    elif isinstance(item, LTTextBox):
                        self.begin_div('textbox', 1, item.x0, item.y1,
                                       item.width, item.height,
                                       item.get_writing_mode())
                        for child in item:
                            render(child)
                        self.end_div('textbox')
                    elif isinstance(item, LTChar):
                        self.put_text(item.get_text(), item.fontname,
                                      item.size)
                    elif isinstance(item, LTText):
                        self.write_text(item.get_text())
            return
        render(ltpage)
        self._yoffset += self.pagemargin
        return

    def close(self):
        self.write_footer()
        return

## XMLConverter
##
class XMLConverter(PDFConverter):

    CONTROL = re.compile(r'[\x00-\x08\x0b-\x0c\x0e-\x1f]')

    def __init__(self, rsrcmgr, outfp, pageno=1,
                 laparams=None, imagewriter=None, stripcontrol=False):
        PDFConverter.__init__(self, rsrcmgr, outfp, pageno=pageno,
                              laparams=laparams)
        self.imagewriter = imagewriter
        self.stripcontrol = stripcontrol
        self.write_header()
        return

    def write_header(self):
        self.outfp.write('<?xml version="1.0" encoding="utf-8" ?>\n')
        self.outfp.write('<pages>\n')
        return

    def write_footer(self):
        self.outfp.write('</pages>\n')
        return

    def write_text(self, text):
        if self.stripcontrol:
            text = self.CONTROL.sub(u'', text)
        self.outfp.write(q(text))
        return

    def receive_layout(self, ltpage):
        def show_group(item):
            if isinstance(item, LTTextBox):
                self.outfp.write('<textbox id="%d" bbox="%s" />\n' %
                                 (item.index, bbox2str(item.bbox)))
            elif isinstance(item, LTTextGroup):
                self.outfp.write('<textgroup bbox="%s">\n' % bbox2str(item.bbox))
                for child in item:
                    show_group(child)
                self.outfp.write('</textgroup>\n')
            return

        def render(item):
            if isinstance(item, LTPage):
                self.outfp.write('<page id="%s" bbox="%s" rotate="%d">\n' %
                                 (item.pageid, bbox2str(item.bbox),
                                  item.rotate))
                for child in item:
                    render(child)
                if item.groups is not None:
                    self.outfp.write('<layout>\n')
                    for group in item.groups:
                        show_group(group)
                    self.outfp.write('</layout>\n')
                self.outfp.write('</page>\n')
            elif isinstance(item, LTLine):
                self.outfp.write('<line linewidth="%d" bbox="%s" />\n' %
                                 (item.linewidth, bbox2str(item.bbox)))
            elif isinstance(item, LTRect):
                self.outfp.write('<rect linewidth="%d" bbox="%s" />\n' %
                                 (item.linewidth, bbox2str(item.bbox)))
            elif isinstance(item, LTCurve):
                self.outfp.write('<curve linewidth="%d" bbox="%s" pts="%s"/>\n' %
                                 (item.linewidth, bbox2str(item.bbox),
                                  item.get_pts()))
            elif isinstance(item, LTFigure):
                self.outfp.write('<figure name="%s" bbox="%s">\n' %
                                 (item.name, bbox2str(item.bbox)))
                for child in item:
                    render(child)
                self.outfp.write('</figure>\n')
            elif isinstance(item, LTTextLine):
                self.outfp.write('<textline bbox="%s">\n' % bbox2str(item.bbox))
                for child in item:
                    render(child)
                self.outfp.write('</textline>\n')
            elif isinstance(item, LTTextBox):
                wmode = ''
                if isinstance(item, LTTextBoxVertical):
                    wmode = ' wmode="vertical"'
                self.outfp.write('<textbox id="%d" bbox="%s"%s>\n' %
                                 (item.index, bbox2str(item.bbox), wmode))
                for child in item:
                    render(child)
                self.outfp.write('</textbox>\n')
            elif isinstance(item, LTChar):
                self.outfp.write('<text font="%s" bbox="%s" size="%.3f">' %
                                 (q(item.fontname), bbox2str(item.bbox),
                                  item.size))
                self.write_text(item.get_text())
                self.outfp.write('</text>\n')
            elif isinstance(item, LTText):
                self.outfp.write('<text>%s</text>\n' % item.get_text())
            elif isinstance(item, LTImage):
                if self.imagewriter is not None:
                    name = self.imagewriter.export_image(item)
                    self.outfp.write('<image src="%s" width="%d" height="%d" />\n' %
                                     (q(name), item.width, item.height))
                else:
                    self.outfp.write('<image width="%d" height="%d" />\n' %
                                     (item.width, item.height))
            else:
                assert 0, item
            return
        render(ltpage)
        return

    def close(self):
        self.write_footer()
        return

@@ -1,64 +0,0 @@
#!/usr/bin/env python
import re
from .psparser import PSLiteral
from .glyphlist import glyphname2unicode
from .latin_enc import ENCODING


STRIP_NAME = re.compile(r'[0-9]+')


## name2unicode
##
def name2unicode(name):
    """Converts Adobe glyph names to Unicode numbers."""
    if name in glyphname2unicode:
        return glyphname2unicode[name]
    m = STRIP_NAME.search(name)
    if not m:
        raise KeyError(name)
    return chr(int(m.group(0)))

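# For example (illustrative):
#
#   name2unicode('quoteright')  # -> '\u2019', from the glyph list
#   name2unicode('g65')         # -> 'A', via the numeric fallback
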
## EncodingDB
##
class EncodingDB:

    std2unicode = {}
    mac2unicode = {}
    win2unicode = {}
    pdf2unicode = {}
    for (name, std, mac, win, pdf) in ENCODING:
        c = name2unicode(name)
        if std:
            std2unicode[std] = c
        if mac:
            mac2unicode[mac] = c
        if win:
            win2unicode[win] = c
        if pdf:
            pdf2unicode[pdf] = c

    encodings = {
        'StandardEncoding': std2unicode,
        'MacRomanEncoding': mac2unicode,
        'WinAnsiEncoding': win2unicode,
        'PDFDocEncoding': pdf2unicode,
    }

    @classmethod
    def get_encoding(klass, name, diff=None):
        cid2unicode = klass.encodings.get(name, klass.std2unicode)
        if diff:
            cid2unicode = cid2unicode.copy()
            cid = 0
            for x in diff:
                if isinstance(x, int):
                    cid = x
                elif isinstance(x, PSLiteral):
                    try:
                        cid2unicode[cid] = name2unicode(x.name)
                    except KeyError:
                        pass
                    cid += 1
        return cid2unicode
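# Sketch of resolving a base encoding plus a /Differences array (the
# diff list below is hypothetical):
#
#   diff = [65, PSLiteral('Alpha')]  # remap code 65 onwards
#   table = EncodingDB.get_encoding('WinAnsiEncoding', diff)
#   # table[65] is now '\u0391' (GREEK CAPITAL LETTER ALPHA)
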
@@ -1,46 +0,0 @@
#!/usr/bin/env python

""" Font metrics for the Adobe core 14 fonts.

Font metrics are used to compute the boundary of each character
written with a proportional font.

The following data were extracted from the AFM files:

  http://www.ctan.org/tex-archive/fonts/adobe/afm/

"""

### BEGIN Verbatim copy of the license part

#
# Adobe Core 35 AFM Files with 314 Glyph Entries - ReadMe
#
# This file and the 35 PostScript(R) AFM files it accompanies may be
# used, copied, and distributed for any purpose and without charge,
# with or without modification, provided that all copyright notices
# are retained; that the AFM files are not distributed without this
# file; that all modifications to this file or any of the AFM files
# are prominently noted in the modified file(s); and that this
# paragraph is not modified.  Adobe Systems has no responsibility or
# obligation to support the use of the AFM files.
#

### END Verbatim copy of the license part

FONT_METRICS = {
'Courier': ({'FontName': 'Courier', 'Descent': -194.0, 'FontBBox': (-6.0, -249.0, 639.0, 803.0), 'FontWeight': 'Medium', 'CapHeight': 572.0, 'FontFamily': 'Courier', 'Flags': 64, 'XHeight': 434.0, 'ItalicAngle': 0.0, 'Ascent': 627.0}, {u' ': 600, u'!': 600, u'"': 600, u'#': 600, u'$': 600, u'%': 600, u'&': 600, u"'": 600, u'(': 600, u')': 600, u'*': 600, u'+': 600, u',': 600, u'-': 600, u'.': 600, u'/': 600, u'0': 600, u'1': 600, u'2': 600, u'3': 600, u'4': 600, u'5': 600, u'6': 600, u'7': 600, u'8': 600, u'9': 600, u':': 600, u';': 600, u'<': 600, u'=': 600, u'>': 600, u'?': 600, u'@': 600, u'A': 600, u'B': 600, u'C': 600, u'D': 600, u'E': 600, u'F': 600, u'G': 600, u'H': 600, u'I': 600, u'J': 600, u'K': 600, u'L': 600, u'M': 600, u'N': 600, u'O': 600, u'P': 600, u'Q': 600, u'R': 600, u'S': 600, u'T': 600, u'U': 600, u'V': 600, u'W': 600, u'X': 600, u'Y': 600, u'Z': 600, u'[': 600, u'\\': 600, u']': 600, u'^': 600, u'_': 600, u'`': 600, u'a': 600, u'b': 600, u'c': 600, u'd': 600, u'e': 600, u'f': 600, u'g': 600, u'h': 600, u'i': 600, u'j': 600, u'k': 600, u'l': 600, u'm': 600, u'n': 600, u'o': 600, u'p': 600, u'q': 600, u'r': 600, u's': 600, u't': 600, u'u': 600, u'v': 600, u'w': 600, u'x': 600, u'y': 600, u'z': 600, u'{': 600, u'|': 600, u'}': 600, u'~': 600, u'\xa1': 600, u'\xa2': 600, u'\xa3': 600, u'\xa4': 600, u'\xa5': 600, u'\xa6': 600, u'\xa7': 600, u'\xa8': 600, u'\xa9': 600, u'\xaa': 600, u'\xab': 600, u'\xac': 600, u'\xae': 600, u'\xaf': 600, u'\xb0': 600, u'\xb1': 600, u'\xb2': 600, u'\xb3': 600, u'\xb4': 600, u'\xb5': 600, u'\xb6': 600, u'\xb7': 600, u'\xb8': 600, u'\xb9': 600, u'\xba': 600, u'\xbb': 600, u'\xbc': 600, u'\xbd': 600, u'\xbe': 600, u'\xbf': 600, u'\xc0': 600, u'\xc1': 600, u'\xc2': 600, u'\xc3': 600, u'\xc4': 600, u'\xc5': 600, u'\xc6': 600, u'\xc7': 600, u'\xc8': 600, u'\xc9': 600, u'\xca': 600, u'\xcb': 600, u'\xcc': 600, u'\xcd': 600, u'\xce': 600, u'\xcf': 600, u'\xd0': 600, u'\xd1': 600, u'\xd2': 600, u'\xd3': 600, u'\xd4': 600, u'\xd5': 600, u'\xd6': 600, u'\xd7': 600, u'\xd8': 600, u'\xd9': 600, u'\xda': 600, u'\xdb': 600, u'\xdc': 600, u'\xdd': 600, u'\xde': 600, u'\xdf': 600, u'\xe0': 600, u'\xe1': 600, u'\xe2': 600, u'\xe3': 600, u'\xe4': 600, u'\xe5': 600, u'\xe6': 600, u'\xe7': 600, u'\xe8': 600, u'\xe9': 600, u'\xea': 600, u'\xeb': 600, u'\xec': 600, u'\xed': 600, u'\xee': 600, u'\xef': 600, u'\xf0': 600, u'\xf1': 600, u'\xf2': 600, u'\xf3': 600, u'\xf4': 600, u'\xf5': 600, u'\xf6': 600, u'\xf7': 600, u'\xf8': 600, u'\xf9': 600, u'\xfa': 600, u'\xfb': 600, u'\xfc': 600, u'\xfd': 600, u'\xfe': 600, u'\xff': 600, u'\u0100': 600, u'\u0101': 600, u'\u0102': 600, u'\u0103': 600, u'\u0104': 600, u'\u0105': 600, u'\u0106': 600, u'\u0107': 600, u'\u010c': 600, u'\u010d': 600, u'\u010e': 600, u'\u010f': 600, u'\u0110': 600, u'\u0111': 600, u'\u0112': 600, u'\u0113': 600, u'\u0116': 600, u'\u0117': 600, u'\u0118': 600, u'\u0119': 600, u'\u011a': 600, u'\u011b': 600, u'\u011e': 600, u'\u011f': 600, u'\u0122': 600, u'\u0123': 600, u'\u012a': 600, u'\u012b': 600, u'\u012e': 600, u'\u012f': 600, u'\u0130': 600, u'\u0131': 600, u'\u0136': 600, u'\u0137': 600, u'\u0139': 600, u'\u013a': 600, u'\u013b': 600, u'\u013c': 600, u'\u013d': 600, u'\u013e': 600, u'\u0141': 600, u'\u0142': 600, u'\u0143': 600, u'\u0144': 600, u'\u0145': 600, u'\u0146': 600, u'\u0147': 600, u'\u0148': 600, u'\u014c': 600, u'\u014d': 600, u'\u0150': 600, u'\u0151': 600, u'\u0152': 600, u'\u0153': 600, u'\u0154': 600, u'\u0155': 600, u'\u0156': 600, u'\u0157': 600, u'\u0158': 600, u'\u0159': 
600, u'\u015a': 600, u'\u015b': 600, u'\u015e': 600, u'\u015f': 600, u'\u0160': 600, u'\u0161': 600, u'\u0162': 600, u'\u0163': 600, u'\u0164': 600, u'\u0165': 600, u'\u016a': 600, u'\u016b': 600, u'\u016e': 600, u'\u016f': 600, u'\u0170': 600, u'\u0171': 600, u'\u0172': 600, u'\u0173': 600, u'\u0178': 600, u'\u0179': 600, u'\u017a': 600, u'\u017b': 600, u'\u017c': 600, u'\u017d': 600, u'\u017e': 600, u'\u0192': 600, u'\u0218': 600, u'\u0219': 600, u'\u02c6': 600, u'\u02c7': 600, u'\u02d8': 600, u'\u02d9': 600, u'\u02da': 600, u'\u02db': 600, u'\u02dc': 600, u'\u02dd': 600, u'\u2013': 600, u'\u2014': 600, u'\u2018': 600, u'\u2019': 600, u'\u201a': 600, u'\u201c': 600, u'\u201d': 600, u'\u201e': 600, u'\u2020': 600, u'\u2021': 600, u'\u2022': 600, u'\u2026': 600, u'\u2030': 600, u'\u2039': 600, u'\u203a': 600, u'\u2044': 600, u'\u2122': 600, u'\u2202': 600, u'\u2206': 600, u'\u2211': 600, u'\u2212': 600, u'\u221a': 600, u'\u2260': 600, u'\u2264': 600, u'\u2265': 600, u'\u25ca': 600, u'\uf6c3': 600, u'\ufb01': 600, u'\ufb02': 600}),
'Courier-Bold': ({'FontName': 'Courier-Bold', 'Descent': -194.0, 'FontBBox': (-88.0, -249.0, 697.0, 811.0), 'FontWeight': 'Bold', 'CapHeight': 572.0, 'FontFamily': 'Courier', 'Flags': 64, 'XHeight': 434.0, 'ItalicAngle': 0.0, 'Ascent': 627.0}, {u' ': 600, u'!': 600, u'"': 600, u'#': 600, u'$': 600, u'%': 600, u'&': 600, u"'": 600, u'(': 600, u')': 600, u'*': 600, u'+': 600, u',': 600, u'-': 600, u'.': 600, u'/': 600, u'0': 600, u'1': 600, u'2': 600, u'3': 600, u'4': 600, u'5': 600, u'6': 600, u'7': 600, u'8': 600, u'9': 600, u':': 600, u';': 600, u'<': 600, u'=': 600, u'>': 600, u'?': 600, u'@': 600, u'A': 600, u'B': 600, u'C': 600, u'D': 600, u'E': 600, u'F': 600, u'G': 600, u'H': 600, u'I': 600, u'J': 600, u'K': 600, u'L': 600, u'M': 600, u'N': 600, u'O': 600, u'P': 600, u'Q': 600, u'R': 600, u'S': 600, u'T': 600, u'U': 600, u'V': 600, u'W': 600, u'X': 600, u'Y': 600, u'Z': 600, u'[': 600, u'\\': 600, u']': 600, u'^': 600, u'_': 600, u'`': 600, u'a': 600, u'b': 600, u'c': 600, u'd': 600, u'e': 600, u'f': 600, u'g': 600, u'h': 600, u'i': 600, u'j': 600, u'k': 600, u'l': 600, u'm': 600, u'n': 600, u'o': 600, u'p': 600, u'q': 600, u'r': 600, u's': 600, u't': 600, u'u': 600, u'v': 600, u'w': 600, u'x': 600, u'y': 600, u'z': 600, u'{': 600, u'|': 600, u'}': 600, u'~': 600, u'\xa1': 600, u'\xa2': 600, u'\xa3': 600, u'\xa4': 600, u'\xa5': 600, u'\xa6': 600, u'\xa7': 600, u'\xa8': 600, u'\xa9': 600, u'\xaa': 600, u'\xab': 600, u'\xac': 600, u'\xae': 600, u'\xaf': 600, u'\xb0': 600, u'\xb1': 600, u'\xb2': 600, u'\xb3': 600, u'\xb4': 600, u'\xb5': 600, u'\xb6': 600, u'\xb7': 600, u'\xb8': 600, u'\xb9': 600, u'\xba': 600, u'\xbb': 600, u'\xbc': 600, u'\xbd': 600, u'\xbe': 600, u'\xbf': 600, u'\xc0': 600, u'\xc1': 600, u'\xc2': 600, u'\xc3': 600, u'\xc4': 600, u'\xc5': 600, u'\xc6': 600, u'\xc7': 600, u'\xc8': 600, u'\xc9': 600, u'\xca': 600, u'\xcb': 600, u'\xcc': 600, u'\xcd': 600, u'\xce': 600, u'\xcf': 600, u'\xd0': 600, u'\xd1': 600, u'\xd2': 600, u'\xd3': 600, u'\xd4': 600, u'\xd5': 600, u'\xd6': 600, u'\xd7': 600, u'\xd8': 600, u'\xd9': 600, u'\xda': 600, u'\xdb': 600, u'\xdc': 600, u'\xdd': 600, u'\xde': 600, u'\xdf': 600, u'\xe0': 600, u'\xe1': 600, u'\xe2': 600, u'\xe3': 600, u'\xe4': 600, u'\xe5': 600, u'\xe6': 600, u'\xe7': 600, u'\xe8': 600, u'\xe9': 600, u'\xea': 600, u'\xeb': 600, u'\xec': 600, u'\xed': 600, u'\xee': 600, u'\xef': 600, u'\xf0': 600, u'\xf1': 600, u'\xf2': 600, u'\xf3': 600, u'\xf4': 600, u'\xf5': 600, u'\xf6': 600, u'\xf7': 600, u'\xf8': 600, u'\xf9': 600, u'\xfa': 600, u'\xfb': 600, u'\xfc': 600, u'\xfd': 600, u'\xfe': 600, u'\xff': 600, u'\u0100': 600, u'\u0101': 600, u'\u0102': 600, u'\u0103': 600, u'\u0104': 600, u'\u0105': 600, u'\u0106': 600, u'\u0107': 600, u'\u010c': 600, u'\u010d': 600, u'\u010e': 600, u'\u010f': 600, u'\u0110': 600, u'\u0111': 600, u'\u0112': 600, u'\u0113': 600, u'\u0116': 600, u'\u0117': 600, u'\u0118': 600, u'\u0119': 600, u'\u011a': 600, u'\u011b': 600, u'\u011e': 600, u'\u011f': 600, u'\u0122': 600, u'\u0123': 600, u'\u012a': 600, u'\u012b': 600, u'\u012e': 600, u'\u012f': 600, u'\u0130': 600, u'\u0131': 600, u'\u0136': 600, u'\u0137': 600, u'\u0139': 600, u'\u013a': 600, u'\u013b': 600, u'\u013c': 600, u'\u013d': 600, u'\u013e': 600, u'\u0141': 600, u'\u0142': 600, u'\u0143': 600, u'\u0144': 600, u'\u0145': 600, u'\u0146': 600, u'\u0147': 600, u'\u0148': 600, u'\u014c': 600, u'\u014d': 600, u'\u0150': 600, u'\u0151': 600, u'\u0152': 600, u'\u0153': 600, u'\u0154': 600, u'\u0155': 600, u'\u0156': 600, u'\u0157': 600, u'\u0158': 600, 
u'\u0159': 600, u'\u015a': 600, u'\u015b': 600, u'\u015e': 600, u'\u015f': 600, u'\u0160': 600, u'\u0161': 600, u'\u0162': 600, u'\u0163': 600, u'\u0164': 600, u'\u0165': 600, u'\u016a': 600, u'\u016b': 600, u'\u016e': 600, u'\u016f': 600, u'\u0170': 600, u'\u0171': 600, u'\u0172': 600, u'\u0173': 600, u'\u0178': 600, u'\u0179': 600, u'\u017a': 600, u'\u017b': 600, u'\u017c': 600, u'\u017d': 600, u'\u017e': 600, u'\u0192': 600, u'\u0218': 600, u'\u0219': 600, u'\u02c6': 600, u'\u02c7': 600, u'\u02d8': 600, u'\u02d9': 600, u'\u02da': 600, u'\u02db': 600, u'\u02dc': 600, u'\u02dd': 600, u'\u2013': 600, u'\u2014': 600, u'\u2018': 600, u'\u2019': 600, u'\u201a': 600, u'\u201c': 600, u'\u201d': 600, u'\u201e': 600, u'\u2020': 600, u'\u2021': 600, u'\u2022': 600, u'\u2026': 600, u'\u2030': 600, u'\u2039': 600, u'\u203a': 600, u'\u2044': 600, u'\u2122': 600, u'\u2202': 600, u'\u2206': 600, u'\u2211': 600, u'\u2212': 600, u'\u221a': 600, u'\u2260': 600, u'\u2264': 600, u'\u2265': 600, u'\u25ca': 600, u'\uf6c3': 600, u'\ufb01': 600, u'\ufb02': 600}),
'Courier-BoldOblique': ({'FontName': 'Courier-BoldOblique', 'Descent': -194.0, 'FontBBox': (-49.0, -249.0, 758.0, 811.0), 'FontWeight': 'Bold', 'CapHeight': 572.0, 'FontFamily': 'Courier', 'Flags': 64, 'XHeight': 434.0, 'ItalicAngle': -11.0, 'Ascent': 627.0}, {u' ': 600, u'!': 600, u'"': 600, u'#': 600, u'$': 600, u'%': 600, u'&': 600, u"'": 600, u'(': 600, u')': 600, u'*': 600, u'+': 600, u',': 600, u'-': 600, u'.': 600, u'/': 600, u'0': 600, u'1': 600, u'2': 600, u'3': 600, u'4': 600, u'5': 600, u'6': 600, u'7': 600, u'8': 600, u'9': 600, u':': 600, u';': 600, u'<': 600, u'=': 600, u'>': 600, u'?': 600, u'@': 600, u'A': 600, u'B': 600, u'C': 600, u'D': 600, u'E': 600, u'F': 600, u'G': 600, u'H': 600, u'I': 600, u'J': 600, u'K': 600, u'L': 600, u'M': 600, u'N': 600, u'O': 600, u'P': 600, u'Q': 600, u'R': 600, u'S': 600, u'T': 600, u'U': 600, u'V': 600, u'W': 600, u'X': 600, u'Y': 600, u'Z': 600, u'[': 600, u'\\': 600, u']': 600, u'^': 600, u'_': 600, u'`': 600, u'a': 600, u'b': 600, u'c': 600, u'd': 600, u'e': 600, u'f': 600, u'g': 600, u'h': 600, u'i': 600, u'j': 600, u'k': 600, u'l': 600, u'm': 600, u'n': 600, u'o': 600, u'p': 600, u'q': 600, u'r': 600, u's': 600, u't': 600, u'u': 600, u'v': 600, u'w': 600, u'x': 600, u'y': 600, u'z': 600, u'{': 600, u'|': 600, u'}': 600, u'~': 600, u'\xa1': 600, u'\xa2': 600, u'\xa3': 600, u'\xa4': 600, u'\xa5': 600, u'\xa6': 600, u'\xa7': 600, u'\xa8': 600, u'\xa9': 600, u'\xaa': 600, u'\xab': 600, u'\xac': 600, u'\xae': 600, u'\xaf': 600, u'\xb0': 600, u'\xb1': 600, u'\xb2': 600, u'\xb3': 600, u'\xb4': 600, u'\xb5': 600, u'\xb6': 600, u'\xb7': 600, u'\xb8': 600, u'\xb9': 600, u'\xba': 600, u'\xbb': 600, u'\xbc': 600, u'\xbd': 600, u'\xbe': 600, u'\xbf': 600, u'\xc0': 600, u'\xc1': 600, u'\xc2': 600, u'\xc3': 600, u'\xc4': 600, u'\xc5': 600, u'\xc6': 600, u'\xc7': 600, u'\xc8': 600, u'\xc9': 600, u'\xca': 600, u'\xcb': 600, u'\xcc': 600, u'\xcd': 600, u'\xce': 600, u'\xcf': 600, u'\xd0': 600, u'\xd1': 600, u'\xd2': 600, u'\xd3': 600, u'\xd4': 600, u'\xd5': 600, u'\xd6': 600, u'\xd7': 600, u'\xd8': 600, u'\xd9': 600, u'\xda': 600, u'\xdb': 600, u'\xdc': 600, u'\xdd': 600, u'\xde': 600, u'\xdf': 600, u'\xe0': 600, u'\xe1': 600, u'\xe2': 600, u'\xe3': 600, u'\xe4': 600, u'\xe5': 600, u'\xe6': 600, u'\xe7': 600, u'\xe8': 600, u'\xe9': 600, u'\xea': 600, u'\xeb': 600, u'\xec': 600, u'\xed': 600, u'\xee': 600, u'\xef': 600, u'\xf0': 600, u'\xf1': 600, u'\xf2': 600, u'\xf3': 600, u'\xf4': 600, u'\xf5': 600, u'\xf6': 600, u'\xf7': 600, u'\xf8': 600, u'\xf9': 600, u'\xfa': 600, u'\xfb': 600, u'\xfc': 600, u'\xfd': 600, u'\xfe': 600, u'\xff': 600, u'\u0100': 600, u'\u0101': 600, u'\u0102': 600, u'\u0103': 600, u'\u0104': 600, u'\u0105': 600, u'\u0106': 600, u'\u0107': 600, u'\u010c': 600, u'\u010d': 600, u'\u010e': 600, u'\u010f': 600, u'\u0110': 600, u'\u0111': 600, u'\u0112': 600, u'\u0113': 600, u'\u0116': 600, u'\u0117': 600, u'\u0118': 600, u'\u0119': 600, u'\u011a': 600, u'\u011b': 600, u'\u011e': 600, u'\u011f': 600, u'\u0122': 600, u'\u0123': 600, u'\u012a': 600, u'\u012b': 600, u'\u012e': 600, u'\u012f': 600, u'\u0130': 600, u'\u0131': 600, u'\u0136': 600, u'\u0137': 600, u'\u0139': 600, u'\u013a': 600, u'\u013b': 600, u'\u013c': 600, u'\u013d': 600, u'\u013e': 600, u'\u0141': 600, u'\u0142': 600, u'\u0143': 600, u'\u0144': 600, u'\u0145': 600, u'\u0146': 600, u'\u0147': 600, u'\u0148': 600, u'\u014c': 600, u'\u014d': 600, u'\u0150': 600, u'\u0151': 600, u'\u0152': 600, u'\u0153': 600, u'\u0154': 600, u'\u0155': 600, u'\u0156': 600, u'\u0157': 600, 
u'\u0158': 600, u'\u0159': 600, u'\u015a': 600, u'\u015b': 600, u'\u015e': 600, u'\u015f': 600, u'\u0160': 600, u'\u0161': 600, u'\u0162': 600, u'\u0163': 600, u'\u0164': 600, u'\u0165': 600, u'\u016a': 600, u'\u016b': 600, u'\u016e': 600, u'\u016f': 600, u'\u0170': 600, u'\u0171': 600, u'\u0172': 600, u'\u0173': 600, u'\u0178': 600, u'\u0179': 600, u'\u017a': 600, u'\u017b': 600, u'\u017c': 600, u'\u017d': 600, u'\u017e': 600, u'\u0192': 600, u'\u0218': 600, u'\u0219': 600, u'\u02c6': 600, u'\u02c7': 600, u'\u02d8': 600, u'\u02d9': 600, u'\u02da': 600, u'\u02db': 600, u'\u02dc': 600, u'\u02dd': 600, u'\u2013': 600, u'\u2014': 600, u'\u2018': 600, u'\u2019': 600, u'\u201a': 600, u'\u201c': 600, u'\u201d': 600, u'\u201e': 600, u'\u2020': 600, u'\u2021': 600, u'\u2022': 600, u'\u2026': 600, u'\u2030': 600, u'\u2039': 600, u'\u203a': 600, u'\u2044': 600, u'\u2122': 600, u'\u2202': 600, u'\u2206': 600, u'\u2211': 600, u'\u2212': 600, u'\u221a': 600, u'\u2260': 600, u'\u2264': 600, u'\u2265': 600, u'\u25ca': 600, u'\uf6c3': 600, u'\ufb01': 600, u'\ufb02': 600}),
'Courier-Oblique': ({'FontName': 'Courier-Oblique', 'Descent': -194.0, 'FontBBox': (-49.0, -249.0, 749.0, 803.0), 'FontWeight': 'Medium', 'CapHeight': 572.0, 'FontFamily': 'Courier', 'Flags': 64, 'XHeight': 434.0, 'ItalicAngle': -11.0, 'Ascent': 627.0}, {u' ': 600, u'!': 600, u'"': 600, u'#': 600, u'$': 600, u'%': 600, u'&': 600, u"'": 600, u'(': 600, u')': 600, u'*': 600, u'+': 600, u',': 600, u'-': 600, u'.': 600, u'/': 600, u'0': 600, u'1': 600, u'2': 600, u'3': 600, u'4': 600, u'5': 600, u'6': 600, u'7': 600, u'8': 600, u'9': 600, u':': 600, u';': 600, u'<': 600, u'=': 600, u'>': 600, u'?': 600, u'@': 600, u'A': 600, u'B': 600, u'C': 600, u'D': 600, u'E': 600, u'F': 600, u'G': 600, u'H': 600, u'I': 600, u'J': 600, u'K': 600, u'L': 600, u'M': 600, u'N': 600, u'O': 600, u'P': 600, u'Q': 600, u'R': 600, u'S': 600, u'T': 600, u'U': 600, u'V': 600, u'W': 600, u'X': 600, u'Y': 600, u'Z': 600, u'[': 600, u'\\': 600, u']': 600, u'^': 600, u'_': 600, u'`': 600, u'a': 600, u'b': 600, u'c': 600, u'd': 600, u'e': 600, u'f': 600, u'g': 600, u'h': 600, u'i': 600, u'j': 600, u'k': 600, u'l': 600, u'm': 600, u'n': 600, u'o': 600, u'p': 600, u'q': 600, u'r': 600, u's': 600, u't': 600, u'u': 600, u'v': 600, u'w': 600, u'x': 600, u'y': 600, u'z': 600, u'{': 600, u'|': 600, u'}': 600, u'~': 600, u'\xa1': 600, u'\xa2': 600, u'\xa3': 600, u'\xa4': 600, u'\xa5': 600, u'\xa6': 600, u'\xa7': 600, u'\xa8': 600, u'\xa9': 600, u'\xaa': 600, u'\xab': 600, u'\xac': 600, u'\xae': 600, u'\xaf': 600, u'\xb0': 600, u'\xb1': 600, u'\xb2': 600, u'\xb3': 600, u'\xb4': 600, u'\xb5': 600, u'\xb6': 600, u'\xb7': 600, u'\xb8': 600, u'\xb9': 600, u'\xba': 600, u'\xbb': 600, u'\xbc': 600, u'\xbd': 600, u'\xbe': 600, u'\xbf': 600, u'\xc0': 600, u'\xc1': 600, u'\xc2': 600, u'\xc3': 600, u'\xc4': 600, u'\xc5': 600, u'\xc6': 600, u'\xc7': 600, u'\xc8': 600, u'\xc9': 600, u'\xca': 600, u'\xcb': 600, u'\xcc': 600, u'\xcd': 600, u'\xce': 600, u'\xcf': 600, u'\xd0': 600, u'\xd1': 600, u'\xd2': 600, u'\xd3': 600, u'\xd4': 600, u'\xd5': 600, u'\xd6': 600, u'\xd7': 600, u'\xd8': 600, u'\xd9': 600, u'\xda': 600, u'\xdb': 600, u'\xdc': 600, u'\xdd': 600, u'\xde': 600, u'\xdf': 600, u'\xe0': 600, u'\xe1': 600, u'\xe2': 600, u'\xe3': 600, u'\xe4': 600, u'\xe5': 600, u'\xe6': 600, u'\xe7': 600, u'\xe8': 600, u'\xe9': 600, u'\xea': 600, u'\xeb': 600, u'\xec': 600, u'\xed': 600, u'\xee': 600, u'\xef': 600, u'\xf0': 600, u'\xf1': 600, u'\xf2': 600, u'\xf3': 600, u'\xf4': 600, u'\xf5': 600, u'\xf6': 600, u'\xf7': 600, u'\xf8': 600, u'\xf9': 600, u'\xfa': 600, u'\xfb': 600, u'\xfc': 600, u'\xfd': 600, u'\xfe': 600, u'\xff': 600, u'\u0100': 600, u'\u0101': 600, u'\u0102': 600, u'\u0103': 600, u'\u0104': 600, u'\u0105': 600, u'\u0106': 600, u'\u0107': 600, u'\u010c': 600, u'\u010d': 600, u'\u010e': 600, u'\u010f': 600, u'\u0110': 600, u'\u0111': 600, u'\u0112': 600, u'\u0113': 600, u'\u0116': 600, u'\u0117': 600, u'\u0118': 600, u'\u0119': 600, u'\u011a': 600, u'\u011b': 600, u'\u011e': 600, u'\u011f': 600, u'\u0122': 600, u'\u0123': 600, u'\u012a': 600, u'\u012b': 600, u'\u012e': 600, u'\u012f': 600, u'\u0130': 600, u'\u0131': 600, u'\u0136': 600, u'\u0137': 600, u'\u0139': 600, u'\u013a': 600, u'\u013b': 600, u'\u013c': 600, u'\u013d': 600, u'\u013e': 600, u'\u0141': 600, u'\u0142': 600, u'\u0143': 600, u'\u0144': 600, u'\u0145': 600, u'\u0146': 600, u'\u0147': 600, u'\u0148': 600, u'\u014c': 600, u'\u014d': 600, u'\u0150': 600, u'\u0151': 600, u'\u0152': 600, u'\u0153': 600, u'\u0154': 600, u'\u0155': 600, u'\u0156': 600, u'\u0157': 600, 
u'\u0158': 600, u'\u0159': 600, u'\u015a': 600, u'\u015b': 600, u'\u015e': 600, u'\u015f': 600, u'\u0160': 600, u'\u0161': 600, u'\u0162': 600, u'\u0163': 600, u'\u0164': 600, u'\u0165': 600, u'\u016a': 600, u'\u016b': 600, u'\u016e': 600, u'\u016f': 600, u'\u0170': 600, u'\u0171': 600, u'\u0172': 600, u'\u0173': 600, u'\u0178': 600, u'\u0179': 600, u'\u017a': 600, u'\u017b': 600, u'\u017c': 600, u'\u017d': 600, u'\u017e': 600, u'\u0192': 600, u'\u0218': 600, u'\u0219': 600, u'\u02c6': 600, u'\u02c7': 600, u'\u02d8': 600, u'\u02d9': 600, u'\u02da': 600, u'\u02db': 600, u'\u02dc': 600, u'\u02dd': 600, u'\u2013': 600, u'\u2014': 600, u'\u2018': 600, u'\u2019': 600, u'\u201a': 600, u'\u201c': 600, u'\u201d': 600, u'\u201e': 600, u'\u2020': 600, u'\u2021': 600, u'\u2022': 600, u'\u2026': 600, u'\u2030': 600, u'\u2039': 600, u'\u203a': 600, u'\u2044': 600, u'\u2122': 600, u'\u2202': 600, u'\u2206': 600, u'\u2211': 600, u'\u2212': 600, u'\u221a': 600, u'\u2260': 600, u'\u2264': 600, u'\u2265': 600, u'\u25ca': 600, u'\uf6c3': 600, u'\ufb01': 600, u'\ufb02': 600}),
'Helvetica': ({'FontName': 'Helvetica', 'Descent': -207.0, 'FontBBox': (-166.0, -225.0, 1000.0, 931.0), 'FontWeight': 'Medium', 'CapHeight': 718.0, 'FontFamily': 'Helvetica', 'Flags': 0, 'XHeight': 523.0, 'ItalicAngle': 0.0, 'Ascent': 718.0}, {u' ': 278, u'!': 278, u'"': 355, u'#': 556, u'$': 556, u'%': 889, u'&': 667, u"'": 191, u'(': 333, u')': 333, u'*': 389, u'+': 584, u',': 278, u'-': 333, u'.': 278, u'/': 278, u'0': 556, u'1': 556, u'2': 556, u'3': 556, u'4': 556, u'5': 556, u'6': 556, u'7': 556, u'8': 556, u'9': 556, u':': 278, u';': 278, u'<': 584, u'=': 584, u'>': 584, u'?': 556, u'@': 1015, u'A': 667, u'B': 667, u'C': 722, u'D': 722, u'E': 667, u'F': 611, u'G': 778, u'H': 722, u'I': 278, u'J': 500, u'K': 667, u'L': 556, u'M': 833, u'N': 722, u'O': 778, u'P': 667, u'Q': 778, u'R': 722, u'S': 667, u'T': 611, u'U': 722, u'V': 667, u'W': 944, u'X': 667, u'Y': 667, u'Z': 611, u'[': 278, u'\\': 278, u']': 278, u'^': 469, u'_': 556, u'`': 333, u'a': 556, u'b': 556, u'c': 500, u'd': 556, u'e': 556, u'f': 278, u'g': 556, u'h': 556, u'i': 222, u'j': 222, u'k': 500, u'l': 222, u'm': 833, u'n': 556, u'o': 556, u'p': 556, u'q': 556, u'r': 333, u's': 500, u't': 278, u'u': 556, u'v': 500, u'w': 722, u'x': 500, u'y': 500, u'z': 500, u'{': 334, u'|': 260, u'}': 334, u'~': 584, u'\xa1': 333, u'\xa2': 556, u'\xa3': 556, u'\xa4': 556, u'\xa5': 556, u'\xa6': 260, u'\xa7': 556, u'\xa8': 333, u'\xa9': 737, u'\xaa': 370, u'\xab': 556, u'\xac': 584, u'\xae': 737, u'\xaf': 333, u'\xb0': 400, u'\xb1': 584, u'\xb2': 333, u'\xb3': 333, u'\xb4': 333, u'\xb5': 556, u'\xb6': 537, u'\xb7': 278, u'\xb8': 333, u'\xb9': 333, u'\xba': 365, u'\xbb': 556, u'\xbc': 834, u'\xbd': 834, u'\xbe': 834, u'\xbf': 611, u'\xc0': 667, u'\xc1': 667, u'\xc2': 667, u'\xc3': 667, u'\xc4': 667, u'\xc5': 667, u'\xc6': 1000, u'\xc7': 722, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 278, u'\xcd': 278, u'\xce': 278, u'\xcf': 278, u'\xd0': 722, u'\xd1': 722, u'\xd2': 778, u'\xd3': 778, u'\xd4': 778, u'\xd5': 778, u'\xd6': 778, u'\xd7': 584, u'\xd8': 778, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 667, u'\xde': 667, u'\xdf': 611, u'\xe0': 556, u'\xe1': 556, u'\xe2': 556, u'\xe3': 556, u'\xe4': 556, u'\xe5': 556, u'\xe6': 889, u'\xe7': 500, u'\xe8': 556, u'\xe9': 556, u'\xea': 556, u'\xeb': 556, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 556, u'\xf1': 556, u'\xf2': 556, u'\xf3': 556, u'\xf4': 556, u'\xf5': 556, u'\xf6': 556, u'\xf7': 584, u'\xf8': 611, u'\xf9': 556, u'\xfa': 556, u'\xfb': 556, u'\xfc': 556, u'\xfd': 500, u'\xfe': 556, u'\xff': 500, u'\u0100': 667, u'\u0101': 556, u'\u0102': 667, u'\u0103': 556, u'\u0104': 667, u'\u0105': 556, u'\u0106': 722, u'\u0107': 500, u'\u010c': 722, u'\u010d': 500, u'\u010e': 722, u'\u010f': 643, u'\u0110': 722, u'\u0111': 556, u'\u0112': 667, u'\u0113': 556, u'\u0116': 667, u'\u0117': 556, u'\u0118': 667, u'\u0119': 556, u'\u011a': 667, u'\u011b': 556, u'\u011e': 778, u'\u011f': 556, u'\u0122': 778, u'\u0123': 556, u'\u012a': 278, u'\u012b': 278, u'\u012e': 278, u'\u012f': 222, u'\u0130': 278, u'\u0131': 278, u'\u0136': 667, u'\u0137': 500, u'\u0139': 556, u'\u013a': 222, u'\u013b': 556, u'\u013c': 222, u'\u013d': 556, u'\u013e': 299, u'\u0141': 556, u'\u0142': 222, u'\u0143': 722, u'\u0144': 556, u'\u0145': 722, u'\u0146': 556, u'\u0147': 722, u'\u0148': 556, u'\u014c': 778, u'\u014d': 556, u'\u0150': 778, u'\u0151': 556, u'\u0152': 1000, u'\u0153': 944, u'\u0154': 722, u'\u0155': 333, u'\u0156': 722, u'\u0157': 333, u'\u0158': 722, 
u'\u0159': 333, u'\u015a': 667, u'\u015b': 500, u'\u015e': 667, u'\u015f': 500, u'\u0160': 667, u'\u0161': 500, u'\u0162': 611, u'\u0163': 278, u'\u0164': 611, u'\u0165': 317, u'\u016a': 722, u'\u016b': 556, u'\u016e': 722, u'\u016f': 556, u'\u0170': 722, u'\u0171': 556, u'\u0172': 722, u'\u0173': 556, u'\u0178': 667, u'\u0179': 611, u'\u017a': 500, u'\u017b': 611, u'\u017c': 500, u'\u017d': 611, u'\u017e': 500, u'\u0192': 556, u'\u0218': 667, u'\u0219': 500, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 556, u'\u2014': 1000, u'\u2018': 222, u'\u2019': 222, u'\u201a': 222, u'\u201c': 333, u'\u201d': 333, u'\u201e': 333, u'\u2020': 556, u'\u2021': 556, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 476, u'\u2206': 612, u'\u2211': 600, u'\u2212': 584, u'\u221a': 453, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 471, u'\uf6c3': 250, u'\ufb01': 500, u'\ufb02': 500}),
'Helvetica-Bold': ({'FontName': 'Helvetica-Bold', 'Descent': -207.0, 'FontBBox': (-170.0, -228.0, 1003.0, 962.0), 'FontWeight': 'Bold', 'CapHeight': 718.0, 'FontFamily': 'Helvetica', 'Flags': 0, 'XHeight': 532.0, 'ItalicAngle': 0.0, 'Ascent': 718.0}, {u' ': 278, u'!': 333, u'"': 474, u'#': 556, u'$': 556, u'%': 889, u'&': 722, u"'": 238, u'(': 333, u')': 333, u'*': 389, u'+': 584, u',': 278, u'-': 333, u'.': 278, u'/': 278, u'0': 556, u'1': 556, u'2': 556, u'3': 556, u'4': 556, u'5': 556, u'6': 556, u'7': 556, u'8': 556, u'9': 556, u':': 333, u';': 333, u'<': 584, u'=': 584, u'>': 584, u'?': 611, u'@': 975, u'A': 722, u'B': 722, u'C': 722, u'D': 722, u'E': 667, u'F': 611, u'G': 778, u'H': 722, u'I': 278, u'J': 556, u'K': 722, u'L': 611, u'M': 833, u'N': 722, u'O': 778, u'P': 667, u'Q': 778, u'R': 722, u'S': 667, u'T': 611, u'U': 722, u'V': 667, u'W': 944, u'X': 667, u'Y': 667, u'Z': 611, u'[': 333, u'\\': 278, u']': 333, u'^': 584, u'_': 556, u'`': 333, u'a': 556, u'b': 611, u'c': 556, u'd': 611, u'e': 556, u'f': 333, u'g': 611, u'h': 611, u'i': 278, u'j': 278, u'k': 556, u'l': 278, u'm': 889, u'n': 611, u'o': 611, u'p': 611, u'q': 611, u'r': 389, u's': 556, u't': 333, u'u': 611, u'v': 556, u'w': 778, u'x': 556, u'y': 556, u'z': 500, u'{': 389, u'|': 280, u'}': 389, u'~': 584, u'\xa1': 333, u'\xa2': 556, u'\xa3': 556, u'\xa4': 556, u'\xa5': 556, u'\xa6': 280, u'\xa7': 556, u'\xa8': 333, u'\xa9': 737, u'\xaa': 370, u'\xab': 556, u'\xac': 584, u'\xae': 737, u'\xaf': 333, u'\xb0': 400, u'\xb1': 584, u'\xb2': 333, u'\xb3': 333, u'\xb4': 333, u'\xb5': 611, u'\xb6': 556, u'\xb7': 278, u'\xb8': 333, u'\xb9': 333, u'\xba': 365, u'\xbb': 556, u'\xbc': 834, u'\xbd': 834, u'\xbe': 834, u'\xbf': 611, u'\xc0': 722, u'\xc1': 722, u'\xc2': 722, u'\xc3': 722, u'\xc4': 722, u'\xc5': 722, u'\xc6': 1000, u'\xc7': 722, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 278, u'\xcd': 278, u'\xce': 278, u'\xcf': 278, u'\xd0': 722, u'\xd1': 722, u'\xd2': 778, u'\xd3': 778, u'\xd4': 778, u'\xd5': 778, u'\xd6': 778, u'\xd7': 584, u'\xd8': 778, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 667, u'\xde': 667, u'\xdf': 611, u'\xe0': 556, u'\xe1': 556, u'\xe2': 556, u'\xe3': 556, u'\xe4': 556, u'\xe5': 556, u'\xe6': 889, u'\xe7': 556, u'\xe8': 556, u'\xe9': 556, u'\xea': 556, u'\xeb': 556, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 611, u'\xf1': 611, u'\xf2': 611, u'\xf3': 611, u'\xf4': 611, u'\xf5': 611, u'\xf6': 611, u'\xf7': 584, u'\xf8': 611, u'\xf9': 611, u'\xfa': 611, u'\xfb': 611, u'\xfc': 611, u'\xfd': 556, u'\xfe': 611, u'\xff': 556, u'\u0100': 722, u'\u0101': 556, u'\u0102': 722, u'\u0103': 556, u'\u0104': 722, u'\u0105': 556, u'\u0106': 722, u'\u0107': 556, u'\u010c': 722, u'\u010d': 556, u'\u010e': 722, u'\u010f': 743, u'\u0110': 722, u'\u0111': 611, u'\u0112': 667, u'\u0113': 556, u'\u0116': 667, u'\u0117': 556, u'\u0118': 667, u'\u0119': 556, u'\u011a': 667, u'\u011b': 556, u'\u011e': 778, u'\u011f': 611, u'\u0122': 778, u'\u0123': 611, u'\u012a': 278, u'\u012b': 278, u'\u012e': 278, u'\u012f': 278, u'\u0130': 278, u'\u0131': 278, u'\u0136': 722, u'\u0137': 556, u'\u0139': 611, u'\u013a': 278, u'\u013b': 611, u'\u013c': 278, u'\u013d': 611, u'\u013e': 400, u'\u0141': 611, u'\u0142': 278, u'\u0143': 722, u'\u0144': 611, u'\u0145': 722, u'\u0146': 611, u'\u0147': 722, u'\u0148': 611, u'\u014c': 778, u'\u014d': 611, u'\u0150': 778, u'\u0151': 611, u'\u0152': 1000, u'\u0153': 944, u'\u0154': 722, u'\u0155': 389, u'\u0156': 722, u'\u0157': 389, u'\u0158': 
722, u'\u0159': 389, u'\u015a': 667, u'\u015b': 556, u'\u015e': 667, u'\u015f': 556, u'\u0160': 667, u'\u0161': 556, u'\u0162': 611, u'\u0163': 333, u'\u0164': 611, u'\u0165': 389, u'\u016a': 722, u'\u016b': 611, u'\u016e': 722, u'\u016f': 611, u'\u0170': 722, u'\u0171': 611, u'\u0172': 722, u'\u0173': 611, u'\u0178': 667, u'\u0179': 611, u'\u017a': 500, u'\u017b': 611, u'\u017c': 500, u'\u017d': 611, u'\u017e': 500, u'\u0192': 556, u'\u0218': 667, u'\u0219': 556, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 556, u'\u2014': 1000, u'\u2018': 278, u'\u2019': 278, u'\u201a': 278, u'\u201c': 500, u'\u201d': 500, u'\u201e': 500, u'\u2020': 556, u'\u2021': 556, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 494, u'\u2206': 612, u'\u2211': 600, u'\u2212': 584, u'\u221a': 549, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 494, u'\uf6c3': 250, u'\ufb01': 611, u'\ufb02': 611}),
'Helvetica-BoldOblique': ({'FontName': 'Helvetica-BoldOblique', 'Descent': -207.0, 'FontBBox': (-175.0, -228.0, 1114.0, 962.0), 'FontWeight': 'Bold', 'CapHeight': 718.0, 'FontFamily': 'Helvetica', 'Flags': 0, 'XHeight': 532.0, 'ItalicAngle': -12.0, 'Ascent': 718.0}, {u' ': 278, u'!': 333, u'"': 474, u'#': 556, u'$': 556, u'%': 889, u'&': 722, u"'": 238, u'(': 333, u')': 333, u'*': 389, u'+': 584, u',': 278, u'-': 333, u'.': 278, u'/': 278, u'0': 556, u'1': 556, u'2': 556, u'3': 556, u'4': 556, u'5': 556, u'6': 556, u'7': 556, u'8': 556, u'9': 556, u':': 333, u';': 333, u'<': 584, u'=': 584, u'>': 584, u'?': 611, u'@': 975, u'A': 722, u'B': 722, u'C': 722, u'D': 722, u'E': 667, u'F': 611, u'G': 778, u'H': 722, u'I': 278, u'J': 556, u'K': 722, u'L': 611, u'M': 833, u'N': 722, u'O': 778, u'P': 667, u'Q': 778, u'R': 722, u'S': 667, u'T': 611, u'U': 722, u'V': 667, u'W': 944, u'X': 667, u'Y': 667, u'Z': 611, u'[': 333, u'\\': 278, u']': 333, u'^': 584, u'_': 556, u'`': 333, u'a': 556, u'b': 611, u'c': 556, u'd': 611, u'e': 556, u'f': 333, u'g': 611, u'h': 611, u'i': 278, u'j': 278, u'k': 556, u'l': 278, u'm': 889, u'n': 611, u'o': 611, u'p': 611, u'q': 611, u'r': 389, u's': 556, u't': 333, u'u': 611, u'v': 556, u'w': 778, u'x': 556, u'y': 556, u'z': 500, u'{': 389, u'|': 280, u'}': 389, u'~': 584, u'\xa1': 333, u'\xa2': 556, u'\xa3': 556, u'\xa4': 556, u'\xa5': 556, u'\xa6': 280, u'\xa7': 556, u'\xa8': 333, u'\xa9': 737, u'\xaa': 370, u'\xab': 556, u'\xac': 584, u'\xae': 737, u'\xaf': 333, u'\xb0': 400, u'\xb1': 584, u'\xb2': 333, u'\xb3': 333, u'\xb4': 333, u'\xb5': 611, u'\xb6': 556, u'\xb7': 278, u'\xb8': 333, u'\xb9': 333, u'\xba': 365, u'\xbb': 556, u'\xbc': 834, u'\xbd': 834, u'\xbe': 834, u'\xbf': 611, u'\xc0': 722, u'\xc1': 722, u'\xc2': 722, u'\xc3': 722, u'\xc4': 722, u'\xc5': 722, u'\xc6': 1000, u'\xc7': 722, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 278, u'\xcd': 278, u'\xce': 278, u'\xcf': 278, u'\xd0': 722, u'\xd1': 722, u'\xd2': 778, u'\xd3': 778, u'\xd4': 778, u'\xd5': 778, u'\xd6': 778, u'\xd7': 584, u'\xd8': 778, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 667, u'\xde': 667, u'\xdf': 611, u'\xe0': 556, u'\xe1': 556, u'\xe2': 556, u'\xe3': 556, u'\xe4': 556, u'\xe5': 556, u'\xe6': 889, u'\xe7': 556, u'\xe8': 556, u'\xe9': 556, u'\xea': 556, u'\xeb': 556, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 611, u'\xf1': 611, u'\xf2': 611, u'\xf3': 611, u'\xf4': 611, u'\xf5': 611, u'\xf6': 611, u'\xf7': 584, u'\xf8': 611, u'\xf9': 611, u'\xfa': 611, u'\xfb': 611, u'\xfc': 611, u'\xfd': 556, u'\xfe': 611, u'\xff': 556, u'\u0100': 722, u'\u0101': 556, u'\u0102': 722, u'\u0103': 556, u'\u0104': 722, u'\u0105': 556, u'\u0106': 722, u'\u0107': 556, u'\u010c': 722, u'\u010d': 556, u'\u010e': 722, u'\u010f': 743, u'\u0110': 722, u'\u0111': 611, u'\u0112': 667, u'\u0113': 556, u'\u0116': 667, u'\u0117': 556, u'\u0118': 667, u'\u0119': 556, u'\u011a': 667, u'\u011b': 556, u'\u011e': 778, u'\u011f': 611, u'\u0122': 778, u'\u0123': 611, u'\u012a': 278, u'\u012b': 278, u'\u012e': 278, u'\u012f': 278, u'\u0130': 278, u'\u0131': 278, u'\u0136': 722, u'\u0137': 556, u'\u0139': 611, u'\u013a': 278, u'\u013b': 611, u'\u013c': 278, u'\u013d': 611, u'\u013e': 400, u'\u0141': 611, u'\u0142': 278, u'\u0143': 722, u'\u0144': 611, u'\u0145': 722, u'\u0146': 611, u'\u0147': 722, u'\u0148': 611, u'\u014c': 778, u'\u014d': 611, u'\u0150': 778, u'\u0151': 611, u'\u0152': 1000, u'\u0153': 944, u'\u0154': 722, u'\u0155': 389, u'\u0156': 722, u'\u0157': 
389, u'\u0158': 722, u'\u0159': 389, u'\u015a': 667, u'\u015b': 556, u'\u015e': 667, u'\u015f': 556, u'\u0160': 667, u'\u0161': 556, u'\u0162': 611, u'\u0163': 333, u'\u0164': 611, u'\u0165': 389, u'\u016a': 722, u'\u016b': 611, u'\u016e': 722, u'\u016f': 611, u'\u0170': 722, u'\u0171': 611, u'\u0172': 722, u'\u0173': 611, u'\u0178': 667, u'\u0179': 611, u'\u017a': 500, u'\u017b': 611, u'\u017c': 500, u'\u017d': 611, u'\u017e': 500, u'\u0192': 556, u'\u0218': 667, u'\u0219': 556, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 556, u'\u2014': 1000, u'\u2018': 278, u'\u2019': 278, u'\u201a': 278, u'\u201c': 500, u'\u201d': 500, u'\u201e': 500, u'\u2020': 556, u'\u2021': 556, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 494, u'\u2206': 612, u'\u2211': 600, u'\u2212': 584, u'\u221a': 549, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 494, u'\uf6c3': 250, u'\ufb01': 611, u'\ufb02': 611}),
'Helvetica-Oblique': ({'FontName': 'Helvetica-Oblique', 'Descent': -207.0, 'FontBBox': (-171.0, -225.0, 1116.0, 931.0), 'FontWeight': 'Medium', 'CapHeight': 718.0, 'FontFamily': 'Helvetica', 'Flags': 0, 'XHeight': 523.0, 'ItalicAngle': -12.0, 'Ascent': 718.0}, {u' ': 278, u'!': 278, u'"': 355, u'#': 556, u'$': 556, u'%': 889, u'&': 667, u"'": 191, u'(': 333, u')': 333, u'*': 389, u'+': 584, u',': 278, u'-': 333, u'.': 278, u'/': 278, u'0': 556, u'1': 556, u'2': 556, u'3': 556, u'4': 556, u'5': 556, u'6': 556, u'7': 556, u'8': 556, u'9': 556, u':': 278, u';': 278, u'<': 584, u'=': 584, u'>': 584, u'?': 556, u'@': 1015, u'A': 667, u'B': 667, u'C': 722, u'D': 722, u'E': 667, u'F': 611, u'G': 778, u'H': 722, u'I': 278, u'J': 500, u'K': 667, u'L': 556, u'M': 833, u'N': 722, u'O': 778, u'P': 667, u'Q': 778, u'R': 722, u'S': 667, u'T': 611, u'U': 722, u'V': 667, u'W': 944, u'X': 667, u'Y': 667, u'Z': 611, u'[': 278, u'\\': 278, u']': 278, u'^': 469, u'_': 556, u'`': 333, u'a': 556, u'b': 556, u'c': 500, u'd': 556, u'e': 556, u'f': 278, u'g': 556, u'h': 556, u'i': 222, u'j': 222, u'k': 500, u'l': 222, u'm': 833, u'n': 556, u'o': 556, u'p': 556, u'q': 556, u'r': 333, u's': 500, u't': 278, u'u': 556, u'v': 500, u'w': 722, u'x': 500, u'y': 500, u'z': 500, u'{': 334, u'|': 260, u'}': 334, u'~': 584, u'\xa1': 333, u'\xa2': 556, u'\xa3': 556, u'\xa4': 556, u'\xa5': 556, u'\xa6': 260, u'\xa7': 556, u'\xa8': 333, u'\xa9': 737, u'\xaa': 370, u'\xab': 556, u'\xac': 584, u'\xae': 737, u'\xaf': 333, u'\xb0': 400, u'\xb1': 584, u'\xb2': 333, u'\xb3': 333, u'\xb4': 333, u'\xb5': 556, u'\xb6': 537, u'\xb7': 278, u'\xb8': 333, u'\xb9': 333, u'\xba': 365, u'\xbb': 556, u'\xbc': 834, u'\xbd': 834, u'\xbe': 834, u'\xbf': 611, u'\xc0': 667, u'\xc1': 667, u'\xc2': 667, u'\xc3': 667, u'\xc4': 667, u'\xc5': 667, u'\xc6': 1000, u'\xc7': 722, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 278, u'\xcd': 278, u'\xce': 278, u'\xcf': 278, u'\xd0': 722, u'\xd1': 722, u'\xd2': 778, u'\xd3': 778, u'\xd4': 778, u'\xd5': 778, u'\xd6': 778, u'\xd7': 584, u'\xd8': 778, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 667, u'\xde': 667, u'\xdf': 611, u'\xe0': 556, u'\xe1': 556, u'\xe2': 556, u'\xe3': 556, u'\xe4': 556, u'\xe5': 556, u'\xe6': 889, u'\xe7': 500, u'\xe8': 556, u'\xe9': 556, u'\xea': 556, u'\xeb': 556, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 556, u'\xf1': 556, u'\xf2': 556, u'\xf3': 556, u'\xf4': 556, u'\xf5': 556, u'\xf6': 556, u'\xf7': 584, u'\xf8': 611, u'\xf9': 556, u'\xfa': 556, u'\xfb': 556, u'\xfc': 556, u'\xfd': 500, u'\xfe': 556, u'\xff': 500, u'\u0100': 667, u'\u0101': 556, u'\u0102': 667, u'\u0103': 556, u'\u0104': 667, u'\u0105': 556, u'\u0106': 722, u'\u0107': 500, u'\u010c': 722, u'\u010d': 500, u'\u010e': 722, u'\u010f': 643, u'\u0110': 722, u'\u0111': 556, u'\u0112': 667, u'\u0113': 556, u'\u0116': 667, u'\u0117': 556, u'\u0118': 667, u'\u0119': 556, u'\u011a': 667, u'\u011b': 556, u'\u011e': 778, u'\u011f': 556, u'\u0122': 778, u'\u0123': 556, u'\u012a': 278, u'\u012b': 278, u'\u012e': 278, u'\u012f': 222, u'\u0130': 278, u'\u0131': 278, u'\u0136': 667, u'\u0137': 500, u'\u0139': 556, u'\u013a': 222, u'\u013b': 556, u'\u013c': 222, u'\u013d': 556, u'\u013e': 299, u'\u0141': 556, u'\u0142': 222, u'\u0143': 722, u'\u0144': 556, u'\u0145': 722, u'\u0146': 556, u'\u0147': 722, u'\u0148': 556, u'\u014c': 778, u'\u014d': 556, u'\u0150': 778, u'\u0151': 556, u'\u0152': 1000, u'\u0153': 944, u'\u0154': 722, u'\u0155': 333, u'\u0156': 722, u'\u0157': 333, 
u'\u0158': 722, u'\u0159': 333, u'\u015a': 667, u'\u015b': 500, u'\u015e': 667, u'\u015f': 500, u'\u0160': 667, u'\u0161': 500, u'\u0162': 611, u'\u0163': 278, u'\u0164': 611, u'\u0165': 317, u'\u016a': 722, u'\u016b': 556, u'\u016e': 722, u'\u016f': 556, u'\u0170': 722, u'\u0171': 556, u'\u0172': 722, u'\u0173': 556, u'\u0178': 667, u'\u0179': 611, u'\u017a': 500, u'\u017b': 611, u'\u017c': 500, u'\u017d': 611, u'\u017e': 500, u'\u0192': 556, u'\u0218': 667, u'\u0219': 500, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 556, u'\u2014': 1000, u'\u2018': 222, u'\u2019': 222, u'\u201a': 222, u'\u201c': 333, u'\u201d': 333, u'\u201e': 333, u'\u2020': 556, u'\u2021': 556, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 476, u'\u2206': 612, u'\u2211': 600, u'\u2212': 584, u'\u221a': 453, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 471, u'\uf6c3': 250, u'\ufb01': 500, u'\ufb02': 500}),
'Symbol': ({'FontName': 'Symbol', 'FontBBox': (-180.0, -293.0, 1090.0, 1010.0), 'FontWeight': 'Medium', 'FontFamily': 'Symbol', 'Flags': 0, 'ItalicAngle': 0.0}, {u' ': 250, u'!': 333, u'#': 500, u'%': 833, u'&': 778, u'(': 333, u')': 333, u'+': 549, u',': 250, u'.': 250, u'/': 278, u'0': 500, u'1': 500, u'2': 500, u'3': 500, u'4': 500, u'5': 500, u'6': 500, u'7': 500, u'8': 500, u'9': 500, u':': 278, u';': 278, u'<': 549, u'=': 549, u'>': 549, u'?': 444, u'[': 333, u']': 333, u'_': 500, u'{': 480, u'|': 200, u'}': 480, u'\xac': 713, u'\xb0': 400, u'\xb1': 549, u'\xb5': 576, u'\xd7': 549, u'\xf7': 549, u'\u0192': 500, u'\u0391': 722, u'\u0392': 667, u'\u0393': 603, u'\u0395': 611, u'\u0396': 611, u'\u0397': 722, u'\u0398': 741, u'\u0399': 333, u'\u039a': 722, u'\u039b': 686, u'\u039c': 889, u'\u039d': 722, u'\u039e': 645, u'\u039f': 722, u'\u03a0': 768, u'\u03a1': 556, u'\u03a3': 592, u'\u03a4': 611, u'\u03a5': 690, u'\u03a6': 763, u'\u03a7': 722, u'\u03a8': 795, u'\u03b1': 631, u'\u03b2': 549, u'\u03b3': 411, u'\u03b4': 494, u'\u03b5': 439, u'\u03b6': 494, u'\u03b7': 603, u'\u03b8': 521, u'\u03b9': 329, u'\u03ba': 549, u'\u03bb': 549, u'\u03bd': 521, u'\u03be': 493, u'\u03bf': 549, u'\u03c0': 549, u'\u03c1': 549, u'\u03c2': 439, u'\u03c3': 603, u'\u03c4': 439, u'\u03c5': 576, u'\u03c6': 521, u'\u03c7': 549, u'\u03c8': 686, u'\u03c9': 686, u'\u03d1': 631, u'\u03d2': 620, u'\u03d5': 603, u'\u03d6': 713, u'\u2022': 460, u'\u2026': 1000, u'\u2032': 247, u'\u2033': 411, u'\u2044': 167, u'\u20ac': 750, u'\u2111': 686, u'\u2118': 987, u'\u211c': 795, u'\u2126': 768, u'\u2135': 823, u'\u2190': 987, u'\u2191': 603, u'\u2192': 987, u'\u2193': 603, u'\u2194': 1042, u'\u21b5': 658, u'\u21d0': 987, u'\u21d1': 603, u'\u21d2': 987, u'\u21d3': 603, u'\u21d4': 1042, u'\u2200': 713, u'\u2202': 494, u'\u2203': 549, u'\u2205': 823, u'\u2206': 612, u'\u2207': 713, u'\u2208': 713, u'\u2209': 713, u'\u220b': 439, u'\u220f': 823, u'\u2211': 713, u'\u2212': 549, u'\u2217': 500, u'\u221a': 549, u'\u221d': 713, u'\u221e': 713, u'\u2220': 768, u'\u2227': 603, u'\u2228': 603, u'\u2229': 768, u'\u222a': 768, u'\u222b': 274, u'\u2234': 863, u'\u223c': 549, u'\u2245': 549, u'\u2248': 549, u'\u2260': 549, u'\u2261': 549, u'\u2264': 549, u'\u2265': 549, u'\u2282': 713, u'\u2283': 713, u'\u2284': 713, u'\u2286': 713, u'\u2287': 713, u'\u2295': 768, u'\u2297': 768, u'\u22a5': 658, u'\u22c5': 250, u'\u2320': 686, u'\u2321': 686, u'\u2329': 329, u'\u232a': 329, u'\u25ca': 494, u'\u2660': 753, u'\u2663': 753, u'\u2665': 753, u'\u2666': 753, u'\uf6d9': 790, u'\uf6da': 790, u'\uf6db': 890, u'\uf8e5': 500, u'\uf8e6': 603, u'\uf8e7': 1000, u'\uf8e8': 790, u'\uf8e9': 790, u'\uf8ea': 786, u'\uf8eb': 384, u'\uf8ec': 384, u'\uf8ed': 384, u'\uf8ee': 384, u'\uf8ef': 384, u'\uf8f0': 384, u'\uf8f1': 494, u'\uf8f2': 494, u'\uf8f3': 494, u'\uf8f4': 494, u'\uf8f5': 686, u'\uf8f6': 384, u'\uf8f7': 384, u'\uf8f8': 384, u'\uf8f9': 384, u'\uf8fa': 384, u'\uf8fb': 384, u'\uf8fc': 494, u'\uf8fd': 494, u'\uf8fe': 494, u'\uf8ff': 790}),
'Times-Bold': ({'FontName': 'Times-Bold', 'Descent': -217.0, 'FontBBox': (-168.0, -218.0, 1000.0, 935.0), 'FontWeight': 'Bold', 'CapHeight': 676.0, 'FontFamily': 'Times', 'Flags': 0, 'XHeight': 461.0, 'ItalicAngle': 0.0, 'Ascent': 683.0}, {u' ': 250, u'!': 333, u'"': 555, u'#': 500, u'$': 500, u'%': 1000, u'&': 833, u"'": 278, u'(': 333, u')': 333, u'*': 500, u'+': 570, u',': 250, u'-': 333, u'.': 250, u'/': 278, u'0': 500, u'1': 500, u'2': 500, u'3': 500, u'4': 500, u'5': 500, u'6': 500, u'7': 500, u'8': 500, u'9': 500, u':': 333, u';': 333, u'<': 570, u'=': 570, u'>': 570, u'?': 500, u'@': 930, u'A': 722, u'B': 667, u'C': 722, u'D': 722, u'E': 667, u'F': 611, u'G': 778, u'H': 778, u'I': 389, u'J': 500, u'K': 778, u'L': 667, u'M': 944, u'N': 722, u'O': 778, u'P': 611, u'Q': 778, u'R': 722, u'S': 556, u'T': 667, u'U': 722, u'V': 722, u'W': 1000, u'X': 722, u'Y': 722, u'Z': 667, u'[': 333, u'\\': 278, u']': 333, u'^': 581, u'_': 500, u'`': 333, u'a': 500, u'b': 556, u'c': 444, u'd': 556, u'e': 444, u'f': 333, u'g': 500, u'h': 556, u'i': 278, u'j': 333, u'k': 556, u'l': 278, u'm': 833, u'n': 556, u'o': 500, u'p': 556, u'q': 556, u'r': 444, u's': 389, u't': 333, u'u': 556, u'v': 500, u'w': 722, u'x': 500, u'y': 500, u'z': 444, u'{': 394, u'|': 220, u'}': 394, u'~': 520, u'\xa1': 333, u'\xa2': 500, u'\xa3': 500, u'\xa4': 500, u'\xa5': 500, u'\xa6': 220, u'\xa7': 500, u'\xa8': 333, u'\xa9': 747, u'\xaa': 300, u'\xab': 500, u'\xac': 570, u'\xae': 747, u'\xaf': 333, u'\xb0': 400, u'\xb1': 570, u'\xb2': 300, u'\xb3': 300, u'\xb4': 333, u'\xb5': 556, u'\xb6': 540, u'\xb7': 250, u'\xb8': 333, u'\xb9': 300, u'\xba': 330, u'\xbb': 500, u'\xbc': 750, u'\xbd': 750, u'\xbe': 750, u'\xbf': 500, u'\xc0': 722, u'\xc1': 722, u'\xc2': 722, u'\xc3': 722, u'\xc4': 722, u'\xc5': 722, u'\xc6': 1000, u'\xc7': 722, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 389, u'\xcd': 389, u'\xce': 389, u'\xcf': 389, u'\xd0': 722, u'\xd1': 722, u'\xd2': 778, u'\xd3': 778, u'\xd4': 778, u'\xd5': 778, u'\xd6': 778, u'\xd7': 570, u'\xd8': 778, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 722, u'\xde': 611, u'\xdf': 556, u'\xe0': 500, u'\xe1': 500, u'\xe2': 500, u'\xe3': 500, u'\xe4': 500, u'\xe5': 500, u'\xe6': 722, u'\xe7': 444, u'\xe8': 444, u'\xe9': 444, u'\xea': 444, u'\xeb': 444, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 500, u'\xf1': 556, u'\xf2': 500, u'\xf3': 500, u'\xf4': 500, u'\xf5': 500, u'\xf6': 500, u'\xf7': 570, u'\xf8': 500, u'\xf9': 556, u'\xfa': 556, u'\xfb': 556, u'\xfc': 556, u'\xfd': 500, u'\xfe': 556, u'\xff': 500, u'\u0100': 722, u'\u0101': 500, u'\u0102': 722, u'\u0103': 500, u'\u0104': 722, u'\u0105': 500, u'\u0106': 722, u'\u0107': 444, u'\u010c': 722, u'\u010d': 444, u'\u010e': 722, u'\u010f': 672, u'\u0110': 722, u'\u0111': 556, u'\u0112': 667, u'\u0113': 444, u'\u0116': 667, u'\u0117': 444, u'\u0118': 667, u'\u0119': 444, u'\u011a': 667, u'\u011b': 444, u'\u011e': 778, u'\u011f': 500, u'\u0122': 778, u'\u0123': 500, u'\u012a': 389, u'\u012b': 278, u'\u012e': 389, u'\u012f': 278, u'\u0130': 389, u'\u0131': 278, u'\u0136': 778, u'\u0137': 556, u'\u0139': 667, u'\u013a': 278, u'\u013b': 667, u'\u013c': 278, u'\u013d': 667, u'\u013e': 394, u'\u0141': 667, u'\u0142': 278, u'\u0143': 722, u'\u0144': 556, u'\u0145': 722, u'\u0146': 556, u'\u0147': 722, u'\u0148': 556, u'\u014c': 778, u'\u014d': 500, u'\u0150': 778, u'\u0151': 500, u'\u0152': 1000, u'\u0153': 722, u'\u0154': 722, u'\u0155': 444, u'\u0156': 722, u'\u0157': 444, u'\u0158': 722, 
u'\u0159': 444, u'\u015a': 556, u'\u015b': 389, u'\u015e': 556, u'\u015f': 389, u'\u0160': 556, u'\u0161': 389, u'\u0162': 667, u'\u0163': 333, u'\u0164': 667, u'\u0165': 416, u'\u016a': 722, u'\u016b': 556, u'\u016e': 722, u'\u016f': 556, u'\u0170': 722, u'\u0171': 556, u'\u0172': 722, u'\u0173': 556, u'\u0178': 722, u'\u0179': 667, u'\u017a': 444, u'\u017b': 667, u'\u017c': 444, u'\u017d': 667, u'\u017e': 444, u'\u0192': 500, u'\u0218': 556, u'\u0219': 389, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 500, u'\u2014': 1000, u'\u2018': 333, u'\u2019': 333, u'\u201a': 333, u'\u201c': 500, u'\u201d': 500, u'\u201e': 500, u'\u2020': 500, u'\u2021': 500, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 494, u'\u2206': 612, u'\u2211': 600, u'\u2212': 570, u'\u221a': 549, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 494, u'\uf6c3': 250, u'\ufb01': 556, u'\ufb02': 556}),
'Times-BoldItalic': ({'FontName': 'Times-BoldItalic', 'Descent': -217.0, 'FontBBox': (-200.0, -218.0, 996.0, 921.0), 'FontWeight': 'Bold', 'CapHeight': 669.0, 'FontFamily': 'Times', 'Flags': 0, 'XHeight': 462.0, 'ItalicAngle': -15.0, 'Ascent': 683.0}, {u' ': 250, u'!': 389, u'"': 555, u'#': 500, u'$': 500, u'%': 833, u'&': 778, u"'": 278, u'(': 333, u')': 333, u'*': 500, u'+': 570, u',': 250, u'-': 333, u'.': 250, u'/': 278, u'0': 500, u'1': 500, u'2': 500, u'3': 500, u'4': 500, u'5': 500, u'6': 500, u'7': 500, u'8': 500, u'9': 500, u':': 333, u';': 333, u'<': 570, u'=': 570, u'>': 570, u'?': 500, u'@': 832, u'A': 667, u'B': 667, u'C': 667, u'D': 722, u'E': 667, u'F': 667, u'G': 722, u'H': 778, u'I': 389, u'J': 500, u'K': 667, u'L': 611, u'M': 889, u'N': 722, u'O': 722, u'P': 611, u'Q': 722, u'R': 667, u'S': 556, u'T': 611, u'U': 722, u'V': 667, u'W': 889, u'X': 667, u'Y': 611, u'Z': 611, u'[': 333, u'\\': 278, u']': 333, u'^': 570, u'_': 500, u'`': 333, u'a': 500, u'b': 500, u'c': 444, u'd': 500, u'e': 444, u'f': 333, u'g': 500, u'h': 556, u'i': 278, u'j': 278, u'k': 500, u'l': 278, u'm': 778, u'n': 556, u'o': 500, u'p': 500, u'q': 500, u'r': 389, u's': 389, u't': 278, u'u': 556, u'v': 444, u'w': 667, u'x': 500, u'y': 444, u'z': 389, u'{': 348, u'|': 220, u'}': 348, u'~': 570, u'\xa1': 389, u'\xa2': 500, u'\xa3': 500, u'\xa4': 500, u'\xa5': 500, u'\xa6': 220, u'\xa7': 500, u'\xa8': 333, u'\xa9': 747, u'\xaa': 266, u'\xab': 500, u'\xac': 606, u'\xae': 747, u'\xaf': 333, u'\xb0': 400, u'\xb1': 570, u'\xb2': 300, u'\xb3': 300, u'\xb4': 333, u'\xb5': 576, u'\xb6': 500, u'\xb7': 250, u'\xb8': 333, u'\xb9': 300, u'\xba': 300, u'\xbb': 500, u'\xbc': 750, u'\xbd': 750, u'\xbe': 750, u'\xbf': 500, u'\xc0': 667, u'\xc1': 667, u'\xc2': 667, u'\xc3': 667, u'\xc4': 667, u'\xc5': 667, u'\xc6': 944, u'\xc7': 667, u'\xc8': 667, u'\xc9': 667, u'\xca': 667, u'\xcb': 667, u'\xcc': 389, u'\xcd': 389, u'\xce': 389, u'\xcf': 389, u'\xd0': 722, u'\xd1': 722, u'\xd2': 722, u'\xd3': 722, u'\xd4': 722, u'\xd5': 722, u'\xd6': 722, u'\xd7': 570, u'\xd8': 722, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 611, u'\xde': 611, u'\xdf': 500, u'\xe0': 500, u'\xe1': 500, u'\xe2': 500, u'\xe3': 500, u'\xe4': 500, u'\xe5': 500, u'\xe6': 722, u'\xe7': 444, u'\xe8': 444, u'\xe9': 444, u'\xea': 444, u'\xeb': 444, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 500, u'\xf1': 556, u'\xf2': 500, u'\xf3': 500, u'\xf4': 500, u'\xf5': 500, u'\xf6': 500, u'\xf7': 570, u'\xf8': 500, u'\xf9': 556, u'\xfa': 556, u'\xfb': 556, u'\xfc': 556, u'\xfd': 444, u'\xfe': 500, u'\xff': 444, u'\u0100': 667, u'\u0101': 500, u'\u0102': 667, u'\u0103': 500, u'\u0104': 667, u'\u0105': 500, u'\u0106': 667, u'\u0107': 444, u'\u010c': 667, u'\u010d': 444, u'\u010e': 722, u'\u010f': 608, u'\u0110': 722, u'\u0111': 500, u'\u0112': 667, u'\u0113': 444, u'\u0116': 667, u'\u0117': 444, u'\u0118': 667, u'\u0119': 444, u'\u011a': 667, u'\u011b': 444, u'\u011e': 722, u'\u011f': 500, u'\u0122': 722, u'\u0123': 500, u'\u012a': 389, u'\u012b': 278, u'\u012e': 389, u'\u012f': 278, u'\u0130': 389, u'\u0131': 278, u'\u0136': 667, u'\u0137': 500, u'\u0139': 611, u'\u013a': 278, u'\u013b': 611, u'\u013c': 278, u'\u013d': 611, u'\u013e': 382, u'\u0141': 611, u'\u0142': 278, u'\u0143': 722, u'\u0144': 556, u'\u0145': 722, u'\u0146': 556, u'\u0147': 722, u'\u0148': 556, u'\u014c': 722, u'\u014d': 500, u'\u0150': 722, u'\u0151': 500, u'\u0152': 944, u'\u0153': 722, u'\u0154': 667, u'\u0155': 389, u'\u0156': 667, u'\u0157': 389, u'\u0158': 
667, u'\u0159': 389, u'\u015a': 556, u'\u015b': 389, u'\u015e': 556, u'\u015f': 389, u'\u0160': 556, u'\u0161': 389, u'\u0162': 611, u'\u0163': 278, u'\u0164': 611, u'\u0165': 366, u'\u016a': 722, u'\u016b': 556, u'\u016e': 722, u'\u016f': 556, u'\u0170': 722, u'\u0171': 556, u'\u0172': 722, u'\u0173': 556, u'\u0178': 611, u'\u0179': 611, u'\u017a': 389, u'\u017b': 611, u'\u017c': 389, u'\u017d': 611, u'\u017e': 389, u'\u0192': 500, u'\u0218': 556, u'\u0219': 389, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 500, u'\u2014': 1000, u'\u2018': 333, u'\u2019': 333, u'\u201a': 333, u'\u201c': 500, u'\u201d': 500, u'\u201e': 500, u'\u2020': 500, u'\u2021': 500, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 1000, u'\u2202': 494, u'\u2206': 612, u'\u2211': 600, u'\u2212': 606, u'\u221a': 549, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 494, u'\uf6c3': 250, u'\ufb01': 556, u'\ufb02': 556}),
'Times-Italic': ({'FontName': 'Times-Italic', 'Descent': -217.0, 'FontBBox': (-169.0, -217.0, 1010.0, 883.0), 'FontWeight': 'Medium', 'CapHeight': 653.0, 'FontFamily': 'Times', 'Flags': 0, 'XHeight': 441.0, 'ItalicAngle': -15.5, 'Ascent': 683.0}, {u' ': 250, u'!': 333, u'"': 420, u'#': 500, u'$': 500, u'%': 833, u'&': 778, u"'": 214, u'(': 333, u')': 333, u'*': 500, u'+': 675, u',': 250, u'-': 333, u'.': 250, u'/': 278, u'0': 500, u'1': 500, u'2': 500, u'3': 500, u'4': 500, u'5': 500, u'6': 500, u'7': 500, u'8': 500, u'9': 500, u':': 333, u';': 333, u'<': 675, u'=': 675, u'>': 675, u'?': 500, u'@': 920, u'A': 611, u'B': 611, u'C': 667, u'D': 722, u'E': 611, u'F': 611, u'G': 722, u'H': 722, u'I': 333, u'J': 444, u'K': 667, u'L': 556, u'M': 833, u'N': 667, u'O': 722, u'P': 611, u'Q': 722, u'R': 611, u'S': 500, u'T': 556, u'U': 722, u'V': 611, u'W': 833, u'X': 611, u'Y': 556, u'Z': 556, u'[': 389, u'\\': 278, u']': 389, u'^': 422, u'_': 500, u'`': 333, u'a': 500, u'b': 500, u'c': 444, u'd': 500, u'e': 444, u'f': 278, u'g': 500, u'h': 500, u'i': 278, u'j': 278, u'k': 444, u'l': 278, u'm': 722, u'n': 500, u'o': 500, u'p': 500, u'q': 500, u'r': 389, u's': 389, u't': 278, u'u': 500, u'v': 444, u'w': 667, u'x': 444, u'y': 444, u'z': 389, u'{': 400, u'|': 275, u'}': 400, u'~': 541, u'\xa1': 389, u'\xa2': 500, u'\xa3': 500, u'\xa4': 500, u'\xa5': 500, u'\xa6': 275, u'\xa7': 500, u'\xa8': 333, u'\xa9': 760, u'\xaa': 276, u'\xab': 500, u'\xac': 675, u'\xae': 760, u'\xaf': 333, u'\xb0': 400, u'\xb1': 675, u'\xb2': 300, u'\xb3': 300, u'\xb4': 333, u'\xb5': 500, u'\xb6': 523, u'\xb7': 250, u'\xb8': 333, u'\xb9': 300, u'\xba': 310, u'\xbb': 500, u'\xbc': 750, u'\xbd': 750, u'\xbe': 750, u'\xbf': 500, u'\xc0': 611, u'\xc1': 611, u'\xc2': 611, u'\xc3': 611, u'\xc4': 611, u'\xc5': 611, u'\xc6': 889, u'\xc7': 667, u'\xc8': 611, u'\xc9': 611, u'\xca': 611, u'\xcb': 611, u'\xcc': 333, u'\xcd': 333, u'\xce': 333, u'\xcf': 333, u'\xd0': 722, u'\xd1': 667, u'\xd2': 722, u'\xd3': 722, u'\xd4': 722, u'\xd5': 722, u'\xd6': 722, u'\xd7': 675, u'\xd8': 722, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 556, u'\xde': 611, u'\xdf': 500, u'\xe0': 500, u'\xe1': 500, u'\xe2': 500, u'\xe3': 500, u'\xe4': 500, u'\xe5': 500, u'\xe6': 667, u'\xe7': 444, u'\xe8': 444, u'\xe9': 444, u'\xea': 444, u'\xeb': 444, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 500, u'\xf1': 500, u'\xf2': 500, u'\xf3': 500, u'\xf4': 500, u'\xf5': 500, u'\xf6': 500, u'\xf7': 675, u'\xf8': 500, u'\xf9': 500, u'\xfa': 500, u'\xfb': 500, u'\xfc': 500, u'\xfd': 444, u'\xfe': 500, u'\xff': 444, u'\u0100': 611, u'\u0101': 500, u'\u0102': 611, u'\u0103': 500, u'\u0104': 611, u'\u0105': 500, u'\u0106': 667, u'\u0107': 444, u'\u010c': 667, u'\u010d': 444, u'\u010e': 722, u'\u010f': 544, u'\u0110': 722, u'\u0111': 500, u'\u0112': 611, u'\u0113': 444, u'\u0116': 611, u'\u0117': 444, u'\u0118': 611, u'\u0119': 444, u'\u011a': 611, u'\u011b': 444, u'\u011e': 722, u'\u011f': 500, u'\u0122': 722, u'\u0123': 500, u'\u012a': 333, u'\u012b': 278, u'\u012e': 333, u'\u012f': 278, u'\u0130': 333, u'\u0131': 278, u'\u0136': 667, u'\u0137': 444, u'\u0139': 556, u'\u013a': 278, u'\u013b': 556, u'\u013c': 278, u'\u013d': 611, u'\u013e': 300, u'\u0141': 556, u'\u0142': 278, u'\u0143': 667, u'\u0144': 500, u'\u0145': 667, u'\u0146': 500, u'\u0147': 667, u'\u0148': 500, u'\u014c': 722, u'\u014d': 500, u'\u0150': 722, u'\u0151': 500, u'\u0152': 944, u'\u0153': 667, u'\u0154': 611, u'\u0155': 389, u'\u0156': 611, u'\u0157': 389, u'\u0158': 611, 
u'\u0159': 389, u'\u015a': 500, u'\u015b': 389, u'\u015e': 500, u'\u015f': 389, u'\u0160': 500, u'\u0161': 389, u'\u0162': 556, u'\u0163': 278, u'\u0164': 556, u'\u0165': 300, u'\u016a': 722, u'\u016b': 500, u'\u016e': 722, u'\u016f': 500, u'\u0170': 722, u'\u0171': 500, u'\u0172': 722, u'\u0173': 500, u'\u0178': 556, u'\u0179': 556, u'\u017a': 389, u'\u017b': 556, u'\u017c': 389, u'\u017d': 556, u'\u017e': 389, u'\u0192': 500, u'\u0218': 500, u'\u0219': 389, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 500, u'\u2014': 889, u'\u2018': 333, u'\u2019': 333, u'\u201a': 333, u'\u201c': 556, u'\u201d': 556, u'\u201e': 556, u'\u2020': 500, u'\u2021': 500, u'\u2022': 350, u'\u2026': 889, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 980, u'\u2202': 476, u'\u2206': 612, u'\u2211': 600, u'\u2212': 675, u'\u221a': 453, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 471, u'\uf6c3': 250, u'\ufb01': 500, u'\ufb02': 500}),
'Times-Roman': ({'FontName': 'Times-Roman', 'Descent': -217.0, 'FontBBox': (-168.0, -218.0, 1000.0, 898.0), 'FontWeight': 'Roman', 'CapHeight': 662.0, 'FontFamily': 'Times', 'Flags': 0, 'XHeight': 450.0, 'ItalicAngle': 0.0, 'Ascent': 683.0}, {u' ': 250, u'!': 333, u'"': 408, u'#': 500, u'$': 500, u'%': 833, u'&': 778, u"'": 180, u'(': 333, u')': 333, u'*': 500, u'+': 564, u',': 250, u'-': 333, u'.': 250, u'/': 278, u'0': 500, u'1': 500, u'2': 500, u'3': 500, u'4': 500, u'5': 500, u'6': 500, u'7': 500, u'8': 500, u'9': 500, u':': 278, u';': 278, u'<': 564, u'=': 564, u'>': 564, u'?': 444, u'@': 921, u'A': 722, u'B': 667, u'C': 667, u'D': 722, u'E': 611, u'F': 556, u'G': 722, u'H': 722, u'I': 333, u'J': 389, u'K': 722, u'L': 611, u'M': 889, u'N': 722, u'O': 722, u'P': 556, u'Q': 722, u'R': 667, u'S': 556, u'T': 611, u'U': 722, u'V': 722, u'W': 944, u'X': 722, u'Y': 722, u'Z': 611, u'[': 333, u'\\': 278, u']': 333, u'^': 469, u'_': 500, u'`': 333, u'a': 444, u'b': 500, u'c': 444, u'd': 500, u'e': 444, u'f': 333, u'g': 500, u'h': 500, u'i': 278, u'j': 278, u'k': 500, u'l': 278, u'm': 778, u'n': 500, u'o': 500, u'p': 500, u'q': 500, u'r': 333, u's': 389, u't': 278, u'u': 500, u'v': 500, u'w': 722, u'x': 500, u'y': 500, u'z': 444, u'{': 480, u'|': 200, u'}': 480, u'~': 541, u'\xa1': 333, u'\xa2': 500, u'\xa3': 500, u'\xa4': 500, u'\xa5': 500, u'\xa6': 200, u'\xa7': 500, u'\xa8': 333, u'\xa9': 760, u'\xaa': 276, u'\xab': 500, u'\xac': 564, u'\xae': 760, u'\xaf': 333, u'\xb0': 400, u'\xb1': 564, u'\xb2': 300, u'\xb3': 300, u'\xb4': 333, u'\xb5': 500, u'\xb6': 453, u'\xb7': 250, u'\xb8': 333, u'\xb9': 300, u'\xba': 310, u'\xbb': 500, u'\xbc': 750, u'\xbd': 750, u'\xbe': 750, u'\xbf': 444, u'\xc0': 722, u'\xc1': 722, u'\xc2': 722, u'\xc3': 722, u'\xc4': 722, u'\xc5': 722, u'\xc6': 889, u'\xc7': 667, u'\xc8': 611, u'\xc9': 611, u'\xca': 611, u'\xcb': 611, u'\xcc': 333, u'\xcd': 333, u'\xce': 333, u'\xcf': 333, u'\xd0': 722, u'\xd1': 722, u'\xd2': 722, u'\xd3': 722, u'\xd4': 722, u'\xd5': 722, u'\xd6': 722, u'\xd7': 564, u'\xd8': 722, u'\xd9': 722, u'\xda': 722, u'\xdb': 722, u'\xdc': 722, u'\xdd': 722, u'\xde': 556, u'\xdf': 500, u'\xe0': 444, u'\xe1': 444, u'\xe2': 444, u'\xe3': 444, u'\xe4': 444, u'\xe5': 444, u'\xe6': 667, u'\xe7': 444, u'\xe8': 444, u'\xe9': 444, u'\xea': 444, u'\xeb': 444, u'\xec': 278, u'\xed': 278, u'\xee': 278, u'\xef': 278, u'\xf0': 500, u'\xf1': 500, u'\xf2': 500, u'\xf3': 500, u'\xf4': 500, u'\xf5': 500, u'\xf6': 500, u'\xf7': 564, u'\xf8': 500, u'\xf9': 500, u'\xfa': 500, u'\xfb': 500, u'\xfc': 500, u'\xfd': 500, u'\xfe': 500, u'\xff': 500, u'\u0100': 722, u'\u0101': 444, u'\u0102': 722, u'\u0103': 444, u'\u0104': 722, u'\u0105': 444, u'\u0106': 667, u'\u0107': 444, u'\u010c': 667, u'\u010d': 444, u'\u010e': 722, u'\u010f': 588, u'\u0110': 722, u'\u0111': 500, u'\u0112': 611, u'\u0113': 444, u'\u0116': 611, u'\u0117': 444, u'\u0118': 611, u'\u0119': 444, u'\u011a': 611, u'\u011b': 444, u'\u011e': 722, u'\u011f': 500, u'\u0122': 722, u'\u0123': 500, u'\u012a': 333, u'\u012b': 278, u'\u012e': 333, u'\u012f': 278, u'\u0130': 333, u'\u0131': 278, u'\u0136': 722, u'\u0137': 500, u'\u0139': 611, u'\u013a': 278, u'\u013b': 611, u'\u013c': 278, u'\u013d': 611, u'\u013e': 344, u'\u0141': 611, u'\u0142': 278, u'\u0143': 722, u'\u0144': 500, u'\u0145': 722, u'\u0146': 500, u'\u0147': 722, u'\u0148': 500, u'\u014c': 722, u'\u014d': 500, u'\u0150': 722, u'\u0151': 500, u'\u0152': 889, u'\u0153': 722, u'\u0154': 667, u'\u0155': 333, u'\u0156': 667, u'\u0157': 333, u'\u0158': 667, 
u'\u0159': 333, u'\u015a': 556, u'\u015b': 389, u'\u015e': 556, u'\u015f': 389, u'\u0160': 556, u'\u0161': 389, u'\u0162': 611, u'\u0163': 278, u'\u0164': 611, u'\u0165': 326, u'\u016a': 722, u'\u016b': 500, u'\u016e': 722, u'\u016f': 500, u'\u0170': 722, u'\u0171': 500, u'\u0172': 722, u'\u0173': 500, u'\u0178': 722, u'\u0179': 611, u'\u017a': 444, u'\u017b': 611, u'\u017c': 444, u'\u017d': 611, u'\u017e': 444, u'\u0192': 500, u'\u0218': 556, u'\u0219': 389, u'\u02c6': 333, u'\u02c7': 333, u'\u02d8': 333, u'\u02d9': 333, u'\u02da': 333, u'\u02db': 333, u'\u02dc': 333, u'\u02dd': 333, u'\u2013': 500, u'\u2014': 1000, u'\u2018': 333, u'\u2019': 333, u'\u201a': 333, u'\u201c': 444, u'\u201d': 444, u'\u201e': 444, u'\u2020': 500, u'\u2021': 500, u'\u2022': 350, u'\u2026': 1000, u'\u2030': 1000, u'\u2039': 333, u'\u203a': 333, u'\u2044': 167, u'\u2122': 980, u'\u2202': 476, u'\u2206': 612, u'\u2211': 600, u'\u2212': 564, u'\u221a': 453, u'\u2260': 549, u'\u2264': 549, u'\u2265': 549, u'\u25ca': 471, u'\uf6c3': 250, u'\ufb01': 556, u'\ufb02': 556}),
'ZapfDingbats': ({'FontName': 'ZapfDingbats', 'FontBBox': (-1.0, -143.0, 981.0, 820.0), 'FontWeight': 'Medium', 'FontFamily': 'ITC', 'Flags': 0, 'ItalicAngle': 0.0}, {u'\x01': 974, u'\x02': 961, u'\x03': 980, u'\x04': 719, u'\x05': 789, u'\x06': 494, u'\x07': 552, u'\x08': 537, u'\t': 577, u'\n': 692, u'\x0b': 960, u'\x0c': 939, u'\r': 549, u'\x0e': 855, u'\x0f': 911, u'\x10': 933, u'\x11': 945, u'\x12': 974, u'\x13': 755, u'\x14': 846, u'\x15': 762, u'\x16': 761, u'\x17': 571, u'\x18': 677, u'\x19': 763, u'\x1a': 760, u'\x1b': 759, u'\x1c': 754, u'\x1d': 786, u'\x1e': 788, u'\x1f': 788, u' ': 790, u'!': 793, u'"': 794, u'#': 816, u'$': 823, u'%': 789, u'&': 841, u"'": 823, u'(': 833, u')': 816, u'*': 831, u'+': 923, u',': 744, u'-': 723, u'.': 749, u'/': 790, u'0': 792, u'1': 695, u'2': 776, u'3': 768, u'4': 792, u'5': 759, u'6': 707, u'7': 708, u'8': 682, u'9': 701, u':': 826, u';': 815, u'<': 789, u'=': 789, u'>': 707, u'?': 687, u'@': 696, u'A': 689, u'B': 786, u'C': 787, u'D': 713, u'E': 791, u'F': 785, u'G': 791, u'H': 873, u'I': 761, u'J': 762, u'K': 759, u'L': 892, u'M': 892, u'N': 788, u'O': 784, u'Q': 438, u'R': 138, u'S': 277, u'T': 415, u'U': 509, u'V': 410, u'W': 234, u'X': 234, u'Y': 390, u'Z': 390, u'[': 276, u'\\': 276, u']': 317, u'^': 317, u'_': 334, u'`': 334, u'a': 392, u'b': 392, u'c': 668, u'd': 668, u'e': 732, u'f': 544, u'g': 544, u'h': 910, u'i': 911, u'j': 667, u'k': 760, u'l': 760, u'm': 626, u'n': 694, u'o': 595, u'p': 776, u'u': 690, u'v': 791, u'w': 790, u'x': 788, u'y': 788, u'z': 788, u'{': 788, u'|': 788, u'}': 788, u'~': 788, u'\x7f': 788, u'\x80': 788, u'\x81': 788, u'\x82': 788, u'\x83': 788, u'\x84': 788, u'\x85': 788, u'\x86': 788, u'\x87': 788, u'\x88': 788, u'\x89': 788, u'\x8a': 788, u'\x8b': 788, u'\x8c': 788, u'\x8d': 788, u'\x8e': 788, u'\x8f': 788, u'\x90': 788, u'\x91': 788, u'\x92': 788, u'\x93': 788, u'\x94': 788, u'\x95': 788, u'\x96': 788, u'\x97': 788, u'\x98': 788, u'\x99': 788, u'\x9a': 788, u'\x9b': 788, u'\x9c': 788, u'\x9d': 788, u'\x9e': 788, u'\x9f': 788, u'\xa0': 894, u'\xa1': 838, u'\xa2': 924, u'\xa3': 1016, u'\xa4': 458, u'\xa5': 924, u'\xa6': 918, u'\xa7': 927, u'\xa8': 928, u'\xa9': 928, u'\xaa': 834, u'\xab': 873, u'\xac': 828, u'\xad': 924, u'\xae': 917, u'\xaf': 930, u'\xb0': 931, u'\xb1': 463, u'\xb2': 883, u'\xb3': 836, u'\xb4': 867, u'\xb5': 696, u'\xb6': 874, u'\xb7': 760, u'\xb8': 946, u'\xb9': 865, u'\xba': 967, u'\xbb': 831, u'\xbc': 873, u'\xbd': 927, u'\xbe': 970, u'\xbf': 918, u'\xc0': 748, u'\xc1': 836, u'\xc2': 771, u'\xc3': 888, u'\xc4': 748, u'\xc5': 771, u'\xc6': 888, u'\xc7': 867, u'\xc8': 696, u'\xc9': 874, u'\xca': 974, u'\xcb': 762, u'\xcc': 759, u'\xcd': 509, u'\xce': 410}),
}
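The FONT_METRICS table above pairs each base-14 font's descriptor with a per-glyph width map expressed in 1/1000 em. A minimal sketch of how such a map is typically used to measure a string; the helper name and fallback width are illustrative, not part of this module:

# Advance width of a string from an AFM-style width map (illustrative).
# A glyph width of 556 set at 12 pt advances 556 * 12 / 1000 = 6.672 pt.
# The three widths below are copied from the Helvetica-Bold entry above.
WIDTHS = {u'H': 722, u'i': 278, u'!': 333}

def string_width(text, fontsize, widths, default=500):
    # Sum per-glyph advances; fall back to a default for unknown glyphs.
    return sum(widths.get(c, default) for c in text) * fontsize / 1000.0

print(string_width(u'Hi!', 12, WIDTHS))  # (722 + 278 + 333) * 12 / 1000 = 15.996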
@@ -1,242 +0,0 @@
#!/usr/bin/env python

""" Standard encoding tables used in PDF.

This table is extracted from PDF Reference Manual 1.6, pp.925
"D.1 Latin Character Set and Encodings"

"""

ENCODING = [
    # (name, std, mac, win, pdf)
    ('A', 65, 65, 65, 65),
    ('AE', 225, 174, 198, 198),
    ('Aacute', None, 231, 193, 193),
    ('Acircumflex', None, 229, 194, 194),
    ('Adieresis', None, 128, 196, 196),
    ('Agrave', None, 203, 192, 192),
    ('Aring', None, 129, 197, 197),
    ('Atilde', None, 204, 195, 195),
    ('B', 66, 66, 66, 66),
    ('C', 67, 67, 67, 67),
    ('Ccedilla', None, 130, 199, 199),
    ('D', 68, 68, 68, 68),
    ('E', 69, 69, 69, 69),
    ('Eacute', None, 131, 201, 201),
    ('Ecircumflex', None, 230, 202, 202),
    ('Edieresis', None, 232, 203, 203),
    ('Egrave', None, 233, 200, 200),
    ('Eth', None, None, 208, 208),
    ('Euro', None, None, 128, 160),
    ('F', 70, 70, 70, 70),
    ('G', 71, 71, 71, 71),
    ('H', 72, 72, 72, 72),
    ('I', 73, 73, 73, 73),
    ('Iacute', None, 234, 205, 205),
    ('Icircumflex', None, 235, 206, 206),
    ('Idieresis', None, 236, 207, 207),
    ('Igrave', None, 237, 204, 204),
    ('J', 74, 74, 74, 74),
    ('K', 75, 75, 75, 75),
    ('L', 76, 76, 76, 76),
    ('Lslash', 232, None, None, 149),
    ('M', 77, 77, 77, 77),
    ('N', 78, 78, 78, 78),
    ('Ntilde', None, 132, 209, 209),
    ('O', 79, 79, 79, 79),
    ('OE', 234, 206, 140, 150),
    ('Oacute', None, 238, 211, 211),
    ('Ocircumflex', None, 239, 212, 212),
    ('Odieresis', None, 133, 214, 214),
    ('Ograve', None, 241, 210, 210),
    ('Oslash', 233, 175, 216, 216),
    ('Otilde', None, 205, 213, 213),
    ('P', 80, 80, 80, 80),
    ('Q', 81, 81, 81, 81),
    ('R', 82, 82, 82, 82),
    ('S', 83, 83, 83, 83),
    ('Scaron', None, None, 138, 151),
    ('T', 84, 84, 84, 84),
    ('Thorn', None, None, 222, 222),
    ('U', 85, 85, 85, 85),
    ('Uacute', None, 242, 218, 218),
    ('Ucircumflex', None, 243, 219, 219),
    ('Udieresis', None, 134, 220, 220),
    ('Ugrave', None, 244, 217, 217),
    ('V', 86, 86, 86, 86),
    ('W', 87, 87, 87, 87),
    ('X', 88, 88, 88, 88),
    ('Y', 89, 89, 89, 89),
    ('Yacute', None, None, 221, 221),
    ('Ydieresis', None, 217, 159, 152),
    ('Z', 90, 90, 90, 90),
    ('Zcaron', None, None, 142, 153),
    ('a', 97, 97, 97, 97),
    ('aacute', None, 135, 225, 225),
    ('acircumflex', None, 137, 226, 226),
    ('acute', 194, 171, 180, 180),
    ('adieresis', None, 138, 228, 228),
    ('ae', 241, 190, 230, 230),
    ('agrave', None, 136, 224, 224),
    ('ampersand', 38, 38, 38, 38),
    ('aring', None, 140, 229, 229),
    ('asciicircum', 94, 94, 94, 94),
    ('asciitilde', 126, 126, 126, 126),
    ('asterisk', 42, 42, 42, 42),
    ('at', 64, 64, 64, 64),
    ('atilde', None, 139, 227, 227),
    ('b', 98, 98, 98, 98),
    ('backslash', 92, 92, 92, 92),
    ('bar', 124, 124, 124, 124),
    ('braceleft', 123, 123, 123, 123),
    ('braceright', 125, 125, 125, 125),
    ('bracketleft', 91, 91, 91, 91),
    ('bracketright', 93, 93, 93, 93),
    ('breve', 198, 249, None, 24),
    ('brokenbar', None, None, 166, 166),
    ('bullet', 183, 165, 149, 128),
    ('c', 99, 99, 99, 99),
    ('caron', 207, 255, None, 25),
    ('ccedilla', None, 141, 231, 231),
    ('cedilla', 203, 252, 184, 184),
    ('cent', 162, 162, 162, 162),
    ('circumflex', 195, 246, 136, 26),
    ('colon', 58, 58, 58, 58),
    ('comma', 44, 44, 44, 44),
    ('copyright', None, 169, 169, 169),
    ('currency', 168, 219, 164, 164),
    ('d', 100, 100, 100, 100),
    ('dagger', 178, 160, 134, 129),
    ('daggerdbl', 179, 224, 135, 130),
    ('degree', None, 161, 176, 176),
    ('dieresis', 200, 172, 168, 168),
    ('divide', None, 214, 247, 247),
    ('dollar', 36, 36, 36, 36),
    ('dotaccent', 199, 250, None, 27),
    ('dotlessi', 245, 245, None, 154),
    ('e', 101, 101, 101, 101),
    ('eacute', None, 142, 233, 233),
    ('ecircumflex', None, 144, 234, 234),
    ('edieresis', None, 145, 235, 235),
    ('egrave', None, 143, 232, 232),
    ('eight', 56, 56, 56, 56),
    ('ellipsis', 188, 201, 133, 131),
    ('emdash', 208, 209, 151, 132),
    ('endash', 177, 208, 150, 133),
    ('equal', 61, 61, 61, 61),
    ('eth', None, None, 240, 240),
    ('exclam', 33, 33, 33, 33),
    ('exclamdown', 161, 193, 161, 161),
    ('f', 102, 102, 102, 102),
    ('fi', 174, 222, None, 147),
    ('five', 53, 53, 53, 53),
    ('fl', 175, 223, None, 148),
    ('florin', 166, 196, 131, 134),
    ('four', 52, 52, 52, 52),
    ('fraction', 164, 218, None, 135),
    ('g', 103, 103, 103, 103),
    ('germandbls', 251, 167, 223, 223),
    ('grave', 193, 96, 96, 96),
    ('greater', 62, 62, 62, 62),
    ('guillemotleft', 171, 199, 171, 171),
    ('guillemotright', 187, 200, 187, 187),
    ('guilsinglleft', 172, 220, 139, 136),
    ('guilsinglright', 173, 221, 155, 137),
    ('h', 104, 104, 104, 104),
    ('hungarumlaut', 205, 253, None, 28),
    ('hyphen', 45, 45, 45, 45),
    ('i', 105, 105, 105, 105),
    ('iacute', None, 146, 237, 237),
    ('icircumflex', None, 148, 238, 238),
    ('idieresis', None, 149, 239, 239),
    ('igrave', None, 147, 236, 236),
    ('j', 106, 106, 106, 106),
    ('k', 107, 107, 107, 107),
    ('l', 108, 108, 108, 108),
    ('less', 60, 60, 60, 60),
    ('logicalnot', None, 194, 172, 172),
    ('lslash', 248, None, None, 155),
    ('m', 109, 109, 109, 109),
    ('macron', 197, 248, 175, 175),
    ('minus', None, None, None, 138),
    ('mu', None, 181, 181, 181),
    ('multiply', None, None, 215, 215),
    ('n', 110, 110, 110, 110),
    ('nbspace', None, 202, 160, None),
    ('nine', 57, 57, 57, 57),
    ('ntilde', None, 150, 241, 241),
    ('numbersign', 35, 35, 35, 35),
    ('o', 111, 111, 111, 111),
    ('oacute', None, 151, 243, 243),
    ('ocircumflex', None, 153, 244, 244),
    ('odieresis', None, 154, 246, 246),
    ('oe', 250, 207, 156, 156),
    ('ogonek', 206, 254, None, 29),
    ('ograve', None, 152, 242, 242),
    ('one', 49, 49, 49, 49),
    ('onehalf', None, None, 189, 189),
    ('onequarter', None, None, 188, 188),
    ('onesuperior', None, None, 185, 185),
    ('ordfeminine', 227, 187, 170, 170),
    ('ordmasculine', 235, 188, 186, 186),
    ('oslash', 249, 191, 248, 248),
    ('otilde', None, 155, 245, 245),
    ('p', 112, 112, 112, 112),
    ('paragraph', 182, 166, 182, 182),
    ('parenleft', 40, 40, 40, 40),
    ('parenright', 41, 41, 41, 41),
    ('percent', 37, 37, 37, 37),
    ('period', 46, 46, 46, 46),
    ('periodcentered', 180, 225, 183, 183),
    ('perthousand', 189, 228, 137, 139),
    ('plus', 43, 43, 43, 43),
    ('plusminus', None, 177, 177, 177),
    ('q', 113, 113, 113, 113),
    ('question', 63, 63, 63, 63),
    ('questiondown', 191, 192, 191, 191),
    ('quotedbl', 34, 34, 34, 34),
    ('quotedblbase', 185, 227, 132, 140),
    ('quotedblleft', 170, 210, 147, 141),
    ('quotedblright', 186, 211, 148, 142),
    ('quoteleft', 96, 212, 145, 143),
    ('quoteright', 39, 213, 146, 144),
    ('quotesinglbase', 184, 226, 130, 145),
    ('quotesingle', 169, 39, 39, 39),
    ('r', 114, 114, 114, 114),
    ('registered', None, 168, 174, 174),
    ('ring', 202, 251, None, 30),
    ('s', 115, 115, 115, 115),
    ('scaron', None, None, 154, 157),
    ('section', 167, 164, 167, 167),
    ('semicolon', 59, 59, 59, 59),
    ('seven', 55, 55, 55, 55),
    ('six', 54, 54, 54, 54),
    ('slash', 47, 47, 47, 47),
    ('space', 32, 32, 32, 32),
    ('sterling', 163, 163, 163, 163),
    ('t', 116, 116, 116, 116),
    ('thorn', None, None, 254, 254),
    ('three', 51, 51, 51, 51),
    ('threequarters', None, None, 190, 190),
    ('threesuperior', None, None, 179, 179),
    ('tilde', 196, 247, 152, 31),
    ('trademark', None, 170, 153, 146),
    ('two', 50, 50, 50, 50),
    ('twosuperior', None, None, 178, 178),
    ('u', 117, 117, 117, 117),
    ('uacute', None, 156, 250, 250),
    ('ucircumflex', None, 158, 251, 251),
    ('udieresis', None, 159, 252, 252),
    ('ugrave', None, 157, 249, 249),
    ('underscore', 95, 95, 95, 95),
    ('v', 118, 118, 118, 118),
    ('w', 119, 119, 119, 119),
    ('x', 120, 120, 120, 120),
    ('y', 121, 121, 121, 121),
    ('yacute', None, None, 253, 253),
    ('ydieresis', None, 216, 255, 255),
    ('yen', 165, 180, 165, 165),
    ('z', 122, 122, 122, 122),
    ('zcaron', None, None, 158, 158),
    ('zero', 48, 48, 48, 48),
]
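Each ENCODING row maps a PostScript glyph name to its code point in the Standard, MacRoman, WinAnsi, and PDFDoc encodings, with None where the glyph has no slot. A hedged sketch of the lookup this table supports; name2win is an illustrative name, not one the module defines:

# Glyph-name -> WinAnsi code, per the (name, std, mac, win, pdf) column order.
name2win = {name: win for (name, std, mac, win, pdf) in ENCODING
            if win is not None}

assert name2win['Euro'] == 128       # WinAnsi places the euro sign at 0x80
assert name2win['quotesingle'] == 39
assert 'fi' not in name2win          # the fi ligature has no WinAnsi slot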
@@ -1,735 +0,0 @@
#!/usr/bin/env python
from .utils import INF
from .utils import Plane
from .utils import get_bound
from .utils import uniq
from .utils import csort
from .utils import fsplit
from .utils import bbox2str
from .utils import matrix2str
from .utils import apply_matrix_pt


## IndexAssigner
##
class IndexAssigner:

    def __init__(self, index=0):
        self.index = index
        return

    def run(self, obj):
        if isinstance(obj, LTTextBox):
            obj.index = self.index
            self.index += 1
        elif isinstance(obj, LTTextGroup):
            for x in obj:
                self.run(x)
        return


## LAParams
##
class LAParams:

    def __init__(self,
                 line_overlap=0.5,
                 char_margin=2.0,
                 line_margin=0.5,
                 word_margin=0.1,
                 boxes_flow=0.5,
                 detect_vertical=False,
                 all_texts=False):
        self.line_overlap = line_overlap
        self.char_margin = char_margin
        self.line_margin = line_margin
        self.word_margin = word_margin
        self.boxes_flow = boxes_flow
        self.detect_vertical = detect_vertical
        self.all_texts = all_texts
        return

    def __repr__(self):
        return ('<LAParams: char_margin=%.1f, line_margin=%.1f, word_margin=%.1f all_texts=%r>' %
                (self.char_margin, self.line_margin, self.word_margin, self.all_texts))
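LAParams above bundles the tuning knobs consumed by the grouping passes later in this file: line_overlap and char_margin decide when two characters join a line, line_margin when neighboring lines join a text box, word_margin when a space is synthesized, and boxes_flow how finished boxes are ordered. A hedged usage sketch; the values are illustrative, not recommendations:

# The margins are relative to glyph/line size, not absolute units:
# char_margin=2.0 lets chars up to 2x the wider glyph apart share a line.
laparams = LAParams(char_margin=2.0,       # max horizontal gap, in glyph widths
                    line_margin=0.5,       # max vertical gap, in line heights
                    word_margin=0.1,       # smaller gaps do not become spaces
                    detect_vertical=True)  # also try tb-rl line grouping
print(laparams)  # <LAParams: char_margin=2.0, line_margin=0.5, word_margin=0.1 all_texts=False>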
## LTItem
##
class LTItem:

    def analyze(self, laparams):
        """Perform the layout analysis."""
        return


## LTText
##
class LTText:

    def __repr__(self):
        return ('<%s %r>' %
                (self.__class__.__name__, self.get_text()))

    def get_text(self):
        raise NotImplementedError


## LTComponent
##
class LTComponent(LTItem):

    def __init__(self, bbox):
        LTItem.__init__(self)
        self.set_bbox(bbox)
        return

    def __repr__(self):
        return ('<%s %s>' %
                (self.__class__.__name__, bbox2str(self.bbox)))

    # Disable comparison.
    def __lt__(self, _):
        raise ValueError
    def __le__(self, _):
        raise ValueError
    def __gt__(self, _):
        raise ValueError
    def __ge__(self, _):
        raise ValueError

    def set_bbox(self, bbox):
        (x0, y0, x1, y1) = bbox
        self.x0 = x0
        self.y0 = y0
        self.x1 = x1
        self.y1 = y1
        self.width = x1-x0
        self.height = y1-y0
        self.bbox = bbox
        return

    def is_empty(self):
        return self.width <= 0 or self.height <= 0

    def is_hoverlap(self, obj):
        assert isinstance(obj, LTComponent)
        return obj.x0 <= self.x1 and self.x0 <= obj.x1

    def hdistance(self, obj):
        assert isinstance(obj, LTComponent)
        if self.is_hoverlap(obj):
            return 0
        else:
            return min(abs(self.x0-obj.x1), abs(self.x1-obj.x0))

    def hoverlap(self, obj):
        assert isinstance(obj, LTComponent)
        if self.is_hoverlap(obj):
            return min(abs(self.x0-obj.x1), abs(self.x1-obj.x0))
        else:
            return 0

    def is_voverlap(self, obj):
        assert isinstance(obj, LTComponent)
        return obj.y0 <= self.y1 and self.y0 <= obj.y1

    def vdistance(self, obj):
        assert isinstance(obj, LTComponent)
        if self.is_voverlap(obj):
            return 0
        else:
            return min(abs(self.y0-obj.y1), abs(self.y1-obj.y0))

    def voverlap(self, obj):
        assert isinstance(obj, LTComponent)
        if self.is_voverlap(obj):
            return min(abs(self.y0-obj.y1), abs(self.y1-obj.y0))
        else:
            return 0
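The is_hoverlap/hdistance/hoverlap family above (and its vertical mirror) is plain interval arithmetic on bounding boxes: two components overlap on an axis when their intervals intersect, and the distance/overlap helpers report the gap or intersection length. A small worked check with illustrative boxes:

# a spans x=[0,10], b spans x=[12,20]; both span y=[0,5].
a = LTComponent((0, 0, 10, 5))
b = LTComponent((12, 0, 20, 5))

assert not a.is_hoverlap(b)   # x-intervals [0,10] and [12,20] are disjoint
assert a.hdistance(b) == 2    # gap between a.x1=10 and b.x0=12
assert a.is_voverlap(b)       # identical y-intervals intersect
assert a.voverlap(b) == 5     # min(|y0-obj.y1|, |y1-obj.y0|) = min(5, 5)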
## LTCurve
##
class LTCurve(LTComponent):

    def __init__(self, linewidth, pts):
        LTComponent.__init__(self, get_bound(pts))
        self.pts = pts
        self.linewidth = linewidth
        return

    def get_pts(self):
        return ','.join('%.3f,%.3f' % p for p in self.pts)


## LTLine
##
class LTLine(LTCurve):

    def __init__(self, linewidth, p0, p1):
        LTCurve.__init__(self, linewidth, [p0, p1])
        return


## LTRect
##
class LTRect(LTCurve):

    def __init__(self, linewidth, bbox):
        (x0, y0, x1, y1) = bbox
        LTCurve.__init__(self, linewidth, [(x0, y0), (x1, y0), (x1, y1), (x0, y1)])
        return


## LTImage
##
class LTImage(LTComponent):

    def __init__(self, name, stream, bbox):
        LTComponent.__init__(self, bbox)
        self.name = name
        self.stream = stream
        self.srcsize = (stream.get_any(('W', 'Width')),
                        stream.get_any(('H', 'Height')))
        self.imagemask = stream.get_any(('IM', 'ImageMask'))
        self.bits = stream.get_any(('BPC', 'BitsPerComponent'), 1)
        self.colorspace = stream.get_any(('CS', 'ColorSpace'))
        if not isinstance(self.colorspace, list):
            self.colorspace = [self.colorspace]
        return

    def __repr__(self):
        return ('<%s(%s) %s %r>' %
                (self.__class__.__name__, self.name,
                 bbox2str(self.bbox), self.srcsize))


## LTAnno
##
class LTAnno(LTItem, LTText):

    def __init__(self, text):
        self._text = text
        return

    def get_text(self):
        return self._text
## LTChar
##
class LTChar(LTComponent, LTText):

    def __init__(self, matrix, font, fontsize, scaling, rise,
                 text, textwidth, textdisp):
        LTText.__init__(self)
        self._text = text
        self.matrix = matrix
        self.fontname = font.fontname
        self.adv = textwidth * fontsize * scaling
        # compute the boundary rectangle.
        if font.is_vertical():
            # vertical
            width = font.get_width() * fontsize
            (vx, vy) = textdisp
            if vx is None:
                vx = width * 0.5
            else:
                vx = vx * fontsize * .001
            vy = (1000 - vy) * fontsize * .001
            tx = -vx
            ty = vy + rise
            bll = (tx, ty+self.adv)
            bur = (tx+width, ty)
        else:
            # horizontal
            height = font.get_height() * fontsize
            descent = font.get_descent() * fontsize
            ty = descent + rise
            bll = (0, ty)
            bur = (self.adv, ty+height)
        (a, b, c, d, e, f) = self.matrix
        self.upright = (0 < a*d*scaling and b*c <= 0)
        (x0, y0) = apply_matrix_pt(self.matrix, bll)
        (x1, y1) = apply_matrix_pt(self.matrix, bur)
        if x1 < x0:
            (x0, x1) = (x1, x0)
        if y1 < y0:
            (y0, y1) = (y1, y0)
        LTComponent.__init__(self, (x0, y0, x1, y1))
        if font.is_vertical():
            self.size = self.width
        else:
            self.size = self.height
        return

    def __repr__(self):
        return ('<%s %s matrix=%s font=%r adv=%s text=%r>' %
                (self.__class__.__name__, bbox2str(self.bbox),
                 matrix2str(self.matrix), self.fontname, self.adv,
                 self.get_text()))

    def get_text(self):
        return self._text

    def is_compatible(self, obj):
        """Returns True if two characters can coexist in the same line."""
        return True
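Before the text matrix is applied, LTChar derives a glyph box in text space: the advance is textwidth * fontsize * scaling, and for horizontal writing the box runs from the descent line up to the font height, shifted by rise. A worked instance of that arithmetic; all numbers are illustrative:

# Horizontal branch of LTChar.__init__ for a 12 pt glyph with
# textwidth 0.556, font height 1.0, descent -0.2, no rise:
fontsize, scaling, rise = 12.0, 1.0, 0.0
adv = 0.556 * fontsize * scaling   # 6.672 -- advance width in text space
ty = -0.2 * fontsize + rise        # descent drops the box below the baseline
bll, bur = (0, ty), (adv, ty + 1.0 * fontsize)
print(bll, bur)  # (0, -2.4) and (6.672, 9.6), then mapped through self.matrix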
## LTContainer
##
class LTContainer(LTComponent):

    def __init__(self, bbox):
        LTComponent.__init__(self, bbox)
        self._objs = []
        return

    def __iter__(self):
        return iter(self._objs)

    def __len__(self):
        return len(self._objs)

    def add(self, obj):
        self._objs.append(obj)
        return

    def extend(self, objs):
        for obj in objs:
            self.add(obj)
        return

    def analyze(self, laparams):
        for obj in self._objs:
            obj.analyze(laparams)
        return


## LTExpandableContainer
##
class LTExpandableContainer(LTContainer):

    def __init__(self):
        LTContainer.__init__(self, (+INF, +INF, -INF, -INF))
        return

    def add(self, obj):
        LTContainer.add(self, obj)
        self.set_bbox((min(self.x0, obj.x0), min(self.y0, obj.y0),
                       max(self.x1, obj.x1), max(self.y1, obj.y1)))
        return


## LTTextContainer
##
class LTTextContainer(LTExpandableContainer, LTText):

    def __init__(self):
        LTText.__init__(self)
        LTExpandableContainer.__init__(self)
        return

    def get_text(self):
        return ''.join(obj.get_text() for obj in self if isinstance(obj, LTText))


## LTTextLine
##
class LTTextLine(LTTextContainer):

    def __init__(self, word_margin):
        LTTextContainer.__init__(self)
        self.word_margin = word_margin
        return

    def __repr__(self):
        return ('<%s %s %r>' %
                (self.__class__.__name__, bbox2str(self.bbox),
                 self.get_text()))

    def analyze(self, laparams):
        LTTextContainer.analyze(self, laparams)
        LTContainer.add(self, LTAnno('\n'))
        return

    def find_neighbors(self, plane, ratio):
        raise NotImplementedError
class LTTextLineHorizontal(LTTextLine):

    def __init__(self, word_margin):
        LTTextLine.__init__(self, word_margin)
        self._x1 = +INF
        return

    def add(self, obj):
        if isinstance(obj, LTChar) and self.word_margin:
            margin = self.word_margin * max(obj.width, obj.height)
            if self._x1 < obj.x0-margin:
                LTContainer.add(self, LTAnno(' '))
        self._x1 = obj.x1
        LTTextLine.add(self, obj)
        return

    def find_neighbors(self, plane, ratio):
        d = ratio*self.height
        objs = plane.find((self.x0, self.y0-d, self.x1, self.y1+d))
        return [obj for obj in objs
                if (isinstance(obj, LTTextLineHorizontal) and
                    abs(obj.height-self.height) < d and
                    (abs(obj.x0-self.x0) < d or
                     abs(obj.x1-self.x1) < d))]


class LTTextLineVertical(LTTextLine):

    def __init__(self, word_margin):
        LTTextLine.__init__(self, word_margin)
        self._y0 = -INF
        return

    def add(self, obj):
        if isinstance(obj, LTChar) and self.word_margin:
            margin = self.word_margin * max(obj.width, obj.height)
            if obj.y1+margin < self._y0:
                LTContainer.add(self, LTAnno(' '))
        self._y0 = obj.y0
        LTTextLine.add(self, obj)
        return

    def find_neighbors(self, plane, ratio):
        d = ratio*self.width
        objs = plane.find((self.x0-d, self.y0, self.x1+d, self.y1))
        return [obj for obj in objs
                if (isinstance(obj, LTTextLineVertical) and
                    abs(obj.width-self.width) < d and
                    (abs(obj.y0-self.y0) < d or
                     abs(obj.y1-self.y1) < d))]
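LTTextLineHorizontal.add above inserts an LTAnno(' ') whenever the next character starts more than word_margin times the glyph size past where the previous one ended; find_neighbors then gathers lines of similar height whose left or right edges roughly align. The space rule in numbers; all values illustrative:

# Previous char ended at x1=50; the next glyph is 10 units wide, starting at 52.
word_margin = 0.1
prev_x1, next_x0, glyph_size = 50.0, 52.0, 10.0
margin = word_margin * glyph_size             # 1.0
inserts_space = prev_x1 < next_x0 - margin    # 50 < 51 -> emit LTAnno(' ')
print(inserts_space)  # True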
## LTTextBox
##
## A set of text objects that are grouped within
## a certain rectangular area.
##
class LTTextBox(LTTextContainer):

    def __init__(self):
        LTTextContainer.__init__(self)
        self.index = -1
        return

    def __repr__(self):
        return ('<%s(%s) %s %r>' %
                (self.__class__.__name__,
                 self.index, bbox2str(self.bbox), self.get_text()))


class LTTextBoxHorizontal(LTTextBox):

    def analyze(self, laparams):
        LTTextBox.analyze(self, laparams)
        self._objs = csort(self._objs, key=lambda obj: -obj.y1)
        return

    def get_writing_mode(self):
        return 'lr-tb'


class LTTextBoxVertical(LTTextBox):

    def analyze(self, laparams):
        LTTextBox.analyze(self, laparams)
        self._objs = csort(self._objs, key=lambda obj: -obj.x1)
        return

    def get_writing_mode(self):
        return 'tb-rl'


## LTTextGroup
##
class LTTextGroup(LTTextContainer):

    def __init__(self, objs):
        LTTextContainer.__init__(self)
        self.extend(objs)
        return


class LTTextGroupLRTB(LTTextGroup):

    def analyze(self, laparams):
        LTTextGroup.analyze(self, laparams)
        # reorder the objects from top-left to bottom-right.
        self._objs = csort(self._objs, key=lambda obj:
                           (1-laparams.boxes_flow)*(obj.x0) -
                           (1+laparams.boxes_flow)*(obj.y0+obj.y1))
        return


class LTTextGroupTBRL(LTTextGroup):

    def analyze(self, laparams):
        LTTextGroup.analyze(self, laparams)
        # reorder the objects from top-right to bottom-left.
        self._objs = csort(self._objs, key=lambda obj:
                           -(1+laparams.boxes_flow)*(obj.x0+obj.x1)
                           - (1-laparams.boxes_flow)*(obj.y1))
        return
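The LRTB key above blends the two axes: with boxes_flow = f, a box sorts by (1-f)*x0 - (1+f)*(y0+y1), so f near 1 reads strictly top-to-bottom while f near 0 gives the left edge equal weight. A worked comparison showing the order flip; coordinates are illustrative, with y growing upward as in PDF space:

# Same expression as LTTextGroupLRTB.analyze above.
def lrtb_key(x0, y0, y1, f):
    return (1 - f) * x0 - (1 + f) * (y0 + y1)

left_lower  = (0, 100, 110)    # left column, lower on the page
right_upper = (300, 200, 210)  # right column, higher on the page
for f in (0.0, 1.0):
    first = min((left_lower, right_upper), key=lambda b: lrtb_key(*b, f=f))
    print(f, 'left' if first is left_lower else 'right')
# f=0.0 -> left first (x0 dominates); f=1.0 -> right first (height dominates)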
## LTLayoutContainer
##
class LTLayoutContainer(LTContainer):

    def __init__(self, bbox):
        LTContainer.__init__(self, bbox)
        self.groups = None
        return

    # group_objects: group text objects into textlines.
    def group_objects(self, laparams, objs):
        obj0 = None
        line = None
        for obj1 in objs:
            if obj0 is not None:
                # halign: obj0 and obj1 are horizontally aligned.
                #
                #   +------+ - - -
                #   | obj0 | - - +------+   -
                #   |      |     | obj1 |   | (line_overlap)
                #   +------+ - - |      |   -
                #          - - - +------+
                #
                #          |<--->|
                #        (char_margin)
                halign = (obj0.is_compatible(obj1) and
                          obj0.is_voverlap(obj1) and
                          (min(obj0.height, obj1.height) * laparams.line_overlap <
                           obj0.voverlap(obj1)) and
                          (obj0.hdistance(obj1) <
                           max(obj0.width, obj1.width) * laparams.char_margin))

                # valign: obj0 and obj1 are vertically aligned.
                #
                #   +------+
                #   | obj0 |
                #   |      |
                #   +------+ - - -
                #     |    |     | (char_margin)
                #     +------+ - -
                #     | obj1 |
                #     |      |
                #     +------+
                #
                #     |<-->|
                #   (line_overlap)
                valign = (laparams.detect_vertical and
                          obj0.is_compatible(obj1) and
                          obj0.is_hoverlap(obj1) and
                          (min(obj0.width, obj1.width) * laparams.line_overlap <
                           obj0.hoverlap(obj1)) and
                          (obj0.vdistance(obj1) <
                           max(obj0.height, obj1.height) * laparams.char_margin))

                if ((halign and isinstance(line, LTTextLineHorizontal)) or
                        (valign and isinstance(line, LTTextLineVertical))):
                    line.add(obj1)
                elif line is not None:
                    yield line
                    line = None
                else:
                    if valign and not halign:
                        line = LTTextLineVertical(laparams.word_margin)
                        line.add(obj0)
                        line.add(obj1)
                    elif halign and not valign:
                        line = LTTextLineHorizontal(laparams.word_margin)
                        line.add(obj0)
                        line.add(obj1)
                    else:
                        line = LTTextLineHorizontal(laparams.word_margin)
                        line.add(obj0)
                        yield line
                        line = None
            obj0 = obj1
        if line is None:
            line = LTTextLineHorizontal(laparams.word_margin)
            line.add(obj0)
        yield line
        return
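
    # Illustrative example of the thresholds above (added comment; the
    # character sizes are hypothetical): with line_overlap=0.5 and
    # char_margin=2.0, two 10pt-high chars of width 8pt whose vertical
    # overlap is 6pt (> 0.5*10) and whose horizontal gap is 15pt
    # (< 2.0*8) are joined into the same horizontal line.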
    # group_textlines: group neighboring lines into textboxes.
    def group_textlines(self, laparams, lines):
        plane = Plane(self.bbox)
        plane.extend(lines)
        boxes = {}
        for line in lines:
            neighbors = line.find_neighbors(plane, laparams.line_margin)
            if line not in neighbors:
                continue
            members = []
            for obj1 in neighbors:
                members.append(obj1)
                if obj1 in boxes:
                    members.extend(boxes.pop(obj1))
            if isinstance(line, LTTextLineHorizontal):
                box = LTTextBoxHorizontal()
            else:
                box = LTTextBoxVertical()
            for obj in uniq(members):
                box.add(obj)
                boxes[obj] = box
        done = set()
        for line in lines:
            if line not in boxes:
                continue
            box = boxes[line]
            if box in done:
                continue
            done.add(box)
            if not box.is_empty():
                yield box
        return

    # group_textboxes: group textboxes hierarchically.
    def group_textboxes(self, laparams, boxes):
        assert boxes

        def dist(obj1, obj2):
            """A distance function between two TextBoxes.

            Consider the bounding rectangle for obj1 and obj2.
            Return its area less the areas of obj1 and obj2,
            shown as 'www' below. This value may be negative.
                     +------+..........+ (x1, y1)
                     | obj1 |wwwwwwwwww:
                     +------+www+------+
                     :wwwwwwwwww| obj2 |
            (x0, y0) +..........+------+
            """
            x0 = min(obj1.x0, obj2.x0)
            y0 = min(obj1.y0, obj2.y0)
            x1 = max(obj1.x1, obj2.x1)
            y1 = max(obj1.y1, obj2.y1)
            return ((x1-x0)*(y1-y0)
                    - obj1.width*obj1.height - obj2.width*obj2.height)
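
        # Added worked example: for two 10x10 boxes at (0, 0, 10, 10) and
        # (20, 0, 30, 10), the bounding rectangle is 30*10 = 300, so
        # dist() returns 300 - 100 - 100 = 100; for touching boxes the
        # value drops to 0, and overlapping boxes can go negative.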
        def isany(obj1, obj2):
            """Check if there's any other object between obj1 and obj2."""
            x0 = min(obj1.x0, obj2.x0)
            y0 = min(obj1.y0, obj2.y0)
            x1 = max(obj1.x1, obj2.x1)
            y1 = max(obj1.y1, obj2.y1)
            objs = set(plane.find((x0, y0, x1, y1)))
            return objs.difference((obj1, obj2))

        def key_obj(t):
            (c, d, _, _) = t
            return (c, d)

        # XXX this still takes O(n^2) :(
        dists = []
        for i in range(len(boxes)):
            obj1 = boxes[i]
            for j in range(i+1, len(boxes)):
                obj2 = boxes[j]
                dists.append((0, dist(obj1, obj2), obj1, obj2))
        # We could use dists.sort(), but it would randomize the test result.
        dists = csort(dists, key=key_obj)
        plane = Plane(self.bbox)
        plane.extend(boxes)
        while dists:
            (c, d, obj1, obj2) = dists.pop(0)
            if c == 0 and isany(obj1, obj2):
                dists.append((1, d, obj1, obj2))
                continue
            if (isinstance(obj1, (LTTextBoxVertical, LTTextGroupTBRL)) or
                    isinstance(obj2, (LTTextBoxVertical, LTTextGroupTBRL))):
                group = LTTextGroupTBRL([obj1, obj2])
            else:
                group = LTTextGroupLRTB([obj1, obj2])
            plane.remove(obj1)
            plane.remove(obj2)
            dists = [(c, d, obj1, obj2) for (c, d, obj1, obj2) in dists
                     if (obj1 in plane and obj2 in plane)]
            for other in plane:
                dists.append((0, dist(group, other), group, other))
            dists = csort(dists, key=key_obj)
            plane.add(group)
        assert len(plane) == 1
        return list(plane)

    def analyze(self, laparams):
        # textobjs is a list of LTChar objects, i.e.
        # it has all the individual characters in the page.
        (textobjs, otherobjs) = fsplit(lambda obj: isinstance(obj, LTChar), self)
        for obj in otherobjs:
            obj.analyze(laparams)
        if not textobjs:
            return
        textlines = list(self.group_objects(laparams, textobjs))
        (empties, textlines) = fsplit(lambda obj: obj.is_empty(), textlines)
        for obj in empties:
            obj.analyze(laparams)
        textboxes = list(self.group_textlines(laparams, textlines))
        if -1 <= laparams.boxes_flow <= +1 and textboxes:
            self.groups = self.group_textboxes(laparams, textboxes)
            assigner = IndexAssigner()
            for group in self.groups:
                group.analyze(laparams)
                assigner.run(group)
            textboxes.sort(key=lambda box: box.index)
        else:
            def getkey(box):
                if isinstance(box, LTTextBoxVertical):
                    return (0, -box.x1, box.y0)
                else:
                    return (1, box.y0, box.x0)
            textboxes.sort(key=getkey)
        self._objs = textboxes + otherobjs + empties
        return
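
## Added usage sketch for the analysis pipeline above (illustrative; it
## assumes the standard pdfminer-style entry points PDFPageAggregator and
## PDFPage.get_pages, which may need adjusting to this tree's layout):
##
##   from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
##   from pdfminer.converter import PDFPageAggregator
##   from pdfminer.layout import LAParams
##   from pdfminer.pdfpage import PDFPage
##
##   rsrcmgr = PDFResourceManager()
##   device = PDFPageAggregator(rsrcmgr, laparams=LAParams())
##   interpreter = PDFPageInterpreter(rsrcmgr, device)
##   with open('sample.pdf', 'rb') as fp:
##       for page in PDFPage.get_pages(fp):
##           interpreter.process_page(page)
##           ltpage = device.get_result()   # an LTPage, already analyze()d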
## LTFigure
##
class LTFigure(LTLayoutContainer):

    def __init__(self, name, bbox, matrix):
        self.name = name
        self.matrix = matrix
        (x, y, w, h) = bbox
        bbox = get_bound(apply_matrix_pt(matrix, (p, q))
                         for (p, q) in ((x, y), (x+w, y), (x, y+h), (x+w, y+h)))
        LTLayoutContainer.__init__(self, bbox)
        return

    def __repr__(self):
        return ('<%s(%s) %s matrix=%s>' %
                (self.__class__.__name__, self.name,
                 bbox2str(self.bbox), matrix2str(self.matrix)))

    def analyze(self, laparams):
        if not laparams.all_texts:
            return
        LTLayoutContainer.analyze(self, laparams)
        return


## LTPage
##
class LTPage(LTLayoutContainer):

    def __init__(self, pageid, bbox, rotate=0):
        LTLayoutContainer.__init__(self, bbox)
        self.pageid = pageid
        self.rotate = rotate
        return

    def __repr__(self):
        return ('<%s(%r) %s rotate=%r>' %
                (self.__class__.__name__, self.pageid,
                 bbox2str(self.bbox), self.rotate))
@@ -1,106 +0,0 @@
#!/usr/bin/env python
from io import BytesIO


class CorruptDataError(Exception):
    pass


## LZWDecoder
##
class LZWDecoder:

    def __init__(self, fp):
        self.fp = fp
        self.buff = 0
        self.bpos = 8
        self.nbits = 9
        self.table = None
        self.prevbuf = None
        return

    def readbits(self, bits):
        v = 0
        while 1:
            # the number of remaining bits we can get from the current buffer.
            r = 8-self.bpos
            if bits <= r:
                # |-----8-bits-----|
                # |-bpos-|-bits-|  |
                # |      |----r----|
                v = (v << bits) | ((self.buff >> (r-bits)) & ((1 << bits)-1))
                self.bpos += bits
                break
            else:
                # |-----8-bits-----|
                # |-bpos-|---bits----...
                # |      |----r----|
                v = (v << r) | (self.buff & ((1 << r)-1))
                bits -= r
                x = self.fp.read(1)
                if not x:
                    raise EOFError
                self.buff = x[0]
                self.bpos = 0
        return v

    def feed(self, code):
        x = b''
        if code == 256:
            self.table = [bytes([c]) for c in range(256)]  # 0-255
            self.table.append(None)  # 256
            self.table.append(None)  # 257
            self.prevbuf = b''
            self.nbits = 9
        elif code == 257:
            pass
        elif not self.prevbuf:
            x = self.prevbuf = self.table[code]
        else:
            if code < len(self.table):
                x = self.table[code]
                self.table.append(self.prevbuf+x[:1])
            elif code == len(self.table):
                self.table.append(self.prevbuf+self.prevbuf[:1])
                x = self.table[code]
            else:
                raise CorruptDataError
            l = len(self.table)
            if l == 511:
                self.nbits = 10
            elif l == 1023:
                self.nbits = 11
            elif l == 2047:
                self.nbits = 12
            self.prevbuf = x
        return x
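
    # Added note on the width checks above: after a CLEAR (code 256) the
    # table holds 258 entries (256 literals plus the CLEAR and EOD
    # markers) and each decoded code appends one more. PDF's default
    # EarlyChange behaviour bumps the code width one entry early, hence
    # the growth to 10/11/12 bits at table sizes 511, 1023 and 2047.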
    def run(self):
        while 1:
            try:
                code = self.readbits(self.nbits)
            except EOFError:
                break
            try:
                x = self.feed(code)
            except CorruptDataError:
                # just ignore corrupt data and stop yielding there
                break
            yield x
            #logging.debug('nbits=%d, code=%d, output=%r, table=%r' %
            #              (self.nbits, code, x, self.table[258:]))
        return


# lzwdecode
def lzwdecode(data):
    """
    >>> lzwdecode(bytes.fromhex('800b6050220c0c8501'))
    b'-----A---B'
    """
    fp = BytesIO(data)
    return b''.join(LZWDecoder(fp).run())


if __name__ == '__main__':
    import doctest
    print('pdfminer.lzw', doctest.testmod())
@@ -1,34 +0,0 @@
#!/usr/bin/env python
from .psparser import LIT


## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')


class PDFColorSpace:

    def __init__(self, name, ncomponents):
        self.name = name
        self.ncomponents = ncomponents
        return

    def __repr__(self):
        return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)


PREDEFINED_COLORSPACE = dict(
    (name, PDFColorSpace(name, n)) for (name, n) in {
        'CalRGB': 3,
        'CalGray': 1,
        'Lab': 3,
        'DeviceRGB': 3,
        'DeviceCMYK': 4,
        'DeviceGray': 1,
        'Separation': 1,
        'Indexed': 1,
        'Pattern': 1,
    }.items())
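
# Added example: PREDEFINED_COLORSPACE['DeviceCMYK'].ncomponents == 4,
# so its repr() is '<PDFColorSpace: DeviceCMYK, ncomponents=4>'.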
@@ -1,182 +0,0 @@
#!/usr/bin/env python
from .utils import mult_matrix
from .utils import translate_matrix
from .utils import q
from .utils import bbox2str
from .utils import isnumber
from .pdffont import PDFUnicodeNotDefined


## PDFDevice
##
class PDFDevice:

    def __init__(self, rsrcmgr):
        self.rsrcmgr = rsrcmgr
        self.ctm = None
        return

    def __repr__(self):
        return '<PDFDevice>'

    def close(self):
        return

    def set_ctm(self, ctm):
        self.ctm = ctm
        return

    def begin_tag(self, tag, props=None):
        return

    def end_tag(self):
        return

    def do_tag(self, tag, props=None):
        return

    def begin_page(self, page, ctm):
        return

    def end_page(self, page):
        return

    def begin_figure(self, name, bbox, matrix):
        return

    def end_figure(self, name):
        return

    def paint_path(self, graphicstate, stroke, fill, evenodd, path):
        return

    def render_image(self, name, stream):
        return

    def render_string(self, textstate, seq):
        return


## PDFTextDevice
##
class PDFTextDevice(PDFDevice):

    def render_string(self, textstate, seq):
        matrix = mult_matrix(textstate.matrix, self.ctm)
        font = textstate.font
        fontsize = textstate.fontsize
        scaling = textstate.scaling * .01
        charspace = textstate.charspace * scaling
        wordspace = textstate.wordspace * scaling
        rise = textstate.rise
        if font.is_multibyte():
            wordspace = 0
        dxscale = .001 * fontsize * scaling
        if font.is_vertical():
            textstate.linematrix = self.render_string_vertical(
                seq, matrix, textstate.linematrix, font, fontsize,
                scaling, charspace, wordspace, rise, dxscale)
        else:
            textstate.linematrix = self.render_string_horizontal(
                seq, matrix, textstate.linematrix, font, fontsize,
                scaling, charspace, wordspace, rise, dxscale)
        return
    def render_string_horizontal(self, seq, matrix, pos,
                                 font, fontsize, scaling, charspace, wordspace, rise, dxscale):
        (x, y) = pos
        needcharspace = False
        for obj in seq:
            if isnumber(obj):
                x -= obj*dxscale
                needcharspace = True
            else:
                for cid in font.decode(obj):
                    if needcharspace:
                        x += charspace
                    x += self.render_char(translate_matrix(matrix, (x, y)),
                                          font, fontsize, scaling, rise, cid)
                    if cid == 32 and wordspace:
                        x += wordspace
                    needcharspace = True
        return (x, y)
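
    # Added worked example of the scaling above: with fontsize=12 and
    # textstate.scaling=100, dxscale = .001 * 12 * 1.0 = 0.012, so a TJ
    # adjustment of -250 (thousandths of an em) advances x by
    # -(-250) * 0.012 = +3.0 in text space.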
    def render_string_vertical(self, seq, matrix, pos,
                               font, fontsize, scaling, charspace, wordspace, rise, dxscale):
        (x, y) = pos
        needcharspace = False
        for obj in seq:
            if isnumber(obj):
                y -= obj*dxscale
                needcharspace = True
            else:
                for cid in font.decode(obj):
                    if needcharspace:
                        y += charspace
                    y += self.render_char(translate_matrix(matrix, (x, y)),
                                          font, fontsize, scaling, rise, cid)
                    if cid == 32 and wordspace:
                        y += wordspace
                    needcharspace = True
        return (x, y)

    def render_char(self, matrix, font, fontsize, scaling, rise, cid):
        return 0


## TagExtractor
##
class TagExtractor(PDFDevice):

    def __init__(self, rsrcmgr, outfp):
        PDFDevice.__init__(self, rsrcmgr)
        self.outfp = outfp
        self.pageno = 0
        self._stack = []
        return

    def render_string(self, textstate, seq):
        font = textstate.font
        text = ''
        for obj in seq:
            if not isinstance(obj, bytes):
                continue
            chars = font.decode(obj)
            for cid in chars:
                try:
                    char = font.to_unichr(cid)
                    text += char
                except PDFUnicodeNotDefined:
                    pass
        self.outfp.write(q(text))
        return

    def begin_page(self, page, ctm):
        self.outfp.write('<page id="%s" bbox="%s" rotate="%d">' %
                         (self.pageno, bbox2str(page.mediabox), page.rotate))
        return

    def end_page(self, page):
        self.outfp.write('</page>\n')
        self.pageno += 1
        return

    def begin_tag(self, tag, props=None):
        s = ''
        if isinstance(props, dict):
            s = ''.join(' %s="%s"' % (q(k), q(str(v))) for (k, v)
                        in sorted(props.items()))
        self.outfp.write('<%s%s>' % (q(tag.name), s))
        self._stack.append(tag)
        return

    def end_tag(self):
        assert self._stack
        tag = self._stack.pop(-1)
        self.outfp.write('</%s>' % q(tag.name))
        return

    def do_tag(self, tag, props=None):
        self.begin_tag(tag, props)
        self._stack.pop(-1)
        return
@@ -1,738 +0,0 @@
#!/usr/bin/env python
import sys
import struct
from io import BytesIO
from .cmapdb import CMapDB
from .cmapdb import CMapParser
from .cmapdb import FileUnicodeMap
from .cmapdb import CMap
from .encodingdb import EncodingDB
from .encodingdb import name2unicode
from .psparser import PSStackParser
from .psparser import PSEOF
from .psparser import LIT
from .psparser import KWD
from .psparser import STRICT
from .psparser import PSLiteral
from .psparser import literal_name
from .pdftypes import PDFException
from .pdftypes import resolve1
from .pdftypes import int_value
from .pdftypes import num_value
from .pdftypes import bytes_value
from .pdftypes import list_value
from .pdftypes import dict_value
from .pdftypes import stream_value
from .fontmetrics import FONT_METRICS
from .utils import apply_matrix_norm
from .utils import nunpack
from .utils import choplist
from .utils import isnumber


def get_widths(seq):
    widths = {}
    r = []
    for v in seq:
        if isinstance(v, list):
            if r:
                char1 = r[-1]
                for (i, w) in enumerate(v):
                    widths[char1+i] = w
                r = []
        elif isnumber(v):
            r.append(v)
            if len(r) == 3:
                (char1, char2, w) = r
                for i in range(char1, char2+1):
                    widths[i] = w
                r = []
    return widths
#assert get_widths([1]) == {}
#assert get_widths([1,2,3]) == {1:3, 2:3}
#assert get_widths([1,[2,3],6,[7,8]]) == {1:2,2:3, 6:7,7:8}


def get_widths2(seq):
    widths = {}
    r = []
    for v in seq:
        if isinstance(v, list):
            if r:
                char1 = r[-1]
                for (i, (w, vx, vy)) in enumerate(choplist(3, v)):
                    widths[char1+i] = (w, (vx, vy))
                r = []
        elif isnumber(v):
            r.append(v)
            if len(r) == 5:
                (char1, char2, w, vx, vy) = r
                for i in range(char1, char2+1):
                    widths[i] = (w, (vx, vy))
                r = []
    return widths
#assert get_widths2([1]) == {}
#assert get_widths2([1,2,3,4,5]) == {1:(3, (4,5)), 2:(3, (4,5))}
#assert get_widths2([1,[2,3,4,5],6,[7,8,9]]) == {1:(2, (3,4)), 6:(7, (8,9))}


## FontMetricsDB
##
class FontMetricsDB:

    @classmethod
    def get_metrics(klass, fontname):
        return FONT_METRICS[fontname]


## Type1FontHeaderParser
##
class Type1FontHeaderParser(PSStackParser):

    KEYWORD_BEGIN = KWD(b'begin')
    KEYWORD_END = KWD(b'end')
    KEYWORD_DEF = KWD(b'def')
    KEYWORD_PUT = KWD(b'put')
    KEYWORD_DICT = KWD(b'dict')
    KEYWORD_ARRAY = KWD(b'array')
    KEYWORD_READONLY = KWD(b'readonly')
    KEYWORD_FOR = KWD(b'for')

    def __init__(self, data):
        PSStackParser.__init__(self, data)
        self._cid2unicode = {}
        return

    def get_encoding(self):
        while 1:
            try:
                (cid, name) = self.nextobject()
            except PSEOF:
                break
            try:
                self._cid2unicode[cid] = name2unicode(name)
            except KeyError:
                pass
        return self._cid2unicode

    def do_keyword(self, pos, token):
        if token is self.KEYWORD_PUT:
            ((_, key), (_, value)) = self.pop(2)
            if (isinstance(key, int) and
                    isinstance(value, PSLiteral)):
                self.add_results((key, literal_name(value)))
        return


NIBBLES = ('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'e', 'e-', None, '-')


## CFFFont
## (Format specified in Adobe Technical Note: #5176
##  "The Compact Font Format Specification")
##
def getdict(data):
    d = {}
    fp = BytesIO(data)
    stack = []
    while 1:
        c = fp.read(1)
        if not c:
            break
        b0 = ord(c)
        if b0 <= 21:
            d[b0] = stack
            stack = []
            continue
        if b0 == 30:
            # a real number, encoded as a sequence of 4-bit nibbles.
            s = ''
            loop = True
            while loop:
                b = ord(fp.read(1))
                for n in (b >> 4, b & 15):
                    if n == 15:
                        loop = False
                    else:
                        s += NIBBLES[n]
            value = float(s)
        elif 32 <= b0 <= 246:
            value = b0-139
        else:
            b1 = ord(fp.read(1))
            if 247 <= b0 <= 250:
                value = ((b0-247) << 8)+b1+108
            elif 251 <= b0 <= 254:
                value = -((b0-251) << 8)-b1-108
            else:
                b2 = ord(fp.read(1))
                if 128 <= b1:
                    b1 -= 256
                if b0 == 28:
                    value = b1 << 8 | b2
                else:
                    value = b1 << 24 | b2 << 16 | struct.unpack('>H', fp.read(2))[0]
        stack.append(value)
    return d
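
# Added worked example of the nibble encoding above: after the 0x1e
# marker, the bytes b'\x1a\x5f' split into nibbles (1, 10) and (5, 15),
# i.e. '1', '.', '5', end-of-number, so getdict decodes the real 1.5.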

class CFFFont:

    STANDARD_STRINGS = (
        '.notdef', 'space', 'exclam', 'quotedbl', 'numbersign',
        'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft',
        'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period',
        'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six',
        'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal',
        'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G',
        'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
        'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash',
        'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a',
        'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',
        'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
        'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown',
        'cent', 'sterling', 'fraction', 'yen', 'florin', 'section',
        'currency', 'quotesingle', 'quotedblleft', 'guillemotleft',
        'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash',
        'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet',
        'quotesinglbase', 'quotedblbase', 'quotedblright',
        'guillemotright', 'ellipsis', 'perthousand', 'questiondown',
        'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve',
        'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut',
        'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash',
        'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash',
        'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu',
        'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn',
        'onequarter', 'divide', 'brokenbar', 'degree', 'thorn',
        'threequarters', 'twosuperior', 'registered', 'minus', 'eth',
        'multiply', 'threesuperior', 'copyright', 'Aacute',
        'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde',
        'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave',
        'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde',
        'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde',
        'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave',
        'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex',
        'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute',
        'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex',
        'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex',
        'odieresis', 'ograve', 'otilde', 'scaron', 'uacute',
        'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis',
        'zcaron', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle',
        'dollarsuperior', 'ampersandsmall', 'Acutesmall',
        'parenleftsuperior', 'parenrightsuperior', 'twodotenleader',
        'onedotenleader', 'zerooldstyle', 'oneoldstyle', 'twooldstyle',
        'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle',
        'sevenoldstyle', 'eightoldstyle', 'nineoldstyle',
        'commasuperior', 'threequartersemdash', 'periodsuperior',
        'questionsmall', 'asuperior', 'bsuperior', 'centsuperior',
        'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior',
        'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior',
        'ff', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior',
        'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall',
        'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall',
        'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall',
        'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall',
        'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall',
        'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall',
        'exclamdownsmall', 'centoldstyle', 'Lslashsmall', 'Scaronsmall',
        'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall',
        'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior',
        'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'questiondownsmall',
        'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths',
        'onethird', 'twothirds', 'zerosuperior', 'foursuperior',
        'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior',
        'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior',
        'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior',
        'seveninferior', 'eightinferior', 'nineinferior',
        'centinferior', 'dollarinferior', 'periodinferior',
        'commainferior', 'Agravesmall', 'Aacutesmall',
        'Acircumflexsmall', 'Atildesmall', 'Adieresissmall',
        'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall',
        'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall',
        'Igravesmall', 'Iacutesmall', 'Icircumflexsmall',
        'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall',
        'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall',
        'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall',
        'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall',
        'Yacutesmall', 'Thornsmall', 'Ydieresissmall', '001.000',
        '001.001', '001.002', '001.003', 'Black', 'Bold', 'Book',
        'Light', 'Medium', 'Regular', 'Roman', 'Semibold',
    )

    class INDEX:

        def __init__(self, fp):
            self.fp = fp
            self.offsets = []
            (count, offsize) = struct.unpack('>HB', self.fp.read(3))
            for i in range(count+1):
                self.offsets.append(nunpack(self.fp.read(offsize)))
            self.base = self.fp.tell()-1
            self.fp.seek(self.base+self.offsets[-1])
            return

        def __repr__(self):
            return '<INDEX: size=%d>' % len(self)

        def __len__(self):
            return len(self.offsets)-1

        def __getitem__(self, i):
            self.fp.seek(self.base+self.offsets[i])
            return self.fp.read(self.offsets[i+1]-self.offsets[i])

        def __iter__(self):
            return iter(self[i] for i in range(len(self)))

    def __init__(self, name, fp):
        self.name = name
        self.fp = fp
        # Header
        (_major, _minor, hdrsize, offsize) = struct.unpack('BBBB', self.fp.read(4))
        self.fp.read(hdrsize-4)
        # Name INDEX
        self.name_index = self.INDEX(self.fp)
        # Top DICT INDEX
        self.dict_index = self.INDEX(self.fp)
        # String INDEX
        self.string_index = self.INDEX(self.fp)
        # Global Subr INDEX
        self.subr_index = self.INDEX(self.fp)
        # Top DICT DATA
        self.top_dict = getdict(self.dict_index[0])
        (charset_pos,) = self.top_dict.get(15, [0])
        (encoding_pos,) = self.top_dict.get(16, [0])
        (charstring_pos,) = self.top_dict.get(17, [0])
        # CharStrings
        self.fp.seek(charstring_pos)
        self.charstring = self.INDEX(self.fp)
        self.nglyphs = len(self.charstring)
        # Encodings
        self.code2gid = {}
        self.gid2code = {}
        self.fp.seek(encoding_pos)
        format = self.fp.read(1)
        if format == b'\x00':
            # Format 0
            (n,) = struct.unpack('B', self.fp.read(1))
            for (code, gid) in enumerate(struct.unpack('B'*n, self.fp.read(n))):
                self.code2gid[code] = gid
                self.gid2code[gid] = code
        elif format == b'\x01':
            # Format 1
            (n,) = struct.unpack('B', self.fp.read(1))
            code = 0
            for i in range(n):
                (first, nleft) = struct.unpack('BB', self.fp.read(2))
                for gid in range(first, first+nleft+1):
                    self.code2gid[code] = gid
                    self.gid2code[gid] = code
                    code += 1
        else:
            raise ValueError('unsupported encoding format: %r' % format)
        # Charsets
        self.name2gid = {}
        self.gid2name = {}
        self.fp.seek(charset_pos)
        format = self.fp.read(1)
        if format == b'\x00':
            # Format 0
            n = self.nglyphs-1
            for (gid, sid) in enumerate(struct.unpack('>'+'H'*n, self.fp.read(2*n))):
                gid += 1
                name = self.getstr(sid)
                self.name2gid[name] = gid
                self.gid2name[gid] = name
        elif format == b'\x01':
            # Format 1
            (n,) = struct.unpack('B', self.fp.read(1))
            sid = 0
            for i in range(n):
                (first, nleft) = struct.unpack('BB', self.fp.read(2))
                for gid in range(first, first+nleft+1):
                    name = self.getstr(sid)
                    self.name2gid[name] = gid
                    self.gid2name[gid] = name
                    sid += 1
        elif format == b'\x02':
            # Format 2
            assert 0
        else:
            raise ValueError('unsupported charset format: %r' % format)
        #print(self.code2gid)
        #print(self.name2gid)
        #assert 0
        return

    def getstr(self, sid):
        if sid < len(self.STANDARD_STRINGS):
            return self.STANDARD_STRINGS[sid]
        return self.string_index[sid-len(self.STANDARD_STRINGS)]


## TrueTypeFont
##
class TrueTypeFont:

    class CMapNotFound(Exception):
        pass

    def __init__(self, name, fp):
        self.name = name
        self.fp = fp
        self.tables = {}
        self.fonttype = fp.read(4)
        (ntables, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
        for _ in range(ntables):
            (name, tsum, offset, length) = struct.unpack('>4sLLL', fp.read(16))
            self.tables[name] = (offset, length)
        return

    def create_unicode_map(self):
        if 'cmap' not in self.tables:
            raise TrueTypeFont.CMapNotFound
        (base_offset, length) = self.tables['cmap']
        fp = self.fp
        fp.seek(base_offset)
        (version, nsubtables) = struct.unpack('>HH', fp.read(4))
        subtables = []
        for i in range(nsubtables):
            subtables.append(struct.unpack('>HHL', fp.read(8)))
        char2gid = {}
        # Only supports subtable type 0, 2 and 4.
        for (_1, _2, st_offset) in subtables:
            fp.seek(base_offset+st_offset)
            (fmttype, fmtlen, fmtlang) = struct.unpack('>HHH', fp.read(6))
            if fmttype == 0:
                char2gid.update(enumerate(struct.unpack('>256B', fp.read(256))))
            elif fmttype == 2:
                subheaderkeys = struct.unpack('>256H', fp.read(512))
                firstbytes = [0]*8192
                for (i, k) in enumerate(subheaderkeys):
                    firstbytes[k//8] = i
                nhdrs = max(subheaderkeys)//8 + 1
                hdrs = []
                for i in range(nhdrs):
                    (firstcode, entcount, delta, offset) = struct.unpack('>HHhH', fp.read(8))
                    hdrs.append((i, firstcode, entcount, delta, fp.tell()-2+offset))
                for (i, firstcode, entcount, delta, pos) in hdrs:
                    if not entcount:
                        continue
                    first = firstcode + (firstbytes[i] << 8)
                    fp.seek(pos)
                    for c in range(entcount):
                        # unpack returns a 1-tuple; take the value itself.
                        gid = struct.unpack('>H', fp.read(2))[0]
                        if gid:
                            gid += delta
                        char2gid[first+c] = gid
            elif fmttype == 4:
                (segcount, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
                segcount //= 2
                ecs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
                fp.read(2)
                scs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
                idds = struct.unpack('>%dh' % segcount, fp.read(2*segcount))
                pos = fp.tell()
                idrs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
                for (ec, sc, idd, idr) in zip(ecs, scs, idds, idrs):
                    if idr:
                        fp.seek(pos+idr)
                        for c in range(sc, ec+1):
                            char2gid[c] = (struct.unpack('>H', fp.read(2))[0] + idd) & 0xffff
                    else:
                        for c in range(sc, ec+1):
                            char2gid[c] = (c + idd) & 0xffff
            else:
                assert 0
        # create unicode map
        unicode_map = FileUnicodeMap()
        for (char, gid) in char2gid.items():
            unicode_map.add_cid2unichr(gid, char)
        return unicode_map
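
## Added note on cmap format 4 above: each segment covers char codes
## sc..ec; when idRangeOffset (idr) is zero the glyph id is computed
## arithmetically as (c + idDelta) & 0xffff, otherwise it is looked up
## in the glyphId array that idRangeOffset points into, with idDelta
## still added modulo 2**16.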


## Fonts
##
class PDFFontError(PDFException):
    pass


class PDFUnicodeNotDefined(PDFFontError):
    pass

LITERAL_STANDARD_ENCODING = LIT('StandardEncoding')
LITERAL_TYPE1C = LIT('Type1C')


# PDFFont
class PDFFont:

    def __init__(self, descriptor, widths, default_width=None):
        self.descriptor = descriptor
        self.widths = widths
        self.fontname = resolve1(descriptor.get('FontName', 'unknown'))
        if isinstance(self.fontname, PSLiteral):
            self.fontname = literal_name(self.fontname)
        self.flags = int_value(descriptor.get('Flags', 0))
        self.ascent = num_value(descriptor.get('Ascent', 0))
        self.descent = num_value(descriptor.get('Descent', 0))
        self.italic_angle = num_value(descriptor.get('ItalicAngle', 0))
        self.default_width = default_width or num_value(descriptor.get('MissingWidth', 0))
        self.leading = num_value(descriptor.get('Leading', 0))
        self.bbox = list_value(descriptor.get('FontBBox', (0, 0, 0, 0)))
        self.hscale = self.vscale = .001
        return

    def __repr__(self):
        return '<PDFFont>'

    def is_vertical(self):
        return False

    def is_multibyte(self):
        return False

    def decode(self, data):
        return list(data)

    def get_ascent(self):
        return self.ascent * self.vscale

    def get_descent(self):
        return self.descent * self.vscale

    def get_width(self):
        w = self.bbox[2]-self.bbox[0]
        if w == 0:
            w = -self.default_width
        return w * self.hscale

    def get_height(self):
        h = self.bbox[3]-self.bbox[1]
        if h == 0:
            h = self.ascent - self.descent
        return h * self.vscale

    def char_width(self, cid):
        try:
            return self.widths[cid] * self.hscale
        except KeyError:
            try:
                return self.widths[self.to_unichr(cid)] * self.hscale
            except (KeyError, PDFUnicodeNotDefined):
                return self.default_width * self.hscale

    def char_disp(self, cid):
        return 0

    def string_width(self, s):
        return sum(self.char_width(cid) for cid in self.decode(s))
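
# Added note on the scaling above: widths are stored in thousandths of
# text space (hscale = vscale = .001), so a glyph width of 500 makes
# char_width(cid) return 0.5, and a string of ten such glyphs gives
# string_width(s) == 5.0.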

# PDFSimpleFont
class PDFSimpleFont(PDFFont):

    def __init__(self, descriptor, widths, spec):
        # The font encoding is specified either by the name of a
        # built-in encoding or by a dictionary that describes the
        # differences from a base encoding.
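        # (Added example: an Encoding dict such as
        #    {'BaseEncoding': 'MacRomanEncoding',
        #     'Differences': [65, 'Alpha', 'Beta']}
        # starts from MacRoman and remaps cid 65 -> 'Alpha' and
        # 66 -> 'Beta'; each number in Differences resets the next
        # code to be assigned.)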
        if 'Encoding' in spec:
            encoding = resolve1(spec['Encoding'])
        else:
            encoding = LITERAL_STANDARD_ENCODING
        if isinstance(encoding, dict):
            name = literal_name(encoding.get('BaseEncoding', LITERAL_STANDARD_ENCODING))
            diff = list_value(encoding.get('Differences', None))
            self.cid2unicode = EncodingDB.get_encoding(name, diff)
        else:
            self.cid2unicode = EncodingDB.get_encoding(literal_name(encoding))
        self.unicode_map = None
        if 'ToUnicode' in spec:
            strm = stream_value(spec['ToUnicode'])
            self.unicode_map = FileUnicodeMap()
            CMapParser(self.unicode_map, BytesIO(strm.get_data())).run()
        PDFFont.__init__(self, descriptor, widths)
        return

    def to_unichr(self, cid):
        if self.unicode_map:
            try:
                return self.unicode_map.get_unichr(cid)
            except KeyError:
                pass
        try:
            return self.cid2unicode[cid]
        except KeyError:
            raise PDFUnicodeNotDefined(None, cid)


# PDFType1Font
class PDFType1Font(PDFSimpleFont):

    def __init__(self, rsrcmgr, spec):
        try:
            self.basefont = literal_name(spec['BaseFont'])
        except KeyError:
            if STRICT:
                raise PDFFontError('BaseFont is missing')
            self.basefont = 'unknown'
        try:
            (descriptor, widths) = FontMetricsDB.get_metrics(self.basefont)
        except KeyError:
            descriptor = dict_value(spec.get('FontDescriptor', {}))
            firstchar = int_value(spec.get('FirstChar', 0))
            #lastchar = int_value(spec.get('LastChar', 255))
            widths = list_value(spec.get('Widths', [0]*256))
            widths = dict((i+firstchar, w) for (i, w) in enumerate(widths))
        PDFSimpleFont.__init__(self, descriptor, widths, spec)
        if 'Encoding' not in spec and 'FontFile' in descriptor:
            # try to recover the missing encoding info from the font file.
            self.fontfile = stream_value(descriptor.get('FontFile'))
            length1 = int_value(self.fontfile['Length1'])
            data = self.fontfile.get_data()[:length1]
            parser = Type1FontHeaderParser(BytesIO(data))
            self.cid2unicode = parser.get_encoding()
        return

    def __repr__(self):
        return '<PDFType1Font: basefont=%r>' % self.basefont


# PDFTrueTypeFont
class PDFTrueTypeFont(PDFType1Font):

    def __repr__(self):
        return '<PDFTrueTypeFont: basefont=%r>' % self.basefont


# PDFType3Font
class PDFType3Font(PDFSimpleFont):

    def __init__(self, rsrcmgr, spec):
        firstchar = int_value(spec.get('FirstChar', 0))
        #lastchar = int_value(spec.get('LastChar', 0))
        widths = list_value(spec.get('Widths', [0]*256))
        widths = dict((i+firstchar, w) for (i, w) in enumerate(widths))
        if 'FontDescriptor' in spec:
            descriptor = dict_value(spec['FontDescriptor'])
        else:
            descriptor = {'Ascent': 0, 'Descent': 0,
                          'FontBBox': spec['FontBBox']}
        PDFSimpleFont.__init__(self, descriptor, widths, spec)
        self.matrix = tuple(list_value(spec.get('FontMatrix')))
        (_, self.descent, _, self.ascent) = self.bbox
        (self.hscale, self.vscale) = apply_matrix_norm(self.matrix, (1, 1))
        return

    def __repr__(self):
        return '<PDFType3Font>'


# PDFCIDFont
class PDFCIDFont(PDFFont):

    def __init__(self, rsrcmgr, spec):
        try:
            self.basefont = literal_name(spec['BaseFont'])
        except KeyError:
            if STRICT:
                raise PDFFontError('BaseFont is missing')
            self.basefont = 'unknown'
        self.cidsysteminfo = dict_value(spec.get('CIDSystemInfo', {}))
        registry = bytes_value(self.cidsysteminfo.get('Registry', b'unknown'))
        ordering = bytes_value(self.cidsysteminfo.get('Ordering', b'unknown'))
        self.cidcoding = (registry + b'-' + ordering).decode('ascii')
        try:
            name = literal_name(spec['Encoding'])
        except KeyError:
            if STRICT:
                raise PDFFontError('Encoding is unspecified')
            name = 'unknown'
        try:
            self.cmap = CMapDB.get_cmap(name)
        except CMapDB.CMapNotFound as e:
            if STRICT:
                raise PDFFontError(e)
            self.cmap = CMap()
        try:
            descriptor = dict_value(spec['FontDescriptor'])
        except KeyError:
            if STRICT:
                raise PDFFontError('FontDescriptor is missing')
            descriptor = {}
        ttf = None
        if 'FontFile2' in descriptor:
            self.fontfile = stream_value(descriptor.get('FontFile2'))
            ttf = TrueTypeFont(self.basefont,
                               BytesIO(self.fontfile.get_data()))
        self.unicode_map = None
        if 'ToUnicode' in spec:
            strm = stream_value(spec['ToUnicode'])
            self.unicode_map = FileUnicodeMap()
            CMapParser(self.unicode_map, BytesIO(strm.get_data())).run()
        elif self.cidcoding in ('Adobe-Identity', 'Adobe-UCS'):
            if ttf:
                try:
                    self.unicode_map = ttf.create_unicode_map()
                except TrueTypeFont.CMapNotFound:
                    pass
        else:
            try:
                self.unicode_map = CMapDB.get_unicode_map(self.cidcoding, self.cmap.is_vertical())
            except CMapDB.CMapNotFound:
                pass

        self.vertical = self.cmap.is_vertical()
        if self.vertical:
            # writing mode: vertical
            widths = get_widths2(list_value(spec.get('W2', [])))
            self.disps = dict((cid, (vx, vy)) for (cid, (_, (vx, vy))) in widths.items())
            (vy, w) = spec.get('DW2', [880, -1000])
            self.default_disp = (None, vy)
            widths = dict((cid, w) for (cid, (w, _)) in widths.items())
            default_width = w
        else:
            # writing mode: horizontal
            self.disps = {}
            self.default_disp = 0
            widths = get_widths(list_value(spec.get('W', [])))
            default_width = spec.get('DW', 1000)
        PDFFont.__init__(self, descriptor, widths, default_width=default_width)
        return

    def __repr__(self):
        return '<PDFCIDFont: basefont=%r, cidcoding=%r>' % (self.basefont, self.cidcoding)

    def is_vertical(self):
        return self.vertical

    def is_multibyte(self):
        return True

    def decode(self, data):
        return self.cmap.decode(data)

    def char_disp(self, cid):
        "Returns an integer for horizontal fonts, a tuple for vertical fonts."
        return self.disps.get(cid, self.default_disp)

    def to_unichr(self, cid):
        try:
            if not self.unicode_map:
                raise KeyError(cid)
            return self.unicode_map.get_unichr(cid)
        except KeyError:
            raise PDFUnicodeNotDefined(self.cidcoding, cid)


# main
def main(argv):
    for fname in argv[1:]:
        with open(fname, 'rb') as fp:
            #font = TrueTypeFont(fname, fp)
            font = CFFFont(fname, fp)
            print(font)
    return

if __name__ == '__main__':
    sys.exit(main(sys.argv))
@@ -1,889 +0,0 @@
#!/usr/bin/env python
import re
import logging
from io import BytesIO
from .cmapdb import CMapDB
from .cmapdb import CMap
from .psparser import PSTypeError
from .psparser import PSEOF
from .psparser import PSKeyword
from .psparser import literal_name
from .psparser import keyword_name
from .psparser import PSStackParser
from .psparser import LIT
from .psparser import KWD
from .psparser import STRICT
from .pdftypes import PDFException
from .pdftypes import PDFStream
from .pdftypes import PDFObjRef
from .pdftypes import resolve1
from .pdftypes import list_value
from .pdftypes import dict_value
from .pdftypes import stream_value
from .pdffont import PDFFontError
from .pdffont import PDFType1Font
from .pdffont import PDFTrueTypeFont
from .pdffont import PDFType3Font
from .pdffont import PDFCIDFont
from .pdfcolor import PDFColorSpace
from .pdfcolor import PREDEFINED_COLORSPACE
from .utils import choplist
from .utils import mult_matrix
from .utils import MATRIX_IDENTITY


## Exceptions
##
class PDFResourceError(PDFException):
    pass

class PDFInterpreterError(PDFException):
    pass


## Constants
##
LITERAL_PDF = LIT('PDF')
LITERAL_TEXT = LIT('Text')
LITERAL_FONT = LIT('Font')
LITERAL_FORM = LIT('Form')
LITERAL_IMAGE = LIT('Image')


## PDFTextState
##
class PDFTextState:

    def __init__(self):
        self.font = None
        self.fontsize = 0
        self.charspace = 0
        self.wordspace = 0
        self.scaling = 100
        self.leading = 0
        self.render = 0
        self.rise = 0
        self.reset()
        # self.matrix is set
        # self.linematrix is set
        return

    def __repr__(self):
        return ('<PDFTextState: font=%r, fontsize=%r, charspace=%r, wordspace=%r, '
                ' scaling=%r, leading=%r, render=%r, rise=%r, '
                ' matrix=%r, linematrix=%r>' %
                (self.font, self.fontsize, self.charspace, self.wordspace,
                 self.scaling, self.leading, self.render, self.rise,
                 self.matrix, self.linematrix))

    def copy(self):
        obj = PDFTextState()
        obj.font = self.font
        obj.fontsize = self.fontsize
        obj.charspace = self.charspace
        obj.wordspace = self.wordspace
        obj.scaling = self.scaling
        obj.leading = self.leading
        obj.render = self.render
        obj.rise = self.rise
        obj.matrix = self.matrix
        obj.linematrix = self.linematrix
        return obj

    def reset(self):
        self.matrix = MATRIX_IDENTITY
        self.linematrix = (0, 0)
        return


## PDFGraphicState
##
class PDFGraphicState:

    def __init__(self):
        self.linewidth = 0
        self.linecap = None
        self.linejoin = None
        self.miterlimit = None
        self.dash = None
        self.intent = None
        self.flatness = None
        return

    def copy(self):
        obj = PDFGraphicState()
        obj.linewidth = self.linewidth
        obj.linecap = self.linecap
        obj.linejoin = self.linejoin
        obj.miterlimit = self.miterlimit
        obj.dash = self.dash
        obj.intent = self.intent
        obj.flatness = self.flatness
        return obj

    def __repr__(self):
        return ('<PDFGraphicState: linewidth=%r, linecap=%r, linejoin=%r, '
                ' miterlimit=%r, dash=%r, intent=%r, flatness=%r>' %
                (self.linewidth, self.linecap, self.linejoin,
                 self.miterlimit, self.dash, self.intent, self.flatness))


## Resource Manager
##
class PDFResourceManager:

    """Repository of shared resources.

    ResourceManager facilitates reuse of shared resources
    such as fonts and images so that large objects are not
    allocated multiple times.
    """

    debug = False

    def __init__(self, caching=True):
        self.caching = caching
        self._cached_fonts = {}
        return

    def get_procset(self, procs):
        for proc in procs:
            if proc is LITERAL_PDF:
                pass
            elif proc is LITERAL_TEXT:
                pass
            else:
                #raise PDFResourceError('ProcSet %r is not supported.' % proc)
                pass
        return

    def get_cmap(self, cmapname, strict=False):
        try:
            return CMapDB.get_cmap(cmapname)
        except CMapDB.CMapNotFound:
            if strict:
                raise
            return CMap()

    def get_font(self, objid, spec):
        if objid and objid in self._cached_fonts:
            font = self._cached_fonts[objid]
        else:
            if self.debug:
                logging.info('get_font: create: objid=%r, spec=%r' % (objid, spec))
            if STRICT:
                if spec['Type'] is not LITERAL_FONT:
                    raise PDFFontError('Type is not /Font')
            # Create a Font object.
            if 'Subtype' in spec:
                subtype = literal_name(spec['Subtype'])
            else:
                if STRICT:
                    raise PDFFontError('Font Subtype is not specified.')
                subtype = 'Type1'
            if subtype in ('Type1', 'MMType1'):
                # Type1 Font
                font = PDFType1Font(self, spec)
            elif subtype == 'TrueType':
                # TrueType Font
                font = PDFTrueTypeFont(self, spec)
            elif subtype == 'Type3':
                # Type3 Font
                font = PDFType3Font(self, spec)
            elif subtype in ('CIDFontType0', 'CIDFontType2'):
                # CID Font
                font = PDFCIDFont(self, spec)
            elif subtype == 'Type0':
                # Type0 Font
                dfonts = list_value(spec['DescendantFonts'])
                assert dfonts
                subspec = dict_value(dfonts[0]).copy()
                for k in ('Encoding', 'ToUnicode'):
                    if k in spec:
                        subspec[k] = resolve1(spec[k])
                font = self.get_font(None, subspec)
            else:
                if STRICT:
                    raise PDFFontError('Invalid Font spec: %r' % spec)
                font = PDFType1Font(self, spec)  # this is so wrong!
            if objid and self.caching:
                self._cached_fonts[objid] = font
        return font
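
## Added usage sketch of the caching above (illustrative; `spec` is a
## hypothetical, already-resolved font spec dict): repeated lookups
## under the same objid are served from self._cached_fonts, so both
## names below refer to the same PDFFont object:
##
##   rsrcmgr = PDFResourceManager()
##   f1 = rsrcmgr.get_font(12, spec)   # parsed and cached under objid 12
##   f2 = rsrcmgr.get_font(12, spec)   # cache hit: f2 is f1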
## PDFContentParser
##
class PDFContentParser(PSStackParser):

    def __init__(self, streams):
        self.streams = streams
        self.istream = 0
        PSStackParser.__init__(self, None)
        return

    def fillfp(self):
        if not self.fp:
            if self.istream < len(self.streams):
                strm = stream_value(self.streams[self.istream])
                self.istream += 1
            else:
                raise PSEOF('Unexpected EOF, file truncated?')
            self.fp = BytesIO(strm.get_data())
        return

    def seek(self, pos):
        self.fillfp()
        PSStackParser.seek(self, pos)
        return

    def fillbuf(self):
        if self.charpos < len(self.buf):
            return
        while 1:
            self.fillfp()
            self.bufpos = self.fp.tell()
            self.buf = self.fp.read(self.BUFSIZ)
            if self.buf:
                break
            self.fp = None
        self.charpos = 0
        return

    def get_inline_data(self, pos, target=b'EI'):
        self.seek(pos)
        i = 0
        data = b''
        while i <= len(target):
            self.fillbuf()
            if i:
                c = self.buf[self.charpos:self.charpos+1]
                data += c
                self.charpos += 1
                if len(target) <= i and c.isspace():
                    i += 1
                elif i < len(target) and c == target[i:i+1]:
                    i += 1
                else:
                    i = 0
            else:
                try:
                    j = self.buf.index(target[0], self.charpos)
                    #print('found', (0, self.buf[j:j+10]))
                    data += self.buf[self.charpos:j+1]
                    self.charpos = j+1
                    i = 1
                except ValueError:
                    data += self.buf[self.charpos:]
                    self.charpos = len(self.buf)
        data = data[:-(len(target)+1)]  # strip the last part
        data = re.sub(br'(\x0d\x0a|[\x0d\x0a])$', b'', data)
        return (pos, data)
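
    # Added note on the scan above: `i` tracks how much of the delimiter
    # (the bytes of `target` plus one trailing whitespace char) has been
    # matched so far; while i == 0 the loop uses a fast buffer search for
    # the first target byte, and once the full delimiter is seen, the
    # matched b'EI' and any final EOL are stripped from the data.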
    def flush(self):
        self.add_results(*self.popall())
        return

    KEYWORD_BI = KWD(b'BI')
    KEYWORD_ID = KWD(b'ID')
    KEYWORD_EI = KWD(b'EI')

    def do_keyword(self, pos, token):
        if token is self.KEYWORD_BI:
            # inline image within a content stream
            self.start_type(pos, 'inline')
        elif token is self.KEYWORD_ID:
            try:
                (_, objs) = self.end_type('inline')
                if len(objs) % 2 != 0:
                    raise PSTypeError('Invalid dictionary construct: %r' % objs)
                d = dict((literal_name(k), v) for (k, v) in choplist(2, objs))
                (pos, data) = self.get_inline_data(pos+len(b'ID '))
                obj = PDFStream(d, data)
                self.push((pos, obj))
                self.push((pos, self.KEYWORD_EI))
            except PSTypeError:
                if STRICT:
                    raise
        else:
            self.push((pos, token))
        return


## Interpreter
##
class PDFPageInterpreter:

    debug = 0

    def __init__(self, rsrcmgr, device):
        self.rsrcmgr = rsrcmgr
        self.device = device
        return

    def dup(self):
        return self.__class__(self.rsrcmgr, self.device)

    # init_resources(resources):
    #   Prepare the fonts and XObjects listed in the Resource attribute.
    def init_resources(self, resources):
        self.resources = resources
        self.fontmap = {}
        self.xobjmap = {}
        self.csmap = PREDEFINED_COLORSPACE.copy()
        if not resources:
            return

        def get_colorspace(spec):
            if isinstance(spec, list):
                name = literal_name(spec[0])
            else:
                name = literal_name(spec)
            if name == 'ICCBased' and isinstance(spec, list) and 2 <= len(spec):
                return PDFColorSpace(name, stream_value(spec[1])['N'])
            elif name == 'DeviceN' and isinstance(spec, list) and 2 <= len(spec):
                return PDFColorSpace(name, len(list_value(spec[1])))
            else:
                return PREDEFINED_COLORSPACE.get(name)
        for (k, v) in dict_value(resources).items():
            if self.debug:
                logging.debug('Resource: %r: %r' % (k, v))
            if k == 'Font':
                for (fontid, spec) in dict_value(v).items():
                    objid = None
                    if isinstance(spec, PDFObjRef):
                        objid = spec.objid
                    spec = dict_value(spec)
                    self.fontmap[fontid] = self.rsrcmgr.get_font(objid, spec)
            elif k == 'ColorSpace':
                for (csid, spec) in dict_value(v).items():
                    self.csmap[csid] = get_colorspace(resolve1(spec))
            elif k == 'ProcSet':
                self.rsrcmgr.get_procset(list_value(v))
            elif k == 'XObject':
                for (xobjid, xobjstrm) in dict_value(v).items():
                    self.xobjmap[xobjid] = xobjstrm
        return

    # init_state(ctm)
    #   Initialize the text and graphic states for rendering a page.
    def init_state(self, ctm):
        # gstack: stack for graphical states.
        self.gstack = []
        self.ctm = ctm
        self.device.set_ctm(self.ctm)
        self.textstate = PDFTextState()
        self.graphicstate = PDFGraphicState()
        self.curpath = []
        # argstack: stack for command arguments.
        self.argstack = []
        # set some global states.
        self.scs = self.ncs = None
        if self.csmap:
            for v in self.csmap.values():
                self.scs = self.ncs = v
                break
        return

    def push(self, obj):
        self.argstack.append(obj)
        return

    def pop(self, n):
        if n == 0:
            return []
        x = self.argstack[-n:]
        self.argstack = self.argstack[:-n]
        return x

    def get_current_state(self):
        return (self.ctm, self.textstate.copy(), self.graphicstate.copy())

    def set_current_state(self, state):
        (self.ctm, self.textstate, self.graphicstate) = state
        self.device.set_ctm(self.ctm)
        return

    # gsave
    def do_q(self):
        self.gstack.append(self.get_current_state())
        return

    # grestore
    def do_Q(self):
        if self.gstack:
            self.set_current_state(self.gstack.pop())
        return

    # concat-matrix
    def do_cm(self, a1, b1, c1, d1, e1, f1):
        self.ctm = mult_matrix((a1, b1, c1, d1, e1, f1), self.ctm)
        self.device.set_ctm(self.ctm)
        return
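
    # Added worked example: do_cm(2, 0, 0, 2, 10, 0) premultiplies the
    # CTM, so with an identity CTM the new matrix maps a point (x, y)
    # to (2x + 10, 2y): scale by 2, then shift 10 units right; every
    # later coordinate on the page goes through it.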
# setlinewidth
|
||||
def do_w(self, linewidth):
|
||||
self.graphicstate.linewidth = linewidth
|
||||
return
|
||||
|
||||
# setlinecap
|
||||
def do_J(self, linecap):
|
||||
self.graphicstate.linecap = linecap
|
||||
return
|
||||
|
||||
# setlinejoin
|
||||
def do_j(self, linejoin):
|
||||
self.graphicstate.linejoin = linejoin
|
||||
return
|
||||
|
||||
# setmiterlimit
|
||||
def do_M(self, miterlimit):
|
||||
self.graphicstate.miterlimit = miterlimit
|
||||
return
|
||||
|
||||
# setdash
|
||||
def do_d(self, dash, phase):
|
||||
self.graphicstate.dash = (dash, phase)
|
||||
return
|
||||
|
||||
# setintent
|
||||
def do_ri(self, intent):
|
||||
self.graphicstate.intent = intent
|
||||
return
|
||||
|
||||
# setflatness
|
||||
def do_i(self, flatness):
|
||||
self.graphicstate.flatness = flatness
|
||||
return
|
||||
|
||||
# load-gstate
|
||||
def do_gs(self, name):
|
||||
#XXX
|
||||
return
|
||||
|
||||
# moveto
|
||||
def do_m(self, x, y):
|
||||
self.curpath.append(('m', x, y))
|
||||
return
|
||||
|
||||
# lineto
|
||||
def do_l(self, x, y):
|
||||
self.curpath.append(('l', x, y))
|
||||
return
|
||||
|
||||
# curveto
|
||||
def do_c(self, x1, y1, x2, y2, x3, y3):
|
||||
self.curpath.append(('c', x1, y1, x2, y2, x3, y3))
|
||||
return
|
||||
|
||||
# urveto
|
||||
def do_v(self, x2, y2, x3, y3):
|
||||
self.curpath.append(('v', x2, y2, x3, y3))
|
||||
return
|
||||
|
||||
# rveto
|
||||
def do_y(self, x1, y1, x3, y3):
|
||||
self.curpath.append(('y', x1, y1, x3, y3))
|
||||
return
|
||||
|
||||
# closepath
|
||||
def do_h(self):
|
||||
self.curpath.append(('h',))
|
||||
return
|
||||
|
||||
# rectangle
|
||||
def do_re(self, x, y, w, h):
|
||||
self.curpath.append(('m', x, y))
|
||||
self.curpath.append(('l', x+w, y))
|
||||
self.curpath.append(('l', x+w, y+h))
|
||||
self.curpath.append(('l', x, y+h))
|
||||
self.curpath.append(('h',))
|
||||
return
|
||||
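
    # Illustrative sketch (not part of the original source): 're 10 20 100 50'
    # never reaches the device as a rectangle; do_re above expands it into the
    # equivalent path segments on self.curpath:
    #   [('m', 10, 20), ('l', 110, 20), ('l', 110, 70), ('l', 10, 70), ('h',)]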

    # stroke
    def do_S(self):
        self.device.paint_path(self.graphicstate, True, False, False, self.curpath)
        self.curpath = []
        return

    # close-and-stroke
    def do_s(self):
        self.do_h()
        self.do_S()
        return

    # fill
    def do_f(self):
        self.device.paint_path(self.graphicstate, False, True, False, self.curpath)
        self.curpath = []
        return
    # fill (obsolete)
    do_F = do_f

    # fill-even-odd
    def do_f_a(self):
        self.device.paint_path(self.graphicstate, False, True, True, self.curpath)
        self.curpath = []
        return

    # fill-and-stroke
    def do_B(self):
        self.device.paint_path(self.graphicstate, True, True, False, self.curpath)
        self.curpath = []
        return

    # fill-and-stroke-even-odd
    def do_B_a(self):
        self.device.paint_path(self.graphicstate, True, True, True, self.curpath)
        self.curpath = []
        return

    # close-fill-and-stroke
    def do_b(self):
        self.do_h()
        self.do_B()
        return

    # close-fill-and-stroke-even-odd
    def do_b_a(self):
        self.do_h()
        self.do_B_a()
        return

    # end path without filling or stroking
    def do_n(self):
        self.curpath = []
        return

    # clip
    def do_W(self):
        return

    # clip-even-odd
    def do_W_a(self):
        return

    # setcolorspace-stroking
    def do_CS(self, name):
        try:
            self.scs = self.csmap[literal_name(name)]
        except KeyError:
            if STRICT:
                raise PDFInterpreterError('Undefined ColorSpace: %r' % name)
        return

    # setcolorspace-non-stroking
    def do_cs(self, name):
        try:
            self.ncs = self.csmap[literal_name(name)]
        except KeyError:
            if STRICT:
                raise PDFInterpreterError('Undefined ColorSpace: %r' % name)
        return

    # setgray-stroking
    def do_G(self, gray):
        #self.do_CS(LITERAL_DEVICE_GRAY)
        return

    # setgray-non-stroking
    def do_g(self, gray):
        #self.do_cs(LITERAL_DEVICE_GRAY)
        return

    # setrgb-stroking
    def do_RG(self, r, g, b):
        #self.do_CS(LITERAL_DEVICE_RGB)
        return

    # setrgb-non-stroking
    def do_rg(self, r, g, b):
        #self.do_cs(LITERAL_DEVICE_RGB)
        return

    # setcmyk-stroking
    def do_K(self, c, m, y, k):
        #self.do_CS(LITERAL_DEVICE_CMYK)
        return

    # setcmyk-non-stroking
    def do_k(self, c, m, y, k):
        #self.do_cs(LITERAL_DEVICE_CMYK)
        return

    # setcolor
    def do_SCN(self):
        if self.scs:
            n = self.scs.ncomponents
        else:
            if STRICT:
                raise PDFInterpreterError('No colorspace specified!')
            n = 1
        self.pop(n)
        return

    def do_scn(self):
        if self.ncs:
            n = self.ncs.ncomponents
        else:
            if STRICT:
                raise PDFInterpreterError('No colorspace specified!')
            n = 1
        self.pop(n)
        return

    def do_SC(self):
        self.do_SCN()
        return

    def do_sc(self):
        self.do_scn()
        return

    # shading-name
    def do_sh(self, name):
        return

    # begin-text
    def do_BT(self):
        self.textstate.reset()
        return

    # end-text
    def do_ET(self):
        return

    # begin-compat
    def do_BX(self):
        return

    # end-compat
    def do_EX(self):
        return

    # marked content operators
    def do_MP(self, tag):
        self.device.do_tag(tag)
        return

    def do_DP(self, tag, props):
        self.device.do_tag(tag, props)
        return

    def do_BMC(self, tag):
        self.device.begin_tag(tag)
        return

    def do_BDC(self, tag, props):
        self.device.begin_tag(tag, props)
        return

    def do_EMC(self):
        self.device.end_tag()
        return

    # setcharspace
    def do_Tc(self, space):
        self.textstate.charspace = space
        return

    # setwordspace
    def do_Tw(self, space):
        self.textstate.wordspace = space
        return

    # textscale
    def do_Tz(self, scale):
        self.textstate.scaling = scale
        return

    # setleading
    def do_TL(self, leading):
        self.textstate.leading = -leading
        return

    # selectfont
    def do_Tf(self, fontid, fontsize):
        try:
            self.textstate.font = self.fontmap[literal_name(fontid)]
        except KeyError:
            if STRICT:
                raise PDFInterpreterError('Undefined Font id: %r' % fontid)
            self.textstate.font = self.rsrcmgr.get_font(None, {})
        self.textstate.fontsize = fontsize
        return

    # setrendering
    def do_Tr(self, render):
        self.textstate.render = render
        return

    # settextrise
    def do_Ts(self, rise):
        self.textstate.rise = rise
        return

    # text-move
    def do_Td(self, tx, ty):
        (a, b, c, d, e, f) = self.textstate.matrix
        self.textstate.matrix = (a, b, c, d, tx*a+ty*c+e, tx*b+ty*d+f)
        self.textstate.linematrix = (0, 0)
        #print('Td(%r,%r): %r' % (tx, ty, self.textstate), file=sys.stderr)
        return

    # text-move, also setting the leading
    def do_TD(self, tx, ty):
        (a, b, c, d, e, f) = self.textstate.matrix
        self.textstate.matrix = (a, b, c, d, tx*a+ty*c+e, tx*b+ty*d+f)
        self.textstate.leading = ty
        self.textstate.linematrix = (0, 0)
        #print('TD(%r,%r): %r' % (tx, ty, self.textstate), file=sys.stderr)
        return

    # textmatrix
    def do_Tm(self, a, b, c, d, e, f):
        self.textstate.matrix = (a, b, c, d, e, f)
        self.textstate.linematrix = (0, 0)
        return

    # nextline
    def do_T_a(self):
        (a, b, c, d, e, f) = self.textstate.matrix
        self.textstate.matrix = (a, b, c, d, self.textstate.leading*c+e, self.textstate.leading*d+f)
        self.textstate.linematrix = (0, 0)
        return

    # show-pos
    def do_TJ(self, seq):
        #print('TJ(%r): %r' % (seq, self.textstate), file=sys.stderr)
        if self.textstate.font is None:
            if STRICT:
                raise PDFInterpreterError('No font specified!')
            return
        self.device.render_string(self.textstate, seq)
        return

    # show
    def do_Tj(self, s):
        self.do_TJ([s])
        return

    # quote
    def do__q(self, s):
        self.do_T_a()
        self.do_TJ([s])
        return

    # doublequote
    def do__w(self, aw, ac, s):
        self.do_Tw(aw)
        self.do_Tc(ac)
        self.do_TJ([s])
        return

    # inline image
    def do_BI(self):  # never called
        return

    def do_ID(self):  # never called
        return

    def do_EI(self, obj):
        if 'W' in obj and 'H' in obj:
            iobjid = str(id(obj))
            self.device.begin_figure(iobjid, (0, 0, 1, 1), MATRIX_IDENTITY)
            self.device.render_image(iobjid, obj)
            self.device.end_figure(iobjid)
        return

    # invoke an XObject
    def do_Do(self, xobjid):
        xobjid = literal_name(xobjid)
        try:
            xobj = stream_value(self.xobjmap[xobjid])
        except KeyError:
            if STRICT:
                raise PDFInterpreterError('Undefined xobject id: %r' % xobjid)
            return
        if self.debug: logging.info('Processing xobj: %r' % xobj)
        subtype = xobj.get('Subtype')
        if subtype is LITERAL_FORM and 'BBox' in xobj:
            interpreter = self.dup()
            bbox = list_value(xobj['BBox'])
            matrix = list_value(xobj.get('Matrix', MATRIX_IDENTITY))
            # According to PDF reference 1.7 section 4.9.1, XObjects in
            # earlier PDFs (prior to v1.2) use the page's Resources entry
            # instead of having their own Resources entry.
            resources = dict_value(xobj.get('Resources')) or self.resources.copy()
            self.device.begin_figure(xobjid, bbox, matrix)
            interpreter.render_contents(resources, [xobj], ctm=mult_matrix(matrix, self.ctm))
            self.device.end_figure(xobjid)
        elif subtype is LITERAL_IMAGE and 'Width' in xobj and 'Height' in xobj:
            self.device.begin_figure(xobjid, (0, 0, 1, 1), MATRIX_IDENTITY)
            self.device.render_image(xobjid, xobj)
            self.device.end_figure(xobjid)
        else:
            # unsupported xobject type.
            pass
        return

    def process_page(self, page):
        if self.debug: logging.info('Processing page: %r' % page)
        (x0, y0, x1, y1) = page.mediabox
        if page.rotate == 90:
            ctm = (0, -1, 1, 0, -y0, x1)
        elif page.rotate == 180:
            ctm = (-1, 0, 0, -1, x1, y1)
        elif page.rotate == 270:
            ctm = (0, 1, -1, 0, y1, -x0)
        else:
            ctm = (1, 0, 0, 1, -x0, -y0)
        self.device.begin_page(page, ctm)
        self.render_contents(page.resources, page.contents, ctm=ctm)
        self.device.end_page(page)
        return
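
    # Illustrative sketch (not part of the original source): for a page with
    # mediabox (0, 0, 612, 792) rotated 90 degrees, the branch above yields
    # ctm = (0, -1, 1, 0, 0, 612).  Applying it with apply_matrix_pt maps
    # (0, 0) -> (0, 612) and (612, 792) -> (792, 0), i.e. the page is rotated
    # into a 792x612 landscape device space.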

    # render_contents(resources, streams, ctm)
    # Render the content streams.
    # This method may be called recursively.
    def render_contents(self, resources, streams, ctm=MATRIX_IDENTITY):
        if self.debug:
            logging.info('render_contents: resources=%r, streams=%r, ctm=%r' %
                         (resources, streams, ctm))
        self.init_resources(resources)
        self.init_state(ctm)
        self.execute(list_value(streams))
        return

    def execute(self, streams):
        try:
            parser = PDFContentParser(streams)
        except PSEOF:
            # empty page
            return
        while 1:
            try:
                (_, obj) = parser.nextobject()
            except PSEOF:
                break
            if isinstance(obj, PSKeyword):
                name = keyword_name(obj).decode('ascii')
                method = 'do_%s' % name.replace('*', '_a').replace('"', '_w').replace("'", '_q')
                if hasattr(self, method):
                    func = getattr(self, method)
                    nargs = func.__code__.co_argcount-1
                    if nargs:
                        args = self.pop(nargs)
                        if self.debug:
                            logging.debug('exec: %s %r' % (name, args))
                        if len(args) == nargs:
                            func(*args)
                    else:
                        if self.debug:
                            logging.debug('exec: %s' % name)
                        func()
                else:
                    if STRICT:
                        raise PDFInterpreterError('Unknown operator: %r' % name)
            else:
                self.push(obj)
        return
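
# Illustrative sketch (not part of the original source): how execute() maps
# PDF operator tokens onto do_* handler names.  The starred and quoted
# operators are mangled so they form valid Python identifiers.
def _mangle(name):
    return 'do_%s' % name.replace('*', '_a').replace('"', '_w').replace("'", '_q')

assert _mangle('T*') == 'do_T_a'    # dispatched to do_T_a (nextline)
assert _mangle("'") == 'do__q'      # dispatched to do__q (quote)
assert _mangle('"') == 'do__w'      # dispatched to do__w (doublequote)
assert _mangle('Tj') == 'do_Tj'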
@@ -1,176 +0,0 @@
#!/usr/bin/env python
import logging
from io import BytesIO
from .psparser import PSStackParser
from .psparser import PSSyntaxError
from .psparser import PSEOF
from .psparser import KWD
from .psparser import STRICT
from .pdftypes import PDFException
from .pdftypes import PDFStream
from .pdftypes import PDFObjRef
from .pdftypes import int_value
from .pdftypes import dict_value


## Exceptions
##
class PDFSyntaxError(PDFException):
    pass


## PDFParser
##
class PDFParser(PSStackParser):

    """
    PDFParser fetches PDF objects from a file stream.
    It can handle indirect references by referring to
    a PDF document set by the set_document method.
    It also reads XRefs at the end of every PDF file.

    Typical usage:
      parser = PDFParser(fp)
      parser.read_xref()
      parser.read_xref(fallback=True) # optional
      parser.set_document(doc)
      parser.seek(offset)
      parser.nextobject()

    """

    def __init__(self, fp):
        PSStackParser.__init__(self, fp)
        self.doc = None
        self.fallback = False
        return

    def set_document(self, doc):
        """Associates the parser with a PDFDocument object."""
        self.doc = doc
        return

    KEYWORD_R = KWD(b'R')
    KEYWORD_NULL = KWD(b'null')
    KEYWORD_ENDOBJ = KWD(b'endobj')
    KEYWORD_STREAM = KWD(b'stream')
    KEYWORD_XREF = KWD(b'xref')
    KEYWORD_STARTXREF = KWD(b'startxref')

    def do_keyword(self, pos, token):
        """Handles PDF-related keywords."""

        if token in (self.KEYWORD_XREF, self.KEYWORD_STARTXREF):
            self.add_results(*self.pop(1))

        elif token is self.KEYWORD_ENDOBJ:
            self.add_results(*self.pop(4))

        elif token is self.KEYWORD_NULL:
            # null object
            self.push((pos, None))

        elif token is self.KEYWORD_R:
            # reference to indirect object
            try:
                ((_, objid), (_, genno)) = self.pop(2)
                (objid, genno) = (int(objid), int(genno))
                obj = PDFObjRef(self.doc, objid, genno)
                self.push((pos, obj))
            except PSSyntaxError:
                pass

        elif token is self.KEYWORD_STREAM:
            # stream object
            ((_, dic),) = self.pop(1)
            dic = dict_value(dic)
            objlen = 0
            if not self.fallback:
                try:
                    objlen = int_value(dic['Length'])
                except KeyError:
                    if STRICT:
                        raise PDFSyntaxError('/Length is undefined: %r' % dic)
            self.seek(pos)
            try:
                (_, line) = self.nextline()  # 'stream'
            except PSEOF:
                if STRICT:
                    raise PDFSyntaxError('Unexpected EOF')
                return
            pos += len(line)
            self.fp.seek(pos)
            data = self.fp.read(objlen)
            self.seek(pos+objlen)
            while 1:
                try:
                    (linepos, line) = self.nextline()
                except PSEOF:
                    if STRICT:
                        raise PDFSyntaxError('Unexpected EOF')
                    break
                if b'endstream' in line:
                    i = line.index(b'endstream')
                    objlen += i
                    if self.fallback:
                        data += line[:i]
                    break
                objlen += len(line)
                if self.fallback:
                    data += line
            self.seek(pos+objlen)
            # XXX limit objlen not to exceed object boundary
            if self.debug:
                logging.debug('Stream: pos=%d, objlen=%d, dic=%r, data=%r...' %
                              (pos, objlen, dic, data[:10]))
            obj = PDFStream(dic, data, self.doc.decipher)
            self.push((pos, obj))

        else:
            # others
            self.push((pos, token))

        return


## PDFStreamParser
##
class PDFStreamParser(PDFParser):

    """
    PDFStreamParser is used to parse the PDF content streams
    that are contained in each page and hold the instructions
    for rendering the page. A reference to a PDF document is
    needed because a PDF content stream can also have
    indirect references to other objects in the same document.
    """

    def __init__(self, data):
        PDFParser.__init__(self, BytesIO(data))
        return

    def flush(self):
        self.add_results(*self.popall())
        return

    KEYWORD_OBJ = KWD(b'obj')
    def do_keyword(self, pos, token):
        if token is self.KEYWORD_R:
            # reference to indirect object
            try:
                ((_, objid), (_, genno)) = self.pop(2)
                (objid, genno) = (int(objid), int(genno))
                obj = PDFObjRef(self.doc, objid, genno)
                self.push((pos, obj))
            except PSSyntaxError:
                pass
            return
        elif token in (self.KEYWORD_OBJ, self.KEYWORD_ENDOBJ):
            if STRICT:
                # See PDF Spec 3.4.6: Only the object values are stored in the
                # stream; the obj and endobj keywords are not used.
                raise PDFSyntaxError('Keyword endobj found in stream')
            return
        # others
        self.push((pos, token))
        return
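
# Illustrative sketch (not part of the original source): the fallback path in
# PDFParser.do_keyword above recovers a stream's length by scanning line by
# line for the 'endstream' keyword when /Length is missing or unreliable.
# _find_stream_length is a hypothetical standalone helper showing that logic.
def _find_stream_length(data, start):
    objlen = 0
    for line in data[start:].splitlines(keepends=True):
        if b'endstream' in line:
            objlen += line.index(b'endstream')
            break
        objlen += len(line)
    return objlen

assert _find_stream_length(b'stream\nBT ET\nendstream', 7) == 6  # the b'BT ET\n' payload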
@@ -1,296 +0,0 @@
#!/usr/bin/env python
import zlib
from .lzw import lzwdecode
from .ascii85 import ascii85decode
from .ascii85 import asciihexdecode
from .runlength import rldecode
from .ccitt import ccittfaxdecode
from .psparser import PSException
from .psparser import PSObject
from .psparser import LIT
from .psparser import STRICT
from .utils import apply_png_predictor
from .utils import isnumber


LITERAL_CRYPT = LIT('Crypt')

# Abbreviations of Filter names as defined in PDF 4.8.6. "Inline Images"
LITERALS_FLATE_DECODE = (LIT('FlateDecode'), LIT('Fl'))
LITERALS_LZW_DECODE = (LIT('LZWDecode'), LIT('LZW'))
LITERALS_ASCII85_DECODE = (LIT('ASCII85Decode'), LIT('A85'))
LITERALS_ASCIIHEX_DECODE = (LIT('ASCIIHexDecode'), LIT('AHx'))
LITERALS_RUNLENGTH_DECODE = (LIT('RunLengthDecode'), LIT('RL'))
LITERALS_CCITTFAX_DECODE = (LIT('CCITTFaxDecode'), LIT('CCF'))
LITERALS_DCT_DECODE = (LIT('DCTDecode'), LIT('DCT'))


## PDF Objects
##
class PDFObject(PSObject):
    pass

class PDFException(PSException):
    pass

class PDFTypeError(PDFException):
    pass

class PDFValueError(PDFException):
    pass

class PDFObjectNotFound(PDFException):
    pass

class PDFNotImplementedError(PDFException):
    pass


## PDFObjRef
##
class PDFObjRef(PDFObject):

    def __init__(self, doc, objid, _):
        if objid == 0:
            if STRICT:
                raise PDFValueError('PDF object id cannot be 0.')
        self.doc = doc
        self.objid = objid
        #self.genno = genno  # Never used.
        return

    def __repr__(self):
        return '<PDFObjRef:%d>' % (self.objid)

    def resolve(self, default=None):
        try:
            return self.doc.getobj(self.objid)
        except PDFObjectNotFound:
            return default


# resolve
def resolve1(x, default=None):
    """Resolves an object.

    If this is an array or dictionary, it may still contain
    some indirect objects inside.
    """
    while isinstance(x, PDFObjRef):
        x = x.resolve(default=default)
    return x


def resolve_all(x, default=None):
    """Recursively resolves the given object and all the internals.

    Make sure there is no indirect reference within the nested object.
    This procedure might be slow.
    """
    while isinstance(x, PDFObjRef):
        x = x.resolve(default=default)
    if isinstance(x, list):
        x = [resolve_all(v, default=default) for v in x]
    elif isinstance(x, dict):
        for (k, v) in x.items():
            x[k] = resolve_all(v, default=default)
    return x
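
# Illustrative sketch (not part of the original source): resolving indirect
# references through a minimal stand-in document (_StubDoc is hypothetical;
# the real PDFDocument supplies getobj()).
class _StubDoc:
    def __init__(self):
        self.objects = {}
    def getobj(self, objid):
        return self.objects[objid]

_doc = _StubDoc()
_doc.objects[1] = PDFObjRef(_doc, 2, 0)   # object 1 is a reference to object 2
_doc.objects[2] = 42
assert resolve1(PDFObjRef(_doc, 1, 0)) == 42                # follows the chain
assert resolve_all([PDFObjRef(_doc, 1, 0), 7]) == [42, 7]   # recurses into lists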


def decipher_all(decipher, objid, genno, x):
    """Recursively deciphers the given object.
    """
    if isinstance(x, bytes):
        return decipher(objid, genno, x)
    if isinstance(x, list):
        x = [decipher_all(decipher, objid, genno, v) for v in x]
    elif isinstance(x, dict):
        for (k, v) in x.items():
            x[k] = decipher_all(decipher, objid, genno, v)
    return x


# Type checking
def int_value(x):
    x = resolve1(x)
    if not isinstance(x, int):
        if STRICT:
            raise PDFTypeError('Integer required: %r' % x)
        return 0
    return x


def float_value(x):
    x = resolve1(x)
    if not isinstance(x, float):
        if STRICT:
            raise PDFTypeError('Float required: %r' % x)
        return 0.0
    return x


def num_value(x):
    x = resolve1(x)
    if not isnumber(x):
        if STRICT:
            raise PDFTypeError('Int or Float required: %r' % x)
        return 0
    return x


def bytes_value(x):
    x = resolve1(x)
    if not isinstance(x, bytes):
        if STRICT:
            raise PDFTypeError('Bytes required: %r' % x)
        return b''
    return x


def list_value(x):
    x = resolve1(x)
    if not isinstance(x, (list, tuple)):
        if STRICT:
            raise PDFTypeError('List required: %r' % x)
        return []
    return x


def dict_value(x):
    x = resolve1(x)
    if not isinstance(x, dict):
        if STRICT:
            raise PDFTypeError('Dict required: %r' % x)
        return {}
    return x


def stream_value(x):
    x = resolve1(x)
    if not isinstance(x, PDFStream):
        if STRICT:
            raise PDFTypeError('PDFStream required: %r' % x)
        return PDFStream({}, '')
    return x


## PDFStream type
##
class PDFStream(PDFObject):

    def __init__(self, attrs, rawdata, decipher=None):
        assert isinstance(attrs, dict)
        self.attrs = attrs
        self.rawdata = rawdata
        self.decipher = decipher
        self.data = None
        self.objid = None
        self.genno = None
        return

    def set_objid(self, objid, genno):
        self.objid = objid
        self.genno = genno
        return

    def __repr__(self):
        if self.data is None:
            assert self.rawdata is not None
            return '<PDFStream(%r): raw=%d, %r>' % (self.objid, len(self.rawdata), self.attrs)
        else:
            return '<PDFStream(%r): len=%d, %r>' % (self.objid, len(self.data), self.attrs)

    def __contains__(self, name):
        return name in self.attrs

    def __getitem__(self, name):
        return self.attrs[name]

    def get(self, name, default=None):
        return self.attrs.get(name, default)

    def get_any(self, names, default=None):
        for name in names:
            if name in self.attrs:
                return self.attrs[name]
        return default

    def get_filters(self):
        filters = self.get_any(('F', 'Filter'))
        params = self.get_any(('DP', 'DecodeParms', 'FDecodeParms'), {})
        if not filters:
            return []
        if not isinstance(filters, list):
            filters = [filters]
        if not isinstance(params, list):
            # Make sure the parameters list is the same length as the filters.
            params = [params]*len(filters)
        if STRICT and len(params) != len(filters):
            raise PDFException("Parameters length does not match filters")
        return list(zip(filters, params))

    def decode(self):
        assert self.data is None and self.rawdata is not None
        data = self.rawdata
        if self.decipher:
            # Handle encryption
            data = self.decipher(self.objid, self.genno, data, self.attrs)
        filters = self.get_filters()
        if not filters:
            self.data = data
            self.rawdata = None
            return
        for (f, params) in filters:
            if f in LITERALS_FLATE_DECODE:
                # will get errors if the document is encrypted.
                try:
                    data = zlib.decompress(data)
                except zlib.error as e:
                    if STRICT:
                        raise PDFException('Invalid zlib bytes: %r, %r' % (e, data))
                    data = b''
            elif f in LITERALS_LZW_DECODE:
                data = lzwdecode(data)
            elif f in LITERALS_ASCII85_DECODE:
                data = ascii85decode(data)
            elif f in LITERALS_ASCIIHEX_DECODE:
                data = asciihexdecode(data)
            elif f in LITERALS_RUNLENGTH_DECODE:
                data = rldecode(data)
            elif f in LITERALS_CCITTFAX_DECODE:
                data = ccittfaxdecode(data, params)
            elif f in LITERALS_DCT_DECODE:
                # This is probably a JPG stream - it does not need to be decoded twice.
                # Just return the stream to the user.
                pass
            elif f == LITERAL_CRYPT:
                # not yet..
                raise PDFNotImplementedError('/Crypt filter is unsupported')
            else:
                raise PDFNotImplementedError('Unsupported filter: %r' % f)
            # apply predictors
            if 'Predictor' in params:
                pred = int_value(params['Predictor'])
                if pred == 1:
                    # no predictor
                    pass
                elif 10 <= pred:
                    # PNG predictor
                    colors = int_value(params.get('Colors', 1))
                    columns = int_value(params.get('Columns', 1))
                    bitspercomponent = int_value(params.get('BitsPerComponent', 8))
                    data = apply_png_predictor(pred, colors, columns, bitspercomponent, data)
                else:
                    raise PDFNotImplementedError('Unsupported predictor: %r' % pred)
        self.data = data
        self.rawdata = None
        return

    def get_data(self):
        if self.data is None:
            self.decode()
        return self.data

    def get_rawdata(self):
        return self.rawdata
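
# Illustrative sketch (not part of the original source): a FlateDecode
# round-trip through PDFStream; LIT is the literal-interning helper from
# the psparser module imported above.
_raw = zlib.compress(b'hello world')
_stream = PDFStream({'Filter': LIT('FlateDecode'), 'Length': len(_raw)}, _raw)
assert _stream.get_data() == b'hello world'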
@@ -1,138 +0,0 @@
import re
import ply.lex as lex

states = (
    ('instring', 'exclusive'),
)

tokens = (
    'COMMENT', 'HEXSTRING', 'INT', 'FLOAT', 'LITERAL', 'KEYWORD', 'STRING', 'OPERATOR'
)

delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
delimiter_end = r'(?=[%s]|$)' % delimiter

def t_COMMENT(t):
    # r'^%!.+\n'
    r'%.*\n'
    pass

RE_SPC = re.compile(r'\s')
RE_HEX_PAIR = re.compile(r'[0-9a-fA-F]{2}|.')
@lex.TOKEN(r'<[0-9A-Fa-f\s]*>')
def t_HEXSTRING(t):
    cleaned = RE_SPC.sub('', t.value[1:-1])
    pairs = RE_HEX_PAIR.findall(cleaned)
    token_bytes = bytes([int(pair, 16) for pair in pairs])
    try:
        t.value = token_bytes.decode('ascii')
    except UnicodeDecodeError:
        # should be kept as bytes
        t.value = token_bytes
    return t
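
# Illustrative sketch (not part of the original source): the hex-string
# cleanup performed by t_HEXSTRING, shown standalone on the contents of
# <48 65 6C 6C 6F>.
_cleaned = RE_SPC.sub('', '48 65 6C 6C 6F')
assert bytes(int(p, 16) for p in RE_HEX_PAIR.findall(_cleaned)) == b'Hello'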

@lex.TOKEN(r'(\-|\+)?[0-9]+' + delimiter_end)
def t_INT(t):
    t.value = int(t.value)
    return t

@lex.TOKEN(r'(\-|\+)?([0-9]+\.|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?' + delimiter_end)
def t_FLOAT(t):
    t.value = float(t.value)
    return t

RE_LITERAL_HEX = re.compile(r'#[0-9A-Fa-f]{2}')
@lex.TOKEN(r'/.+?' + delimiter_end)
def t_LITERAL(t):
    newvalue = t.value[1:]
    # If there are '#' chars in the literal, we must de-hex them
    def re_sub(m):
        # convert the hex str (without the # char) to an int and then convert that
        return bytes.fromhex(m.group(0)[1:]).decode('latin-1')
    newvalue = RE_LITERAL_HEX.sub(re_sub, newvalue)
    # If there's any lone # char left, remove it
    newvalue = newvalue.replace('#', '')
    t.value = newvalue
    return t

def t_OPERATOR(t):
    r'{|}|<<|>>|\[|\]'
    return t

t_KEYWORD = r'.+?' + delimiter_end

def t_instring(t):
    r'\('
    t.lexer.value_buffer = []
    t.lexer.string_startpos = t.lexpos
    t.lexer.level = 1
    t.lexer.begin('instring')

# The parens situation: it's complicated. We can have both escaped parens and unescaped parens.
# If they're escaped, there's nothing special, we unescape them and add them to the string. If
# they're not escaped, we have to count how many of them there are, to know when a rparen is the
# end of the string. The regular expression for this is messed up, so what we do is when we hit
# a paren, we look if the previous buffer ended with a backslash. If it did, we don't do paren
# balancing.

def t_instring_lparen(t):
    r'\('
    is_escaped = t.lexer.value_buffer and t.lexer.value_buffer[-1].endswith('\\')
    if is_escaped:
        t.lexer.value_buffer[-1] = t.lexer.value_buffer[-1][:-1]
    else:
        t.lexer.level += 1
    t.lexer.value_buffer.append('(')

def t_instring_rparen(t):
    r'\)'
    is_escaped = t.lexer.value_buffer and t.lexer.value_buffer[-1].endswith('\\')
    if is_escaped:
        t.lexer.value_buffer[-1] = t.lexer.value_buffer[-1][:-1]
    else:
        t.lexer.level -= 1

    if t.lexer.level == 0:
        t.value = ''.join(t.lexer.value_buffer)
        if any(ord(c) > 0x7f for c in t.value):
            t.value = t.value.encode('latin-1')
        t.type = "STRING"
        t.lexpos = t.lexer.string_startpos
        t.lexer.begin('INITIAL')
        return t
    else:
        t.lexer.value_buffer.append(')')

RE_STRING_ESCAPE = re.compile(r'\\[btnfr\\]')
RE_STRING_OCTAL = re.compile(r'\\[0-7]{1,3}')
RE_STRING_LINE_CONT = re.compile(r'\\\n|\\\r|\\\r\n')
ESC_STRING = { 'b': '\b', 't': '\t', 'n': '\n', 'f': '\f', 'r': '\r', '\\': '\\' }

def repl_string_escape(m):
    return ESC_STRING[m.group(0)[1]]

def repl_string_octal(m):
    i = int(m.group(0)[1:], 8)
    if i <= 0xff:  # values above 255 cannot be encoded in a latin-1 string
        return chr(i)
    else:
        return m.group(0)

def t_instring_contents(t):
    r'[^()]+'
    s = t.value
    s = RE_STRING_ESCAPE.sub(repl_string_escape, s)
    s = RE_STRING_OCTAL.sub(repl_string_octal, s)
    s = RE_STRING_LINE_CONT.sub('', s)
    t.lexer.value_buffer.append(s)
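
# Illustrative sketch (not part of the original source): how the two
# substitutions above unescape string contents.  r'\101' is octal for 'A'.
_s = RE_STRING_ESCAPE.sub(repl_string_escape, r'one\ntwo \101\102')
_s = RE_STRING_OCTAL.sub(repl_string_octal, _s)
assert _s == 'one\ntwo AB'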

t_instring_ignore = ''
t_ignore = ' \t\r\n'

# Error handling rule
def t_error(t):
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)
t_instring_error = t_error

lexer = lex.lex()
@@ -1,728 +0,0 @@
#!/usr/bin/env python
import re
import logging
from .utils import choplist

STRICT = 0


## PS Exceptions
##
class PSException(Exception):
    pass


class PSEOF(PSException):
    pass


class PSSyntaxError(PSException):
    pass


class PSTypeError(PSException):
    pass


class PSValueError(PSException):
    pass


## Basic PostScript Types
##

## PSObject
##
class PSObject:

    """Base class for all PS or PDF-related data types."""

    pass


## PSLiteral
##
class PSLiteral(PSObject):

    """A class that represents a PostScript literal.

    PostScript literals are used as identifiers, such as
    variable names, property names and dictionary keys.
    Literals are case sensitive and denoted by a preceding
    slash sign (e.g. "/Name").

    Note: Do not create an instance of PSLiteral directly.
    Always use PSLiteralTable.intern().
    """

    def __init__(self, name):
        self.name = name
        return

    def __repr__(self):
        return '/%r' % self.name


## PSKeyword
##
class PSKeyword(PSObject):

    """A class that represents a PostScript keyword.

    PostScript keywords are a fixed set of predefined words.
    Commands and directives in PostScript are expressed by keywords.
    They are also used to denote the content boundaries.

    Note: Do not create an instance of PSKeyword directly.
    Always use PSKeywordTable.intern().
    """

    def __init__(self, name):
        self.name = name
        return

    def __repr__(self):
        return self.name.decode('ascii')


## PSSymbolTable
##
class PSSymbolTable:

    """A utility class for storing PSLiteral/PSKeyword objects.

    Interned objects can be compared for identity with the "is" operator.
    """

    def __init__(self, klass):
        self.dict = {}
        self.klass = klass
        return

    def intern(self, name):
        if name in self.dict:
            lit = self.dict[name]
        else:
            lit = self.klass(name)
            self.dict[name] = lit
        return lit

PSLiteralTable = PSSymbolTable(PSLiteral)
PSKeywordTable = PSSymbolTable(PSKeyword)
LIT = PSLiteralTable.intern
KWD = PSKeywordTable.intern
KEYWORD_PROC_BEGIN = KWD(b'{')
KEYWORD_PROC_END = KWD(b'}')
KEYWORD_ARRAY_BEGIN = KWD(b'[')
KEYWORD_ARRAY_END = KWD(b']')
KEYWORD_DICT_BEGIN = KWD(b'<<')
KEYWORD_DICT_END = KWD(b'>>')
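
# Illustrative sketch (not part of the original source): interning guarantees
# that equal names map to the identical object, so parsed tokens can be
# compared with the 'is' operator.
assert LIT('Font') is LIT('Font')
assert KWD(b'obj') is KWD(b'obj')
assert LIT('Font') is not LIT('font')   # literals are case sensitive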


def literal_name(x):
    if not isinstance(x, PSLiteral):
        if STRICT:
            raise PSTypeError('Literal required: %r' % (x,))
        else:
            return str(x)
    return x.name


def keyword_name(x):
    if not isinstance(x, PSKeyword):
        if STRICT:
            raise PSTypeError('Keyword required: %r' % (x,))
        else:
            return str(x)
    return x.name


## PSBaseParser
##
EOL = re.compile(br'[\r\n]')
SPC = re.compile(br'\s')
NONSPC = re.compile(br'\S')
HEX = re.compile(br'[0-9a-fA-F]')
END_LITERAL = re.compile(br'[#/%\[\]()<>{}\s]')
END_HEX_STRING = re.compile(br'[^\s0-9a-fA-F]')
HEX_PAIR = re.compile(br'[0-9a-fA-F]{2}|.')
END_NUMBER = re.compile(br'[^0-9]')
END_KEYWORD = re.compile(br'[#/%\[\]()<>{}\s]')
END_STRING = re.compile(br'[()\134]')
OCT_STRING = re.compile(br'[0-7]')
ESC_STRING = {
    b'b': b'\x08', b't': b'\x09', b'n': b'\x0a', b'f': b'\x0c',
    b'r': b'\x0d', b'(': b'(', b')': b')', b'\\': b'\\'
}


class PSBaseParser:

    """Most basic PostScript parser that performs only tokenization.
    """
    BUFSIZ = 4096

    debug = 0

    def __init__(self, fp):
        self.fp = fp
        self.seek(0)
        return

    def __repr__(self):
        return '<%s: %r, bufpos=%d>' % (self.__class__.__name__, self.fp, self.bufpos)

    def flush(self):
        return

    def close(self):
        self.flush()
        return

    def tell(self):
        return self.bufpos+self.charpos

    def poll(self, pos=None, n=80):
        pos0 = self.fp.tell()
        if not pos:
            pos = self.bufpos+self.charpos
        self.fp.seek(pos)
        logging.info('poll(%d): %r' % (pos, self.fp.read(n)))
        self.fp.seek(pos0)
        return

    def seek(self, pos):
        """Seeks the parser to the given position.
        """
        if self.debug:
            logging.debug('seek: %r' % pos)
        self.fp.seek(pos)
        # reset the status for nextline()
        self.bufpos = pos
        self.buf = b''
        self.charpos = 0
        # reset the status for nexttoken()
        self._parse1 = self._parse_main
        self._curtoken = b''
        self._curtokenpos = 0
        self._tokens = []
        return

    def fillbuf(self):
        if self.charpos < len(self.buf):
            return
        # fetch next chunk.
        self.bufpos = self.fp.tell()
        self.buf = self.fp.read(self.BUFSIZ)
        if not self.buf:
            raise PSEOF('Unexpected EOF')
        self.charpos = 0
        return

    def nextline(self):
        """Fetches the next line, which ends with \\r, \\n or \\r\\n.
        """
        linebuf = b''
        linepos = self.bufpos + self.charpos
        eol = False
        while 1:
            self.fillbuf()
            if eol:
                c = self.buf[self.charpos:self.charpos+1]
                # handle b'\r\n'
                if c == b'\n':
                    linebuf += c
                    self.charpos += 1
                break
            m = EOL.search(self.buf, self.charpos)
            if m:
                linebuf += self.buf[self.charpos:m.end(0)]
                self.charpos = m.end(0)
                if linebuf[-1:] == b'\r':
                    eol = True
                else:
                    break
            else:
                linebuf += self.buf[self.charpos:]
                self.charpos = len(self.buf)
        if self.debug:
            logging.debug('nextline: %r, %r' % (linepos, linebuf))
        return (linepos, linebuf)

    def revreadlines(self):
        """Fetches lines backwards, starting from the end of the file.

        This is used to locate the trailers at the end of a file.
        """
        self.fp.seek(0, 2)
        pos = self.fp.tell()
        buf = b''
        while 0 < pos:
            prevpos = pos
            pos = max(0, pos-self.BUFSIZ)
            self.fp.seek(pos)
            s = self.fp.read(prevpos-pos)
            if not s:
                break
            while 1:
                n = max(s.rfind(b'\r'), s.rfind(b'\n'))
                if n == -1:
                    buf = s + buf
                    break
                yield s[n:]+buf
                s = s[:n]
                buf = b''
        return

    def _parse_main(self, s, i):
        m = NONSPC.search(s, i)
        if not m:
            return len(s)
        j = m.start(0)
        c = s[j:j+1]
        self._curtokenpos = self.bufpos+j
        if c == b'%':
            self._curtoken = b'%'
            self._parse1 = self._parse_comment
            return j+1
        elif c == b'/':
            self._curtoken = b''
            self._parse1 = self._parse_literal
            return j+1
        elif c in b'-+' or c.isdigit():
            self._curtoken = c
            self._parse1 = self._parse_number
            return j+1
        elif c == b'.':
            self._curtoken = c
            self._parse1 = self._parse_float
            return j+1
        elif c.isalpha():
            self._curtoken = c
            self._parse1 = self._parse_keyword
            return j+1
        elif c == b'(':
            self._curtoken = b''
            self.paren = 1
            self._parse1 = self._parse_string
            return j+1
        elif c == b'<':
            self._curtoken = b''
            self._parse1 = self._parse_wopen
            return j+1
        elif c == b'>':
            self._curtoken = b''
            self._parse1 = self._parse_wclose
            return j+1
        else:
            self._add_token(KWD(c))
            return j+1

    def _add_token(self, obj):
        self._tokens.append((self._curtokenpos, obj))
        return

    def _parse_comment(self, s, i):
        m = EOL.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return (self._parse_comment, len(s))
        j = m.start(0)
        self._curtoken += s[i:j]
        self._parse1 = self._parse_main
        # We ignore comments.
        #self._tokens.append(self._curtoken)
        return j

    def _parse_literal(self, s, i):
        m = END_LITERAL.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        c = s[j:j+1]
        if c == b'#':
            self.hex = b''
            self._parse1 = self._parse_literal_hex
            return j+1

        try:
            # Try to interpret the token as a utf-8 string
            utoken = self._curtoken.decode('utf-8')
        except UnicodeDecodeError:
            # We failed, there is possibly a corrupt PDF here.
            if STRICT: raise
            utoken = ""
        self._add_token(LIT(utoken))
        self._parse1 = self._parse_main
        return j

    def _parse_literal_hex(self, s, i):
        c = s[i:i+1]
        if HEX.match(c) and len(self.hex) < 2:
            self.hex += c
            return i+1
        if self.hex:
            try:
                self._curtoken += bytes([int(self.hex, 16)])
            except ValueError:
                pass
        self._parse1 = self._parse_literal
        return i

    def _parse_number(self, s, i):
        m = END_NUMBER.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        c = s[j:j+1]
        if c == b'.':
            self._curtoken += c
            self._parse1 = self._parse_float
            return j+1
        try:
            self._add_token(int(self._curtoken))
        except ValueError:
            pass
        self._parse1 = self._parse_main
        return j

    def _parse_float(self, s, i):
        m = END_NUMBER.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        try:
            self._add_token(float(self._curtoken))
        except ValueError:
            pass
        self._parse1 = self._parse_main
        return j

    def _parse_keyword(self, s, i):
        m = END_KEYWORD.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        if self._curtoken == b'true':
            token = True
        elif self._curtoken == b'false':
            token = False
        else:
            token = KWD(self._curtoken)
        self._add_token(token)
        self._parse1 = self._parse_main
        return j

    def _parse_string(self, s, i):
        m = END_STRING.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        c = s[j:j+1]
        if c == b'\\':
            self.oct = b''
            self._parse1 = self._parse_string_1
            return j+1
        if c == b'(':
            self.paren += 1
            self._curtoken += c
            return j+1
        if c == b')':
            self.paren -= 1
            if self.paren:  # WTF, they said balanced parens need no special treatment.
                self._curtoken += c
                return j+1
        self._add_token(self._curtoken)
        self._parse1 = self._parse_main
        return j+1

    def _parse_string_1(self, s, i):
        c = s[i:i+1]
        if OCT_STRING.match(c) and len(self.oct) < 3:
            self.oct += c
            return i+1
        if self.oct:
            try:
                self._curtoken += bytes([int(self.oct, 8)])
            except ValueError:
                pass
            self._parse1 = self._parse_string
            return i
        if c in ESC_STRING:
            self._curtoken += ESC_STRING[c]
        self._parse1 = self._parse_string
        return i+1

    def _parse_wopen(self, s, i):
        c = s[i:i+1]
        if c == b'<':
            self._add_token(KEYWORD_DICT_BEGIN)
            self._parse1 = self._parse_main
            i += 1
        else:
            self._parse1 = self._parse_hexstring
        return i

    def _parse_wclose(self, s, i):
        c = s[i:i+1]
        if c == b'>':
            self._add_token(KEYWORD_DICT_END)
            i += 1
        self._parse1 = self._parse_main
        return i

    def _parse_hexstring(self, s, i):
        m = END_HEX_STRING.search(s, i)
        if not m:
            self._curtoken += s[i:]
            return len(s)
        j = m.start(0)
        self._curtoken += s[i:j]
        try:
            token = HEX_PAIR.sub(lambda m: bytes([int(m.group(0), 16)]),
                                 SPC.sub(b'', self._curtoken))
            self._add_token(token)
        except ValueError:
            pass
        self._parse1 = self._parse_main
        return j

    def nexttoken(self):
        while not self._tokens:
            self.fillbuf()
            self.charpos = self._parse1(self.buf, self.charpos)
        token = self._tokens.pop(0)
        if self.debug:
            logging.debug('nexttoken: %r' % (token,))
        return token
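
    # Illustrative sketch (not part of the original source): nexttoken() drives
    # the _parse1 state machine over buffered chunks.  For the input
    # b'/Name 12 (hi)' the states visited are, roughly:
    #   _parse_main -> _parse_literal -> emits (0, LIT('Name'))
    #   _parse_main -> _parse_number  -> emits (6, 12)
    #   _parse_main -> _parse_string  -> emits (9, b'hi')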


## PSStackParser
##
class PSStackParser(PSBaseParser):

    def __init__(self, fp):
        PSBaseParser.__init__(self, fp)
        self.reset()
        return

    def reset(self):
        self.context = []
        self.curtype = None
        self.curstack = []
        self.results = []
        return

    def seek(self, pos):
        PSBaseParser.seek(self, pos)
        self.reset()
        return

    def push(self, *objs):
        self.curstack.extend(objs)
        return

    def pop(self, n):
        objs = self.curstack[-n:]
        self.curstack[-n:] = []
        return objs

    def popall(self):
        objs = self.curstack
        self.curstack = []
        return objs

    def add_results(self, *objs):
        if self.debug:
            logging.debug('add_results: %r' % (objs,))
        self.results.extend(objs)
        return

    def start_type(self, pos, type):
        self.context.append((pos, self.curtype, self.curstack))
        (self.curtype, self.curstack) = (type, [])
        if self.debug:
            logging.debug('start_type: pos=%r, type=%r' % (pos, type))
        return

    def end_type(self, type):
        if self.curtype != type:
            raise PSTypeError('Type mismatch: %r != %r' % (self.curtype, type))
        objs = [obj for (_, obj) in self.curstack]
        (pos, self.curtype, self.curstack) = self.context.pop()
        if self.debug:
            logging.debug('end_type: pos=%r, type=%r, objs=%r' % (pos, type, objs))
        return (pos, objs)

    def do_keyword(self, pos, token):
        return

    def nextobject(self):
        """Returns the next object, one at a time.

        Returns keywords, literals, strings, numbers, arrays and dictionaries.
        Arrays and dictionaries are represented as Python lists and dictionaries.
        """
        while not self.results:
            (pos, token) = self.nexttoken()
            #print((pos,token), (self.curtype, self.curstack))
            if isinstance(token, (int, float, bool, bytes, PSLiteral)):
                # normal token
                self.push((pos, token))
            elif token == KEYWORD_ARRAY_BEGIN:
                # begin array
                self.start_type(pos, 'a')
            elif token == KEYWORD_ARRAY_END:
                # end array
                try:
                    self.push(self.end_type('a'))
                except PSTypeError:
                    if STRICT:
                        raise
            elif token == KEYWORD_DICT_BEGIN:
                # begin dictionary
                self.start_type(pos, 'd')
            elif token == KEYWORD_DICT_END:
                # end dictionary
                try:
                    (pos, objs) = self.end_type('d')
                    if len(objs) % 2 != 0:
                        raise PSSyntaxError('Invalid dictionary construct: %r' % (objs,))
                    # construct a Python dictionary.
                    d = dict((literal_name(k), v) for (k, v) in choplist(2, objs) if v is not None)
                    self.push((pos, d))
                except PSTypeError:
                    if STRICT:
                        raise
            elif token == KEYWORD_PROC_BEGIN:
                # begin proc
                self.start_type(pos, 'p')
            elif token == KEYWORD_PROC_END:
                # end proc
                try:
                    self.push(self.end_type('p'))
                except PSTypeError:
                    if STRICT:
                        raise
            else:
                if self.debug:
                    logging.debug('do_keyword: pos=%r, token=%r, stack=%r' %
                                  (pos, token, self.curstack))
                self.do_keyword(pos, token)
            if self.context:
                continue
            else:
                self.flush()
        obj = self.results.pop(0)
        if self.debug:
            logging.debug('nextobject: %r' % (obj,))
        return obj
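
    # Illustrative sketch (not part of the original source): dictionary tokens
    # arrive as a flat key/value list that choplist pairs up.  For the input
    # << /Type /Page /Count 3 >> the 'd' context collects
    #   [LIT('Type'), LIT('Page'), LIT('Count'), 3]
    # and the dict(...) construction above produces {'Type': LIT('Page'), 'Count': 3}.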


import unittest


## Simplistic Test cases
##
class TestPSBaseParser(unittest.TestCase):

    TESTDATA = br'''%!PS
begin end
 "  @ #
/a/BCD /Some_Name /foo#5f#xbaa
0 +1 -2 .5 1.234
(abc) () (abc ( def ) ghi)
(def\040\0\0404ghi) (bach\\slask) (foo\nbaa)
(this % is not a comment.)
(foo
baa)
(foo\
baa)
<> <20> < 40 4020 >
<abcd00
12345>
func/a/b{(c)do*}def
[ 1 (z) ! ]
<< /foo (bar) >>
'''

    TOKENS = [
        (5, KWD(b'begin')), (11, KWD(b'end')), (16, KWD(b'"')), (19, KWD(b'@')),
        (21, KWD(b'#')), (23, LIT('a')), (25, LIT('BCD')), (30, LIT('Some_Name')),
        (41, LIT('foo_xbaa')), (54, 0), (56, 1), (59, -2), (62, 0.5),
        (65, 1.234), (71, b'abc'), (77, b''), (80, b'abc ( def ) ghi'),
        (98, b'def \x00 4ghi'), (118, b'bach\\slask'), (132, b'foo\nbaa'),
        (143, b'this % is not a comment.'), (170, b'foo\nbaa'), (180, b'foobaa'),
        (191, b''), (194, b' '), (199, b'@@ '), (211, b'\xab\xcd\x00\x124\x05'),
        (226, KWD(b'func')), (230, LIT('a')), (232, LIT('b')),
        (234, KWD(b'{')), (235, b'c'), (238, KWD(b'do*')), (241, KWD(b'}')),
        (242, KWD(b'def')), (246, KWD(b'[')), (248, 1), (250, b'z'), (254, KWD(b'!')),
        (256, KWD(b']')), (258, KWD(b'<<')), (261, LIT('foo')), (266, b'bar'),
        (272, KWD(b'>>'))
    ]

    OBJS = [
        (23, LIT('a')), (25, LIT('BCD')), (30, LIT('Some_Name')),
        (41, LIT('foo_xbaa')), (54, 0), (56, 1), (59, -2), (62, 0.5),
        (65, 1.234), (71, b'abc'), (77, b''), (80, b'abc ( def ) ghi'),
        (98, b'def \x00 4ghi'), (118, b'bach\\slask'), (132, b'foo\nbaa'),
        (143, b'this % is not a comment.'), (170, b'foo\nbaa'), (180, b'foobaa'),
        (191, b''), (194, b' '), (199, b'@@ '), (211, b'\xab\xcd\x00\x124\x05'),
        (230, LIT('a')), (232, LIT('b')), (234, [b'c']), (246, [1, b'z']),
        (258, {'foo': b'bar'}),
    ]

    def get_tokens(self, s):
        from io import BytesIO

        class MyParser(PSBaseParser):
            def flush(self):
                self.add_results(*self.popall())
        parser = MyParser(BytesIO(s))
        r = []
        try:
            while 1:
                r.append(parser.nexttoken())
        except PSEOF:
            pass
        return r

    def get_objects(self, s):
        from io import BytesIO

        class MyParser(PSStackParser):
            def flush(self):
                self.add_results(*self.popall())
        parser = MyParser(BytesIO(s))
        r = []
        try:
            while 1:
                r.append(parser.nextobject())
        except PSEOF:
            pass
        return r

    def test_1(self):
        tokens = self.get_tokens(self.TESTDATA)
        print(tokens)
        self.assertEqual(tokens, self.TOKENS)
        return

    def test_2(self):
        objs = self.get_objects(self.TESTDATA)
        print(objs)
        self.assertEqual(objs, self.OBJS)
        return

if __name__ == '__main__':
    unittest.main()
File diff suppressed because it is too large
@@ -1,48 +0,0 @@
#!/usr/bin/env python
#
# RunLength decoder (Adobe version) implementation based on PDF Reference
# version 1.4 section 3.3.4.
#
#  * public domain *
#

def rldecode(data):
    r"""
    RunLength decoder (Adobe version) implementation based on PDF Reference
    version 1.4 section 3.3.4:
        The RunLengthDecode filter decodes data that has been encoded in a
        simple byte-oriented format based on run length. The encoded data
        is a sequence of runs, where each run consists of a length byte
        followed by 1 to 128 bytes of data. If the length byte is in the
        range 0 to 127, the following length + 1 (1 to 128) bytes are
        copied literally during decompression. If length is in the range
        129 to 255, the following single byte is to be copied 257 - length
        (2 to 128) times during decompression. A length value of 128
        denotes EOD.
    >>> s = b'\x05123456\xfa7\x04abcde\x80junk'
    >>> rldecode(s)
    b'1234567777777abcde'
    """
    decoded = b''
    i = 0
    while i < len(data):
        #print('data[%d]=:%d:' % (i,ord(data[i])))
        length = data[i]
        if length == 128:
            break
        if 0 <= length < 128:
            run = data[i+1:(i+1)+(length+1)]
            #print('length=%d, run=%s' % (length+1,run))
            decoded += run
            i = (i+1) + (length+1)
        elif length > 128:
            run = data[i+1:i+2]*(257-length)
            #print('length=%d, run=%s' % (257-length,run))
            decoded += run
            i = (i+1) + 1
    return decoded


if __name__ == '__main__':
    import doctest
    print('pdfminer.runlength', doctest.testmod())
@@ -1,333 +0,0 @@
#!/usr/bin/env python
"""
Miscellaneous Routines.
"""
import struct
from sys import maxsize as INF


## PNG Predictor
##
def apply_png_predictor(pred, colors, columns, bitspercomponent, data):
    if bitspercomponent != 8:
        # unsupported
        raise ValueError("Unsupported `bitspercomponent': %d" % bitspercomponent)
    nbytes = colors*columns*bitspercomponent//8
    i = 0
    buf = b''
    line0 = b'\x00' * columns
    for i in range(0, len(data), nbytes+1):
        ft = data[i:i+1]
        i += 1
        line1 = data[i:i+nbytes]
        line2 = b''
        if ft == b'\x00':
            # PNG none
            line2 += line1
        elif ft == b'\x01':
            # PNG sub (UNTESTED)
            c = 0
            for b in line1:
                c = (c+b) & 255
                line2 += bytes([c])
        elif ft == b'\x02':
            # PNG up
            for (a, b) in zip(line0, line1):
                c = (a+b) & 255
                line2 += bytes([c])
        elif ft == b'\x03':
            # PNG average (UNTESTED)
            c = 0
            for (a, b) in zip(line0, line1):
                c = ((c+a+b)//2) & 255
                line2 += bytes([c])
        else:
            # unsupported
            raise ValueError("Unsupported predictor value: %r" % ft)
        buf += line2
        line0 = line2
    return buf
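
# Illustrative sketch (not part of the original source): the PNG 'up' filter
# (type 2) adds each byte to the byte directly above it.  Two 4-byte rows,
# each preceded by its filter-type byte:
_data = b'\x00ABCD' + b'\x02\x01\x01\x01\x01'
assert apply_png_predictor(15, 1, 4, 8, _data) == b'ABCDBCDE'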


## Matrix operations
##
MATRIX_IDENTITY = (1, 0, 0, 1, 0, 0)


def mult_matrix(m1, m0):
    """Returns the multiplication of two matrices."""
    (a1, b1, c1, d1, e1, f1) = m1
    (a0, b0, c0, d0, e0, f0) = m0
    return (a0*a1+c0*b1, b0*a1+d0*b1,
            a0*c1+c0*d1, b0*c1+d0*d1,
            a0*e1+c0*f1+e0, b0*e1+d0*f1+f0)


def translate_matrix(m, v):
    """Translates a matrix by (x, y)."""
    (a, b, c, d, e, f) = m
    (x, y) = v
    return (a, b, c, d, x*a+y*c+e, x*b+y*d+f)


def apply_matrix_pt(m, v):
    """Applies a matrix to a point."""
    (a, b, c, d, e, f) = m
    (x, y) = v
    return (a*x+c*y+e, b*x+d*y+f)


def apply_matrix_norm(m, v):
    """Equivalent to apply_matrix_pt(M, (p,q)) - apply_matrix_pt(M, (0,0))"""
    (a, b, c, d, e, f) = m
    (p, q) = v
    return (a*p+c*q, b*p+d*q)
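
# Illustrative sketch (not part of the original source): composing a scale
# with a translation and applying the result to a point.
_m = mult_matrix(translate_matrix(MATRIX_IDENTITY, (5, 5)), (2, 0, 0, 2, 0, 0))
assert _m == (2, 0, 0, 2, 10, 10)          # the translation is scaled too
assert apply_matrix_pt(_m, (1, 1)) == (12, 12)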


## Utility functions
##

# isnumber
def isnumber(x):
    return isinstance(x, (int, float))

# uniq
def uniq(objs):
    """Eliminates duplicated elements."""
    done = set()
    for obj in objs:
        if obj in done:
            continue
        done.add(obj)
        yield obj
    return


# csort
def csort(objs, key):
    """Order-preserving sorting function."""
    idxs = { obj:i for (i, obj) in enumerate(objs) }
    return sorted(objs, key=lambda obj: (key(obj), idxs[obj]))


# fsplit
def fsplit(pred, objs):
    """Split a list into two classes according to the predicate."""
    t = []
    f = []
    for obj in objs:
        if pred(obj):
            t.append(obj)
        else:
            f.append(obj)
    return (t, f)


# drange
def drange(v0, v1, d):
    """Returns a discrete range."""
    assert v0 < v1
    return range(int(v0)//d, int(v1+d)//d)


# get_bound
def get_bound(pts):
    """Compute a minimal rectangle that covers all the points."""
    (x0, y0, x1, y1) = (INF, INF, -INF, -INF)
    for (x, y) in pts:
        x0 = min(x0, x)
        y0 = min(y0, y)
        x1 = max(x1, x)
        y1 = max(y1, y)
    return (x0, y0, x1, y1)


# pick
def pick(seq, func, maxobj=None):
    """Picks the object obj where func(obj) has the highest value."""
    maxscore = None
    for obj in seq:
        score = func(obj)
        if maxscore is None or maxscore < score:
            (maxscore, maxobj) = (score, obj)
    return maxobj


# choplist
def choplist(n, seq):
    """Groups every n elements of the list."""
    r = []
    for x in seq:
        r.append(x)
        if len(r) == n:
            yield tuple(r)
            r = []
    return
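
# Illustrative sketch (not part of the original source): choplist groups a
# flat sequence into n-tuples and silently drops an incomplete tail.
assert list(choplist(2, [1, 2, 3, 4, 5])) == [(1, 2), (3, 4)]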


# nunpack
def nunpack(s, default=0):
    """Unpacks 1 to 4 byte integers (big endian)."""
    l = len(s)
    if not l:
        return default
    elif l == 1:
        return s[0]
    elif l == 2:
        return struct.unpack('>H', s)[0]
    elif l == 3:
        return struct.unpack('>L', b'\x00'+s)[0]
    elif l == 4:
        return struct.unpack('>L', s)[0]
    else:
        raise TypeError('invalid length: %d' % l)
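
# Illustrative sketch (not part of the original source): nunpack widens the
# short big-endian byte strings used by PDF cross-reference streams.
assert nunpack(b'') == 0
assert nunpack(b'\x01\x02') == 0x0102
assert nunpack(b'\x01\x02\x03') == 0x010203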
|
||||
|
||||
# decode_text
|
||||
PDFDocEncoding = ''.join(chr(x) for x in (
|
||||
0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007,
|
||||
0x0008, 0x0009, 0x000a, 0x000b, 0x000c, 0x000d, 0x000e, 0x000f,
|
||||
0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0017, 0x0017,
|
||||
0x02d8, 0x02c7, 0x02c6, 0x02d9, 0x02dd, 0x02db, 0x02da, 0x02dc,
|
||||
0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027,
|
||||
0x0028, 0x0029, 0x002a, 0x002b, 0x002c, 0x002d, 0x002e, 0x002f,
|
||||
0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037,
|
||||
0x0038, 0x0039, 0x003a, 0x003b, 0x003c, 0x003d, 0x003e, 0x003f,
|
||||
0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047,
|
||||
0x0048, 0x0049, 0x004a, 0x004b, 0x004c, 0x004d, 0x004e, 0x004f,
|
||||
0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057,
|
||||
0x0058, 0x0059, 0x005a, 0x005b, 0x005c, 0x005d, 0x005e, 0x005f,
|
||||
0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067,
|
||||
0x0068, 0x0069, 0x006a, 0x006b, 0x006c, 0x006d, 0x006e, 0x006f,
|
||||
0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077,
|
||||
0x0078, 0x0079, 0x007a, 0x007b, 0x007c, 0x007d, 0x007e, 0x0000,
|
||||
0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x0192, 0x2044,
|
||||
0x2039, 0x203a, 0x2212, 0x2030, 0x201e, 0x201c, 0x201d, 0x2018,
|
||||
0x2019, 0x201a, 0x2122, 0xfb01, 0xfb02, 0x0141, 0x0152, 0x0160,
|
||||
0x0178, 0x017d, 0x0131, 0x0142, 0x0153, 0x0161, 0x017e, 0x0000,
|
||||
0x20ac, 0x00a1, 0x00a2, 0x00a3, 0x00a4, 0x00a5, 0x00a6, 0x00a7,
|
||||
0x00a8, 0x00a9, 0x00aa, 0x00ab, 0x00ac, 0x0000, 0x00ae, 0x00af,
|
||||
0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x00b4, 0x00b5, 0x00b6, 0x00b7,
|
||||
0x00b8, 0x00b9, 0x00ba, 0x00bb, 0x00bc, 0x00bd, 0x00be, 0x00bf,
|
||||
0x00c0, 0x00c1, 0x00c2, 0x00c3, 0x00c4, 0x00c5, 0x00c6, 0x00c7,
|
||||
0x00c8, 0x00c9, 0x00ca, 0x00cb, 0x00cc, 0x00cd, 0x00ce, 0x00cf,
|
||||
0x00d0, 0x00d1, 0x00d2, 0x00d3, 0x00d4, 0x00d5, 0x00d6, 0x00d7,
|
||||
0x00d8, 0x00d9, 0x00da, 0x00db, 0x00dc, 0x00dd, 0x00de, 0x00df,
|
||||
0x00e0, 0x00e1, 0x00e2, 0x00e3, 0x00e4, 0x00e5, 0x00e6, 0x00e7,
|
||||
0x00e8, 0x00e9, 0x00ea, 0x00eb, 0x00ec, 0x00ed, 0x00ee, 0x00ef,
|
||||
0x00f0, 0x00f1, 0x00f2, 0x00f3, 0x00f4, 0x00f5, 0x00f6, 0x00f7,
|
||||
0x00f8, 0x00f9, 0x00fa, 0x00fb, 0x00fc, 0x00fd, 0x00fe, 0x00ff,
|
||||
))
|
||||
|
||||
|
||||
def decode_text(s):
|
||||
"""Decodes a PDFDocEncoding bytes to Unicode."""
|
||||
if s.startswith(b'\xfe\xff'):
|
||||
return s[2:].decode('utf-16be', 'ignore')
|
||||
else:
|
||||
return ''.join(PDFDocEncoding[c] for c in s)
|
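
A small sketch of the two branches (the example bytes are hypothetical): a UTF-16BE byte-order mark selects the first branch, everything else is mapped byte by byte through the table above.

print(decode_text(b'\xfe\xff\x00A'))  # -> 'A'   (UTF-16BE branch)
print(decode_text(b'ABC'))            # -> 'ABC' (PDFDocEncoding branch)
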
def q(s):
    """Quotes an HTML string."""
    return (s.replace('&', '&amp;')
             .replace('<', '&lt;')
             .replace('>', '&gt;')
             .replace('"', '&quot;'))


def bbox2str(bbox):
    (x0, y0, x1, y1) = bbox
    return '%.3f,%.3f,%.3f,%.3f' % (x0, y0, x1, y1)


def matrix2str(m):
    (a, b, c, d, e, f) = m
    return '[%.2f,%.2f,%.2f,%.2f, (%.2f,%.2f)]' % (a, b, c, d, e, f)
## Plane
##
## A set-like data structure for objects placed on a plane.
## Can efficiently find objects in a certain rectangular area.
## Objects are bucketed into fixed-size grid cells by their
## bounding boxes, so a query only scans the cells it overlaps.
##
class Plane:

    def __init__(self, bbox, gridsize=50):
        self._seq = []          # preserve the object order.
        self._objs = set()
        self._grid = {}
        self.gridsize = gridsize
        (self.x0, self.y0, self.x1, self.y1) = bbox
        return

    def __repr__(self):
        return ('<Plane objs=%r>' % list(self))

    def __iter__(self):
        return (obj for obj in self._seq if obj in self._objs)

    def __len__(self):
        return len(self._objs)

    def __contains__(self, obj):
        return obj in self._objs

    def _getrange(self, bbox):
        (x0, y0, x1, y1) = bbox
        if (x1 <= self.x0 or self.x1 <= x0 or
                y1 <= self.y0 or self.y1 <= y0):
            return
        x0 = max(self.x0, x0)
        y0 = max(self.y0, y0)
        x1 = min(self.x1, x1)
        y1 = min(self.y1, y1)
        for y in drange(y0, y1, self.gridsize):
            for x in drange(x0, x1, self.gridsize):
                yield (x, y)
        return

    # extend(objs)
    def extend(self, objs):
        for obj in objs:
            self.add(obj)
        return

    # add(obj): place an object.
    def add(self, obj):
        for k in self._getrange((obj.x0, obj.y0, obj.x1, obj.y1)):
            if k not in self._grid:
                r = []
                self._grid[k] = r
            else:
                r = self._grid[k]
            r.append(obj)
        self._seq.append(obj)
        self._objs.add(obj)
        return

    # remove(obj): displace an object.
    def remove(self, obj):
        for k in self._getrange((obj.x0, obj.y0, obj.x1, obj.y1)):
            try:
                self._grid[k].remove(obj)
            except (KeyError, ValueError):
                pass
        self._objs.remove(obj)
        return

    # find(): finds objects that are in a certain area.
    def find(self, bbox):
        (x0, y0, x1, y1) = bbox
        done = set()
        for k in self._getrange(bbox):
            if k not in self._grid:
                continue
            for obj in self._grid[k]:
                if obj in done:
                    continue
                done.add(obj)
                if (obj.x1 <= x0 or x1 <= obj.x0 or
                        obj.y1 <= y0 or y1 <= obj.y0):
                    continue
                yield obj
        return
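
A minimal usage sketch of the grid index (the Rect helper is hypothetical; any hashable object exposing x0/y0/x1/y1 attributes works):

from collections import namedtuple

Rect = namedtuple('Rect', 'x0 y0 x1 y1')  # hypothetical stand-in for a layout object

plane = Plane((0, 0, 600, 800), gridsize=50)
a = Rect(10, 10, 40, 30)
b = Rect(300, 400, 350, 450)
plane.extend([a, b])

# Only the grid cells overlapping the query rectangle are scanned.
print(list(plane.find((0, 0, 100, 100))))  # -> [Rect(x0=10, y0=10, x1=40, y1=30)]
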
@@ -1 +0,0 @@
pip
@@ -1,17 +0,0 @@
MIT License
Copyright (c) 2018 YOUR NAME
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,29 +0,0 @@
Metadata-Version: 2.1
Name: pdfminer3k
Version: 1.3.4
Summary: Forked from original pdfminer
Home-page: https://github.com/canserhat77/pdfminer3k
Author: Serhat Can
Author-email: author@example.com
License: UNKNOWN
Download-URL: https://github.com/canserhat77/pdfminer3k/archive/v1.3.4.tar.gz
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.6
Description-Content-Type: text/markdown
Requires-Dist: ply

pdfminer3k is a Python 3 port of pdfminer.
PDFMiner is a tool for extracting information from PDF documents.
Unlike other PDF-related tools, it focuses entirely on getting
and analyzing text data. PDFMiner allows one to obtain
the exact location of text on a page, as well as
other information such as fonts or lines.
It includes a PDF converter that can transform PDF files
into other text formats (such as HTML). It has an extensible
PDF parser that can be used for other purposes instead of text analysis.

We had to fork this because the original package was removed from PyPI.
@@ -1,52 +0,0 @@
pdfminer/__init__.py,sha256=yPHkEL3Z6rzO5lskcWyWevTAc1v5Vt-GMjOoCSvvlqU,92
pdfminer/__pycache__/__init__.cpython-37.pyc,,
pdfminer/__pycache__/arcfour.cpython-37.pyc,,
pdfminer/__pycache__/ascii85.cpython-37.pyc,,
pdfminer/__pycache__/cmapdb.cpython-37.pyc,,
pdfminer/__pycache__/converter.cpython-37.pyc,,
pdfminer/__pycache__/encodingdb.cpython-37.pyc,,
pdfminer/__pycache__/fontmetrics.cpython-37.pyc,,
pdfminer/__pycache__/glyphlist.cpython-37.pyc,,
pdfminer/__pycache__/latin_enc.cpython-37.pyc,,
pdfminer/__pycache__/layout.cpython-37.pyc,,
pdfminer/__pycache__/lzw.cpython-37.pyc,,
pdfminer/__pycache__/pdfcolor.cpython-37.pyc,,
pdfminer/__pycache__/pdfdevice.cpython-37.pyc,,
pdfminer/__pycache__/pdffont.cpython-37.pyc,,
pdfminer/__pycache__/pdfinterp.cpython-37.pyc,,
pdfminer/__pycache__/pdfparser.cpython-37.pyc,,
pdfminer/__pycache__/pdftypes.cpython-37.pyc,,
pdfminer/__pycache__/pslexer.cpython-37.pyc,,
pdfminer/__pycache__/psparser.cpython-37.pyc,,
pdfminer/__pycache__/rijndael.cpython-37.pyc,,
pdfminer/__pycache__/runlength.cpython-37.pyc,,
pdfminer/__pycache__/utils.cpython-37.pyc,,
pdfminer/arcfour.py,sha256=3d5oCMG3FoU8ro3M3vIvx48QO1xXxr9nVNn6jwVfhTg,726
pdfminer/ascii85.py,sha256=0qleub5AgixZ-EU-D-4iC-X4IH7ZDZQmvwDIK9FQwCI,2124
pdfminer/cmap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pdfminer/cmap/__pycache__/__init__.cpython-37.pyc,,
pdfminer/cmapdb.py,sha256=cM9VpkaX0beQZclcQKt2nI0MLJvIcsaJGMVRnmlSnLo,11483
pdfminer/converter.py,sha256=ce_4hhThqaMbVUrrel0aBe4SOdlSIRP4b0OR_SUaqw8,18374
pdfminer/encodingdb.py,sha256=A5IDlokwZ4LJUPHUFhpWED6sQnsU2eN-lTVEZZm8pKM,1472
pdfminer/fontmetrics.py,sha256=85fKhzHRr1WIZecNjFq_eOgqy3B4bYHBcvpR5KukeFE,25238
pdfminer/glyphlist.py,sha256=kyVkDscUu-OrI4D4NnEP0BKYA2KSIqptTUAS6SEfAJk,117221
pdfminer/latin_enc.py,sha256=w-W-xoUnYdRpj2GRNJuVuWrU3YgnCXzviXzcqMoBY4Q,7836
pdfminer/layout.py,sha256=f1ckRpRrXodUQ0pbDqQhm3ojanPN3O2DCmd5HyWqcrU,26942
pdfminer/lzw.py,sha256=3VDqSWmJPU8hlOOcGClRmjYnQj0kb9gt2hFFR4XCmak,2809
pdfminer/pdfcolor.py,sha256=Y7tcOY8mYN9InZxBSGY8O-JI0nq7cqWc_FZGoTDZHqs,652
pdfminer/pdfdevice.py,sha256=og6yezLgvNlThNzeb3l_GsOZWfdOk3ZntSg_jYfPB3s,4999
pdfminer/pdffont.py,sha256=sh3AELEppo0D00lWW55MJjF2Z58K3HoM__N6bsLviag,25999
pdfminer/pdfinterp.py,sha256=36Z0hqWNqUY9dmMrXcHKA8in3VnkyLj1EvbhYAvDkXY,23591
pdfminer/pdfparser.py,sha256=O3sLdXKUwxkmkewJKOt2LiFkJnEVreyUEBrnOQfwToY,29608
pdfminer/pdftypes.py,sha256=lpdVDj1IEdju8dEBTvRdaFrGA6SJlrpBuqQ2SJW74-c,7777
pdfminer/pslexer.py,sha256=FLy1B7thnTLZfY-QL-wSw4VZTvAEoc44GY-t11TbxF4,4075
pdfminer/psparser.py,sha256=oRAB2FymUK_4WL4HJIAFXnITEsBZ6GSgAGI1wjH-OyE,8713
pdfminer/rijndael.py,sha256=Z_n8N4fVfIlsg4GKZ2NDww7lrNl2ea5CkWzKb2D4NYo,44292
pdfminer/runlength.py,sha256=0RkKC-0zY23etUCzrjByawNjwtD9BRn02GjuW-kxX7c,1504
pdfminer/utils.py,sha256=PXihxfx6_5Dpdb9wYaD3DiG51CCqY0lJFOixBGRvyvw,8439
pdfminer3k-1.3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pdfminer3k-1.3.4.dist-info/LICENSE.txt,sha256=S1pJL407BwTTXzZK0BQPNM7ZZs57uj1pTSfVZ8Whx30,1061
pdfminer3k-1.3.4.dist-info/METADATA,sha256=O1yUNumrcokPtPG_b2_B3ScS8dCJHMzLVvEPJFH6a5A,1119
pdfminer3k-1.3.4.dist-info/RECORD,,
pdfminer3k-1.3.4.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92
pdfminer3k-1.3.4.dist-info/top_level.txt,sha256=VXd4SVCY_kvBfNFxOQxjFvUTMNHEm88nX7ksfFtZark,9
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1 +0,0 @@
pdfminer
@@ -1,73 +0,0 @@
Metadata-Version: 1.2
Name: pip
Version: 19.0.3
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: pypa-dev@groups.google.com
License: MIT
Description: pip - The Python Package Installer
==================================

.. image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/

.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest

pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.

Please take a look at our documentation for how to install and use pip:

* `Installation`_
* `Usage`_
* `Release notes`_

If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:

* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_

If you want to get involved head over to GitHub to get the source code and feel free to jump on the developer mailing lists and chat rooms:

* `GitHub page`_
* `Dev mailing list`_
* `Dev IRC`_

Code of Conduct
---------------

Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.

.. _package installer: https://packaging.python.org/en/latest/current/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installing.html
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _GitHub page: https://github.com/pypa/pip
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/

Keywords: distutils easy_install egg setuptools wheel virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
@@ -1,391 +0,0 @@
AUTHORS.txt
LICENSE.txt
MANIFEST.in
NEWS.rst
README.rst
pyproject.toml
setup.cfg
setup.py
docs/pip_sphinxext.py
docs/html/conf.py
docs/html/cookbook.rst
docs/html/index.rst
docs/html/installing.rst
docs/html/logic.rst
docs/html/news.rst
docs/html/quickstart.rst
docs/html/usage.rst
docs/html/user_guide.rst
docs/html/development/configuration.rst
docs/html/development/contributing.rst
docs/html/development/getting-started.rst
docs/html/development/index.rst
docs/html/development/release-process.rst
docs/html/development/vendoring-policy.rst
docs/html/reference/index.rst
docs/html/reference/pip.rst
docs/html/reference/pip_check.rst
docs/html/reference/pip_config.rst
docs/html/reference/pip_download.rst
docs/html/reference/pip_freeze.rst
docs/html/reference/pip_hash.rst
docs/html/reference/pip_install.rst
docs/html/reference/pip_list.rst
docs/html/reference/pip_search.rst
docs/html/reference/pip_show.rst
docs/html/reference/pip_uninstall.rst
docs/html/reference/pip_wheel.rst
docs/man/index.rst
docs/man/commands/check.rst
docs/man/commands/config.rst
docs/man/commands/download.rst
docs/man/commands/freeze.rst
docs/man/commands/hash.rst
docs/man/commands/help.rst
docs/man/commands/install.rst
docs/man/commands/list.rst
docs/man/commands/search.rst
docs/man/commands/show.rst
docs/man/commands/uninstall.rst
docs/man/commands/wheel.rst
src/pip/__init__.py
src/pip/__main__.py
src/pip.egg-info/PKG-INFO
src/pip.egg-info/SOURCES.txt
src/pip.egg-info/dependency_links.txt
src/pip.egg-info/entry_points.txt
src/pip.egg-info/not-zip-safe
src/pip.egg-info/top_level.txt
src/pip/_internal/__init__.py
src/pip/_internal/build_env.py
src/pip/_internal/cache.py
src/pip/_internal/configuration.py
src/pip/_internal/download.py
src/pip/_internal/exceptions.py
src/pip/_internal/index.py
src/pip/_internal/locations.py
src/pip/_internal/pep425tags.py
src/pip/_internal/pyproject.py
src/pip/_internal/resolve.py
src/pip/_internal/wheel.py
src/pip/_internal/cli/__init__.py
src/pip/_internal/cli/autocompletion.py
src/pip/_internal/cli/base_command.py
src/pip/_internal/cli/cmdoptions.py
src/pip/_internal/cli/main_parser.py
src/pip/_internal/cli/parser.py
src/pip/_internal/cli/status_codes.py
src/pip/_internal/commands/__init__.py
src/pip/_internal/commands/check.py
src/pip/_internal/commands/completion.py
src/pip/_internal/commands/configuration.py
src/pip/_internal/commands/download.py
src/pip/_internal/commands/freeze.py
src/pip/_internal/commands/hash.py
src/pip/_internal/commands/help.py
src/pip/_internal/commands/install.py
src/pip/_internal/commands/list.py
src/pip/_internal/commands/search.py
src/pip/_internal/commands/show.py
src/pip/_internal/commands/uninstall.py
src/pip/_internal/commands/wheel.py
src/pip/_internal/models/__init__.py
src/pip/_internal/models/candidate.py
src/pip/_internal/models/format_control.py
src/pip/_internal/models/index.py
src/pip/_internal/models/link.py
src/pip/_internal/operations/__init__.py
src/pip/_internal/operations/check.py
src/pip/_internal/operations/freeze.py
src/pip/_internal/operations/prepare.py
src/pip/_internal/req/__init__.py
src/pip/_internal/req/constructors.py
src/pip/_internal/req/req_file.py
src/pip/_internal/req/req_install.py
src/pip/_internal/req/req_set.py
src/pip/_internal/req/req_tracker.py
src/pip/_internal/req/req_uninstall.py
src/pip/_internal/utils/__init__.py
src/pip/_internal/utils/appdirs.py
src/pip/_internal/utils/compat.py
src/pip/_internal/utils/deprecation.py
src/pip/_internal/utils/encoding.py
src/pip/_internal/utils/filesystem.py
src/pip/_internal/utils/glibc.py
src/pip/_internal/utils/hashes.py
src/pip/_internal/utils/logging.py
src/pip/_internal/utils/misc.py
src/pip/_internal/utils/models.py
src/pip/_internal/utils/outdated.py
src/pip/_internal/utils/packaging.py
src/pip/_internal/utils/setuptools_build.py
src/pip/_internal/utils/temp_dir.py
src/pip/_internal/utils/typing.py
src/pip/_internal/utils/ui.py
src/pip/_internal/vcs/__init__.py
src/pip/_internal/vcs/bazaar.py
src/pip/_internal/vcs/git.py
src/pip/_internal/vcs/mercurial.py
src/pip/_internal/vcs/subversion.py
src/pip/_vendor/README.rst
src/pip/_vendor/__init__.py
src/pip/_vendor/appdirs.LICENSE.txt
src/pip/_vendor/appdirs.py
src/pip/_vendor/distro.LICENSE
src/pip/_vendor/distro.py
src/pip/_vendor/ipaddress.LICENSE
src/pip/_vendor/ipaddress.py
src/pip/_vendor/pyparsing.LICENSE
src/pip/_vendor/pyparsing.py
src/pip/_vendor/retrying.LICENSE
src/pip/_vendor/retrying.py
src/pip/_vendor/six.LICENSE
src/pip/_vendor/six.py
src/pip/_vendor/vendor.txt
src/pip/_vendor/cachecontrol/LICENSE.txt
src/pip/_vendor/cachecontrol/__init__.py
src/pip/_vendor/cachecontrol/_cmd.py
src/pip/_vendor/cachecontrol/adapter.py
src/pip/_vendor/cachecontrol/cache.py
src/pip/_vendor/cachecontrol/compat.py
src/pip/_vendor/cachecontrol/controller.py
src/pip/_vendor/cachecontrol/filewrapper.py
src/pip/_vendor/cachecontrol/heuristics.py
src/pip/_vendor/cachecontrol/serialize.py
src/pip/_vendor/cachecontrol/wrapper.py
src/pip/_vendor/cachecontrol/caches/__init__.py
src/pip/_vendor/cachecontrol/caches/file_cache.py
src/pip/_vendor/cachecontrol/caches/redis_cache.py
src/pip/_vendor/certifi/LICENSE
src/pip/_vendor/certifi/__init__.py
src/pip/_vendor/certifi/__main__.py
src/pip/_vendor/certifi/cacert.pem
src/pip/_vendor/certifi/core.py
src/pip/_vendor/chardet/LICENSE
src/pip/_vendor/chardet/__init__.py
src/pip/_vendor/chardet/big5freq.py
src/pip/_vendor/chardet/big5prober.py
src/pip/_vendor/chardet/chardistribution.py
src/pip/_vendor/chardet/charsetgroupprober.py
src/pip/_vendor/chardet/charsetprober.py
src/pip/_vendor/chardet/codingstatemachine.py
src/pip/_vendor/chardet/compat.py
src/pip/_vendor/chardet/cp949prober.py
src/pip/_vendor/chardet/enums.py
src/pip/_vendor/chardet/escprober.py
src/pip/_vendor/chardet/escsm.py
src/pip/_vendor/chardet/eucjpprober.py
src/pip/_vendor/chardet/euckrfreq.py
src/pip/_vendor/chardet/euckrprober.py
src/pip/_vendor/chardet/euctwfreq.py
src/pip/_vendor/chardet/euctwprober.py
src/pip/_vendor/chardet/gb2312freq.py
src/pip/_vendor/chardet/gb2312prober.py
src/pip/_vendor/chardet/hebrewprober.py
src/pip/_vendor/chardet/jisfreq.py
src/pip/_vendor/chardet/jpcntx.py
src/pip/_vendor/chardet/langbulgarianmodel.py
src/pip/_vendor/chardet/langcyrillicmodel.py
src/pip/_vendor/chardet/langgreekmodel.py
src/pip/_vendor/chardet/langhebrewmodel.py
src/pip/_vendor/chardet/langhungarianmodel.py
src/pip/_vendor/chardet/langthaimodel.py
src/pip/_vendor/chardet/langturkishmodel.py
src/pip/_vendor/chardet/latin1prober.py
src/pip/_vendor/chardet/mbcharsetprober.py
src/pip/_vendor/chardet/mbcsgroupprober.py
src/pip/_vendor/chardet/mbcssm.py
src/pip/_vendor/chardet/sbcharsetprober.py
src/pip/_vendor/chardet/sbcsgroupprober.py
src/pip/_vendor/chardet/sjisprober.py
src/pip/_vendor/chardet/universaldetector.py
src/pip/_vendor/chardet/utf8prober.py
src/pip/_vendor/chardet/version.py
src/pip/_vendor/chardet/cli/__init__.py
src/pip/_vendor/chardet/cli/chardetect.py
src/pip/_vendor/colorama/LICENSE.txt
src/pip/_vendor/colorama/__init__.py
src/pip/_vendor/colorama/ansi.py
src/pip/_vendor/colorama/ansitowin32.py
src/pip/_vendor/colorama/initialise.py
src/pip/_vendor/colorama/win32.py
src/pip/_vendor/colorama/winterm.py
src/pip/_vendor/distlib/LICENSE.txt
src/pip/_vendor/distlib/__init__.py
src/pip/_vendor/distlib/compat.py
src/pip/_vendor/distlib/database.py
src/pip/_vendor/distlib/index.py
src/pip/_vendor/distlib/locators.py
src/pip/_vendor/distlib/manifest.py
src/pip/_vendor/distlib/markers.py
src/pip/_vendor/distlib/metadata.py
src/pip/_vendor/distlib/resources.py
src/pip/_vendor/distlib/scripts.py
src/pip/_vendor/distlib/t32.exe
src/pip/_vendor/distlib/t64.exe
src/pip/_vendor/distlib/util.py
src/pip/_vendor/distlib/version.py
src/pip/_vendor/distlib/w32.exe
src/pip/_vendor/distlib/w64.exe
src/pip/_vendor/distlib/wheel.py
src/pip/_vendor/distlib/_backport/__init__.py
src/pip/_vendor/distlib/_backport/misc.py
src/pip/_vendor/distlib/_backport/shutil.py
src/pip/_vendor/distlib/_backport/sysconfig.cfg
src/pip/_vendor/distlib/_backport/sysconfig.py
src/pip/_vendor/distlib/_backport/tarfile.py
src/pip/_vendor/html5lib/LICENSE
src/pip/_vendor/html5lib/__init__.py
src/pip/_vendor/html5lib/_ihatexml.py
src/pip/_vendor/html5lib/_inputstream.py
src/pip/_vendor/html5lib/_tokenizer.py
src/pip/_vendor/html5lib/_utils.py
src/pip/_vendor/html5lib/constants.py
src/pip/_vendor/html5lib/html5parser.py
src/pip/_vendor/html5lib/serializer.py
src/pip/_vendor/html5lib/_trie/__init__.py
src/pip/_vendor/html5lib/_trie/_base.py
src/pip/_vendor/html5lib/_trie/datrie.py
src/pip/_vendor/html5lib/_trie/py.py
src/pip/_vendor/html5lib/filters/__init__.py
src/pip/_vendor/html5lib/filters/alphabeticalattributes.py
src/pip/_vendor/html5lib/filters/base.py
src/pip/_vendor/html5lib/filters/inject_meta_charset.py
src/pip/_vendor/html5lib/filters/lint.py
src/pip/_vendor/html5lib/filters/optionaltags.py
src/pip/_vendor/html5lib/filters/sanitizer.py
src/pip/_vendor/html5lib/filters/whitespace.py
src/pip/_vendor/html5lib/treeadapters/__init__.py
src/pip/_vendor/html5lib/treeadapters/genshi.py
src/pip/_vendor/html5lib/treeadapters/sax.py
src/pip/_vendor/html5lib/treebuilders/__init__.py
src/pip/_vendor/html5lib/treebuilders/base.py
src/pip/_vendor/html5lib/treebuilders/dom.py
src/pip/_vendor/html5lib/treebuilders/etree.py
src/pip/_vendor/html5lib/treebuilders/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/__init__.py
src/pip/_vendor/html5lib/treewalkers/base.py
src/pip/_vendor/html5lib/treewalkers/dom.py
src/pip/_vendor/html5lib/treewalkers/etree.py
src/pip/_vendor/html5lib/treewalkers/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/genshi.py
src/pip/_vendor/idna/LICENSE.rst
src/pip/_vendor/idna/__init__.py
src/pip/_vendor/idna/codec.py
src/pip/_vendor/idna/compat.py
src/pip/_vendor/idna/core.py
src/pip/_vendor/idna/idnadata.py
src/pip/_vendor/idna/intranges.py
src/pip/_vendor/idna/package_data.py
src/pip/_vendor/idna/uts46data.py
src/pip/_vendor/lockfile/LICENSE
src/pip/_vendor/lockfile/__init__.py
src/pip/_vendor/lockfile/linklockfile.py
src/pip/_vendor/lockfile/mkdirlockfile.py
src/pip/_vendor/lockfile/pidlockfile.py
src/pip/_vendor/lockfile/sqlitelockfile.py
src/pip/_vendor/lockfile/symlinklockfile.py
src/pip/_vendor/msgpack/COPYING
src/pip/_vendor/msgpack/__init__.py
src/pip/_vendor/msgpack/_version.py
src/pip/_vendor/msgpack/exceptions.py
src/pip/_vendor/msgpack/fallback.py
src/pip/_vendor/packaging/LICENSE
src/pip/_vendor/packaging/LICENSE.APACHE
src/pip/_vendor/packaging/LICENSE.BSD
src/pip/_vendor/packaging/__about__.py
src/pip/_vendor/packaging/__init__.py
src/pip/_vendor/packaging/_compat.py
src/pip/_vendor/packaging/_structures.py
src/pip/_vendor/packaging/markers.py
src/pip/_vendor/packaging/requirements.py
src/pip/_vendor/packaging/specifiers.py
src/pip/_vendor/packaging/utils.py
src/pip/_vendor/packaging/version.py
src/pip/_vendor/pep517/LICENSE
src/pip/_vendor/pep517/__init__.py
src/pip/_vendor/pep517/_in_process.py
src/pip/_vendor/pep517/build.py
src/pip/_vendor/pep517/check.py
src/pip/_vendor/pep517/colorlog.py
src/pip/_vendor/pep517/compat.py
src/pip/_vendor/pep517/envbuild.py
src/pip/_vendor/pep517/wrappers.py
src/pip/_vendor/pkg_resources/LICENSE
src/pip/_vendor/pkg_resources/__init__.py
src/pip/_vendor/pkg_resources/py31compat.py
src/pip/_vendor/progress/LICENSE
src/pip/_vendor/progress/__init__.py
src/pip/_vendor/progress/bar.py
src/pip/_vendor/progress/counter.py
src/pip/_vendor/progress/helpers.py
src/pip/_vendor/progress/spinner.py
src/pip/_vendor/pytoml/LICENSE
src/pip/_vendor/pytoml/__init__.py
src/pip/_vendor/pytoml/core.py
src/pip/_vendor/pytoml/parser.py
src/pip/_vendor/pytoml/test.py
src/pip/_vendor/pytoml/utils.py
src/pip/_vendor/pytoml/writer.py
src/pip/_vendor/requests/LICENSE
src/pip/_vendor/requests/__init__.py
src/pip/_vendor/requests/__version__.py
src/pip/_vendor/requests/_internal_utils.py
src/pip/_vendor/requests/adapters.py
src/pip/_vendor/requests/api.py
src/pip/_vendor/requests/auth.py
src/pip/_vendor/requests/certs.py
src/pip/_vendor/requests/compat.py
src/pip/_vendor/requests/cookies.py
src/pip/_vendor/requests/exceptions.py
src/pip/_vendor/requests/help.py
src/pip/_vendor/requests/hooks.py
src/pip/_vendor/requests/models.py
src/pip/_vendor/requests/packages.py
src/pip/_vendor/requests/sessions.py
src/pip/_vendor/requests/status_codes.py
src/pip/_vendor/requests/structures.py
src/pip/_vendor/requests/utils.py
src/pip/_vendor/urllib3/LICENSE.txt
src/pip/_vendor/urllib3/__init__.py
src/pip/_vendor/urllib3/_collections.py
src/pip/_vendor/urllib3/connection.py
src/pip/_vendor/urllib3/connectionpool.py
src/pip/_vendor/urllib3/exceptions.py
src/pip/_vendor/urllib3/fields.py
src/pip/_vendor/urllib3/filepost.py
src/pip/_vendor/urllib3/poolmanager.py
src/pip/_vendor/urllib3/request.py
src/pip/_vendor/urllib3/response.py
src/pip/_vendor/urllib3/contrib/__init__.py
src/pip/_vendor/urllib3/contrib/_appengine_environ.py
src/pip/_vendor/urllib3/contrib/appengine.py
src/pip/_vendor/urllib3/contrib/ntlmpool.py
src/pip/_vendor/urllib3/contrib/pyopenssl.py
src/pip/_vendor/urllib3/contrib/securetransport.py
src/pip/_vendor/urllib3/contrib/socks.py
src/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
src/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
src/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
src/pip/_vendor/urllib3/packages/__init__.py
src/pip/_vendor/urllib3/packages/six.py
src/pip/_vendor/urllib3/packages/backports/__init__.py
src/pip/_vendor/urllib3/packages/backports/makefile.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
src/pip/_vendor/urllib3/util/__init__.py
src/pip/_vendor/urllib3/util/connection.py
src/pip/_vendor/urllib3/util/queue.py
src/pip/_vendor/urllib3/util/request.py
src/pip/_vendor/urllib3/util/response.py
src/pip/_vendor/urllib3/util/retry.py
src/pip/_vendor/urllib3/util/ssl_.py
src/pip/_vendor/urllib3/util/timeout.py
src/pip/_vendor/urllib3/util/url.py
src/pip/_vendor/urllib3/util/wait.py
src/pip/_vendor/webencodings/LICENSE
src/pip/_vendor/webencodings/__init__.py
src/pip/_vendor/webencodings/labels.py
src/pip/_vendor/webencodings/mklabels.py
src/pip/_vendor/webencodings/tests.py
src/pip/_vendor/webencodings/x_user_defined.py
@@ -1,5 +0,0 @@
[console_scripts]
pip = pip._internal:main
pip3 = pip._internal:main
pip3.7 = pip._internal:main

@@ -1 +0,0 @@
pip
@@ -1 +0,0 @@
__version__ = "19.0.3"
@@ -1,19 +0,0 @@
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # isort:skip # noqa

if __name__ == '__main__':
    sys.exit(_main())
@@ -1,78 +0,0 @@
#!/usr/bin/env python
from __future__ import absolute_import

import locale
import logging
import os
import warnings

import sys

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            pass
        else:
            securetransport.inject_into_urllib3()

from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import commands_dict
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locales are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
@@ -1,215 +0,0 @@
"""Build Environment used for isolation during sdist building
"""

import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths

from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet

from pip import __file__ as pip_location
from pip._internal.utils.misc import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner

if MYPY_CHECK_RUNNING:
    from typing import Tuple, Set, Iterable, Optional, List  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401

logger = logging.getLogger(__name__)


class _Prefix:

    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=False, prefix=path)
        platlib = get_python_lib(plat_specific=True, prefix=path)
        if purelib == platlib:
            self.lib_dirs = [purelib]
        else:
            self.lib_dirs = [purelib, platlib]


class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        self._temp_dir = TempDirectory(kind="build-env")
        self._temp_dir.create()

        self._prefixes = OrderedDict((
            (name, _Prefix(os.path.join(self._temp_dir.path, name)))
            for name in ('normal', 'overlay')
        ))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(self._temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories,
                # ensuring .pth files are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(self, exc_type, exc_val, exc_tb):
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: Optional[str]
    ):
        # type: (...) -> None
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        args = [
            sys.executable, os.path.dirname(pip_location), 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))
        if finder.index_urls:
            args.extend(['-i', finder.index_urls[0]])
            for extra_index in finder.index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])
        for _, host, _ in finder.secure_origins:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        args.append('--')
        args.extend(requirements)
        with open_spinner(message) as spinner:
            call_subprocess(args, show_stdout=False, spinner=spinner)


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass

    def install_requirements(self, finder, requirements, prefix, message):
        raise NotImplementedError()
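
A rough usage sketch, not from the original file: the finder argument is assumed to be a configured PackageFinder, and the requirement string is hypothetical. This mirrors how pip drives the class internally rather than a documented public API.

env = BuildEnvironment()
try:
    with env:  # swaps PATH/PYTHONPATH so child installs land in the isolated prefixes
        env.install_requirements(
            finder,                  # assumed: an existing PackageFinder
            ['setuptools>=40.8.0'],  # hypothetical build requirement
            'normal',
            "Installing build dependencies",
        )
        conflicting, missing = env.check_requirements(['setuptools>=40.8.0'])
finally:
    env.cleanup()
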
@@ -1,224 +0,0 @@
"""Cache Management
"""

import errno
import hashlib
import logging
import os

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.download import path_to_url
from pip._internal.models.link import Link
from pip._internal.utils.compat import expanduser
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import InvalidWheelFilename, Wheel

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, List, Any  # noqa: F401
    from pip._internal.index import FormatControl  # noqa: F401

logger = logging.getLogger(__name__)


class Cache(object):
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of the path that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, package_name):
        # type: (Link, Optional[str]) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not package_name or
            not link
        )
        if can_not_cache:
            return []

        canonical_name = canonicalize_name(package_name)
        formats = self.format_control.get_allowed_formats(
            canonical_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
                return []
            raise

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        # type: (Link, str) -> Link
        root = self.get_path_for_link(link)
        path = os.path.join(root, candidate)

        return Link(path_to_url(path))

    def cleanup(self):
        # type: () -> None
        pass


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        candidates = []

        for wheel_name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            candidates.append((wheel.support_index_min(), wheel_name))

        if not candidates:
            return link

        return self._link_for_candidate(link, min(candidates)[1])


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, falling back to the ephem
    wheel cache when a certain link is not found in the simple wheel cache.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        retval = self._wheel_cache.get(link, package_name)
        if retval is link:
            retval = self._ephem_cache.get(link, package_name)
        return retval

    def cleanup(self):
        # type: () -> None
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
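
A standalone sketch of the key derivation described in the comments above (the URL is hypothetical):

import hashlib

key_url = "https://files.example.org/pkg-1.0.tar.gz"  # hypothetical cache key
hashed = hashlib.sha224(key_url.encode()).hexdigest()
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
print(parts)  # nested as <cache_dir>/wheels/<p0>/<p1>/<p2>/<p3>/
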
@@ -1,4 +0,0 @@
"""Subpackage containing all of pip's command line interface related code
"""

# This file intentionally does not import submodules
@ -1,152 +0,0 @@
|
|||
"""Logic that powers autocompletion installed by ``pip completion``.
|
||||
"""
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pip._internal.cli.main_parser import create_main_parser
|
||||
from pip._internal.commands import commands_dict, get_summaries
|
||||
from pip._internal.utils.misc import get_installed_distributions
|
||||
|
||||
|
||||
def autocomplete():
|
||||
"""Entry Point for completion of main and subcommand options.
|
||||
"""
|
||||
# Don't complete if user hasn't sourced bash_completion file.
|
||||
if 'PIP_AUTO_COMPLETE' not in os.environ:
|
||||
return
|
||||
cwords = os.environ['COMP_WORDS'].split()[1:]
|
||||
cword = int(os.environ['COMP_CWORD'])
|
||||
try:
|
||||
current = cwords[cword - 1]
|
||||
except IndexError:
|
||||
current = ''
|
||||
|
||||
subcommands = [cmd for cmd, summary in get_summaries()]
|
||||
options = []
|
||||
# subcommand
|
||||
try:
|
||||
subcommand_name = [w for w in cwords if w in subcommands][0]
|
||||
except IndexError:
|
||||
subcommand_name = None
|
||||
|
||||
parser = create_main_parser()
|
||||
# subcommand options
|
||||
if subcommand_name:
|
||||
# special case: 'help' subcommand has no options
|
||||
if subcommand_name == 'help':
|
||||
sys.exit(1)
|
||||
# special case: list locally installed dists for show and uninstall
|
||||
should_list_installed = (
|
||||
subcommand_name in ['show', 'uninstall'] and
|
||||
not current.startswith('-')
|
||||
)
|
||||
if should_list_installed:
|
||||
installed = []
|
||||
lc = current.lower()
|
||||
for dist in get_installed_distributions(local_only=True):
|
||||
if dist.key.startswith(lc) and dist.key not in cwords[1:]:
|
||||
installed.append(dist.key)
|
||||
# if there are no dists installed, fall back to option completion
|
||||
if installed:
|
||||
for dist in installed:
|
||||
print(dist)
|
||||
sys.exit(1)
|
||||
|
||||
subcommand = commands_dict[subcommand_name]()
|
||||
|
||||
for opt in subcommand.parser.option_list_all:
|
||||
if opt.help != optparse.SUPPRESS_HELP:
|
||||
for opt_str in opt._long_opts + opt._short_opts:
|
||||
options.append((opt_str, opt.nargs))
|
||||
|
||||
# filter out previously specified options from available options
|
||||
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
|
||||
options = [(x, v) for (x, v) in options if x not in prev_opts]
|
||||
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            options = auto_complete_paths(current, completion_type)
            options = ((opt, 0) for opt in options)
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        opts = (o for it in opts for o in it)
        if current.startswith('-'):
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, opts)
            if completion_type:
                subcommands = auto_complete_paths(current, completion_type)

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def get_path_completion_type(cwords, cword, opts):
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    if cword < 2 or not cwords[cword - 2].startswith('-'):
        return
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        for o in str(opt).split('/'):
            if cwords[cword - 2].split('=')[0] == o:
                if not opt.metavar or any(
                        x in ('path', 'file', 'dir')
                        for x in opt.metavar.split('/')):
                    return opt.metavar


def auto_complete_paths(current, completion_type):
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    # list all files that start with ``filename``
    file_list = (x for x in os.listdir(current_path)
                 if os.path.normcase(x).startswith(filename))
    for f in file_list:
        opt = os.path.join(current_path, f)
        comp_file = os.path.normcase(os.path.join(directory, f))
        # complete regular files when there is not ``<dir>`` after option
        # complete directories when there is ``<file>``, ``<path>`` or
        # ``<dir>`` after option
        if completion_type != 'dir' and os.path.isfile(opt):
            yield comp_file
        elif os.path.isdir(opt):
            yield os.path.join(comp_file, '')
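The two helpers above reduce to one idea: split the word being completed into a directory part and a name prefix, list matching entries, and mark directories with a trailing separator. A minimal self-contained sketch of that idea (complete_paths and only_dirs are illustrative names, not pip API):

import os

def complete_paths(current, only_dirs=False):
    # split "./se" into directory "." and prefix "se"
    directory, prefix = os.path.split(current)
    base = os.path.abspath(directory or '.')
    if not os.access(base, os.R_OK):
        return
    prefix = os.path.normcase(prefix)
    for name in os.listdir(base):
        if not os.path.normcase(name).startswith(prefix):
            continue
        full = os.path.join(base, name)
        shown = os.path.join(directory, name)
        if os.path.isdir(full):
            yield shown + os.sep      # directories get a trailing separator
        elif not only_dirs and os.path.isfile(full):
            yield shown

print(sorted(complete_paths('./se')))  # e.g. ['./setup.py', './src/']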
@@ -1,341 +0,0 @@
"""Base Command class, and related routines"""
from __future__ import absolute_import, print_function

import logging
import logging.config
import optparse
import os
import platform
import sys
import traceback

from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.cli.status_codes import (
    ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
    VIRTUALENV_NOT_FOUND,
)
from pip._internal.download import PipSession
from pip._internal.exceptions import (
    BadCommand, CommandError, InstallationError, PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.index import PackageFinder
from pip._internal.locations import running_under_virtualenv
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import (
    get_prog, normalize_path, redact_password_from_url,
)
from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, List, Tuple, Any  # noqa: F401
    from optparse import Values  # noqa: F401
    from pip._internal.cache import WheelCache  # noqa: F401
    from pip._internal.req.req_set import RequirementSet  # noqa: F401

__all__ = ['Command']

logger = logging.getLogger(__name__)


class Command(object):
    name = None  # type: Optional[str]
    usage = None  # type: Optional[str]
    hidden = False  # type: bool
    ignore_require_venv = False  # type: bool

    def __init__(self, isolated=False):
        # type: (bool) -> None
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def run(self, options, args):
        # type: (Values, List[Any]) -> Any
        raise NotImplementedError

    def _build_session(self, options, retries=None, timeout=None):
        # type: (Values, Optional[int], Optional[int]) -> PipSession
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # type: (List[str]) -> Tuple
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        # type: (List[str]) -> int
        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        if sys.version_info[:2] == (3, 4):
            deprecated(
                "Python 3.4 support has been deprecated. pip 19.1 will be the "
                "last one supporting it. Please upgrade your Python as Python "
                "3.4 won't be maintained after March 2019 (cf PEP 429).",
                replacement=None,
                gone_in='19.2',
            )
        elif sys.version_info[:2] == (2, 7):
            message = (
                "A future version of pip will drop support for Python 2.7."
            )
            if platform.python_implementation() == "CPython":
                message = (
                    "Python 2.7 will reach the end of its life on January "
                    "1st, 2020. Please upgrade your Python as Python 2.7 "
                    "won't be maintained after that date. "
                ) + message
            deprecated(message, replacement=None, gone_in=None)

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            allow_version_check = (
                # Does this command have the index_group options?
                hasattr(options, "no_index") and
                # Is this command allowed to perform this check?
                not (options.disable_pip_version_check or options.no_index)
            )
            # Check if we're using the latest version of pip available
            if allow_version_check:
                session = self._build_session(
                    options,
                    retries=0,
                    timeout=min(5, options.timeout)
                )
                with session:
                    pip_version_check(session, options)

            # Shutdown the logging module
            logging.shutdown()

        return SUCCESS


class RequirementCommand(Command):

    @staticmethod
    def populate_requirement_set(requirement_set,  # type: RequirementSet
                                 args,  # type: List[str]
                                 options,  # type: Values
                                 finder,  # type: PackageFinder
                                 session,  # type: PipSession
                                 name,  # type: str
                                 wheel_cache  # type: Optional[WheelCache]
                                 ):
        # type: (...) -> None
        """
        Marshal cmd line args into a requirement set.
        """
        # NOTE: As a side-effect, options.require_hashes and
        #       requirement_set.require_hashes may be updated

        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req, None, isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache,
                    use_pep517=options.use_pep517):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)

    def _build_package_finder(
        self,
        options,  # type: Values
        session,  # type: PipSession
        platform=None,  # type: Optional[str]
        python_versions=None,  # type: Optional[List[str]]
        abi=None,  # type: Optional[str]
        implementation=None  # type: Optional[str]
    ):
        # type: (...) -> PackageFinder
        """
        Create a package finder appropriate to this requirement command.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug(
                'Ignoring indexes: %s',
                ','.join(redact_password_from_url(url) for url in index_urls),
            )
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
            prefer_binary=options.prefer_binary,
        )
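The contract in the class above is: subclasses implement run(), and main() maps its return value and any exceptions to numeric exit statuses. That pattern can be shown in miniature; everything below is a local stand-in for the demo, not a pip import:

import optparse
import sys

SUCCESS, ERROR = 0, 1

class CommandError(Exception):
    pass

class Command(object):
    name = None

    def __init__(self):
        self.parser = optparse.OptionParser(prog='demo %s' % self.name)

    def run(self, options, args):
        raise NotImplementedError

    def main(self, args):
        options, args = self.parser.parse_args(args)
        try:
            status = self.run(options, args)
            # commands may return an explicit status; default to SUCCESS
            return status if isinstance(status, int) else SUCCESS
        except CommandError as exc:
            print('ERROR: %s' % exc, file=sys.stderr)
            return ERROR

class EchoCommand(Command):
    name = 'echo'

    def run(self, options, args):
        if not args:
            raise CommandError('nothing to echo')
        print(' '.join(args))
        return SUCCESS

sys.exit(EchoCommand().main(sys.argv[1:]))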
@@ -1,809 +0,0 @@
"""
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

"""
from __future__ import absolute_import

import textwrap
import warnings
from distutils.util import strtobool
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup

from pip._internal.exceptions import CommandError
from pip._internal.locations import USER_CACHE_DIR, src_prefix
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import BAR_TYPES

if MYPY_CHECK_RUNNING:
    from typing import Any, Callable, Dict, List, Optional, Union  # noqa: F401
    from optparse import OptionParser, Values  # noqa: F401
    from pip._internal.cli.parser import ConfigOptionParser  # noqa: F401


def raise_option_error(parser, option, msg):
    """
    Raise an option parsing error using parser.error().

    Args:
      parser: an OptionParser instance.
      option: an Option instance.
      msg: the error text.
    """
    msg = '{} error: {}'.format(option, msg)
    msg = textwrap.fill(' '.join(msg.split()))
    parser.error(msg)


def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option())
    return option_group


def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n):
        return getattr(check_options, n, None)
    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2,
        )


def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    dist_restriction_set = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])

    binary_only = FormatControl(set(), {':all:'})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target:
        if dist_restriction_set and not options.target_dir:
            raise CommandError(
                "Can not use any platform or abi specific options unless "
                "installing via '--target'"
            )


###########
# options #
###########

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Callable[..., Option]

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)  # type: Callable[..., Option]

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)  # type: Callable[..., Option]

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)  # type: Callable[..., Option]

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Callable[..., Option]

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Callable[..., Option]

progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Callable[..., Option]

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)  # type: Callable[..., Option]

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Callable[..., Option]

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Callable[..., Option]

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Callable[..., Option]

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def exists_action():
    # type: () -> Option
    return Option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )


cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Callable[..., Option]

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Callable[..., Option]

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Callable[..., Option]


def extra_index_url():
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )


no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Callable[..., Option]


def find_links():
    # type: () -> Option
    return Option(
        '-f', '--find-links',
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )


def trusted_host():
    # type: () -> Option
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


def constraints():
    # type: () -> Option
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )


def requirements():
    # type: () -> Option
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )


def editable():
    # type: () -> Option
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )


src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".'
)  # type: Callable[..., Option]


def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.only_binary, existing.no_binary,
    )


def no_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )


def only_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )


platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
)  # type: Callable[..., Option]


python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    default=None,
    help=("Only use wheels compatible with Python "
          "interpreter version <version>. If not specified, then the "
          "current system interpreter minor version is used. A major "
          "version (e.g. '2') can be specified to match all "
          "minor revs of that major version. A minor version "
          "(e.g. '34') can also be specified."),
)  # type: Callable[..., Option]


implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          " or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
)  # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
)  # type: Callable[..., Option]


def prefer_binary():
    # type: () -> Option
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages."
    )


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)  # type: Callable[..., Option]


def no_cache_dir_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False


no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=no_cache_dir_callback,
    help="Disable the cache.",
)  # type: Callable[..., Option]

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Callable[..., Option]

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Callable[..., Option]

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Callable[..., Option]

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Callable[..., Option]


def no_use_pep517_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False


use_pep517 = partial(
    Option,
    '--use-pep517',
    dest='use_pep517',
    action='store_true',
    default=None,
    help='Use PEP 517 for building source distributions '
         '(use --no-use-pep517 to force legacy behaviour).'
)  # type: Any

no_use_pep517 = partial(
    Option,
    '--no-use-pep517',
    dest='use_pep517',
    action='callback',
    callback=no_use_pep517_callback,
    default=None,
    help=SUPPRESS_HELP
)  # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Callable[..., Option]

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Callable[..., Option]

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Callable[..., Option]

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Callable[..., Option]

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Callable[..., Option]


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def _merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}  # type: ignore
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)


hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Callable[..., Option]


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Callable[..., Option]


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}  # type: Dict[str, Any]

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
}  # type: Dict[str, Any]
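Note the split in the file above: simple flags are partial(Option, ...), but every action='append' option (extra_index_url, find_links, trusted_host, constraints, ...) is a def returning a fresh Option. That is the state-carrying problem the module docstring warns about: a default=[] captured once in a partial is a single shared list, so appends leak between parses, while a function builds the default anew on every call. A self-contained demo of the difference (leaky and fresh are illustrative names):

from functools import partial
from optparse import Option, OptionParser

# one list object, created when the partial is built, shared forever
leaky = partial(Option, '--host', dest='hosts', action='append', default=[])

def fresh():
    # a new list every time the option is instantiated
    return Option('--host', dest='hosts', action='append', default=[])

def parse(make_option, argv):
    parser = OptionParser()
    parser.add_option(make_option())
    options, _ = parser.parse_args(argv)
    return options.hosts

print(parse(leaky, ['--host', 'a']))  # ['a']
print(parse(leaky, []))               # ['a']  <- leaked into the next parse
print(parse(fresh, ['--host', 'a']))  # ['a']
print(parse(fresh, []))               # []     <- isolated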
@@ -1,104 +0,0 @@
"""A single place for constructing and exposing the main parser
"""

import os
import sys

from pip import __version__
from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import (
    commands_dict, get_similar_commands, get_summaries,
)
from pip._internal.exceptions import CommandError
from pip._internal.utils.misc import get_prog
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Tuple, List  # noqa: F401


__all__ = ["create_main_parser", "parse_command"]


def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI
    """

    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.abspath(os.path.join(
        os.path.dirname(__file__), "..", "..",
    ))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3],
    )

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    # so the help formatter knows
    parser.main = True  # type: ignore

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)  # type: ignore
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
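The comment in parse_command() describes the effect of disable_interspersed_args(): parsing stops at the first positional argument, which neatly separates the general options from the subcommand and its own arguments. The same behaviour in plain optparse:

from optparse import OptionParser

parser = OptionParser()
parser.add_option('--timeout', type='float', default=15)
parser.disable_interspersed_args()  # stop at the first positional

general, rest = parser.parse_args(
    ['--timeout=5', 'install', '--user', 'INITools'])
print(general.timeout)  # 5.0
print(rest)             # ['install', '--user', 'INITools']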
@@ -1,261 +0,0 @@
"""Base option parser setup"""
from __future__ import absolute_import

import logging
import optparse
import sys
import textwrap
from distutils.util import strtobool

from pip._vendor.six import string_types

from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.compat import get_terminal_size

logger = logging.getLogger(__name__)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """

    def expand_default(self, option):
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.name = kwargs.pop('name')

        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as its value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                try:
                    val = strtobool(val)
                except ValueError:
                    error_msg = invalid_config_error_message(
                        option.action, key, val
                    )
                    self.error(error_msg)

            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, "%s\n" % msg)


def invalid_config_error_message(action, key, val):
    """Returns a better error message when invalid configuration option
    is provided."""
    if action in ('store_true', 'store_false'):
        return ("{0} is not a valid value for {1} option, "
                "please specify a boolean value like yes/no, "
                "true/false or 1/0 instead.").format(val, key)

    return ("{0} is not a valid value for {1} option, "
            "please specify a numerical value like 1/0 "
            "instead.").format(val, key)
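_update_defaults() above boils down to: pull raw string values from config files and the environment, coerce them by option action (strtobool for boolean actions, split() for append), and push the result into the parser's defaults before parsing. A reduced sketch using optparse's public set_defaults(); PIP_DEMO_QUIET is an invented variable for illustration only:

import os
from distutils.util import strtobool
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--quiet', dest='quiet', action='store_true', default=False)

raw = os.environ.get('PIP_DEMO_QUIET')
if raw is not None:
    # strtobool accepts 'yes'/'no', 'true'/'false', '1'/'0', ...
    # and raises ValueError for anything else
    parser.set_defaults(quiet=bool(strtobool(raw)))

options, _ = parser.parse_args([])
print(options.quiet)  # True when PIP_DEMO_QUIET=1 is exported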
@@ -1,8 +0,0 @@
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
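These codes are what a command's main() returns and what the console entry point passes to sys.exit(), so they surface as the process exit status ($? in a shell). A trivial sketch with local copies of the constants:

import sys

SUCCESS, ERROR = 0, 1  # local stand-ins for the demo

def main():
    return ERROR if '--fail' in sys.argv[1:] else SUCCESS

sys.exit(main())  # `python demo.py --fail; echo $?` prints 1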
@@ -1,79 +0,0 @@
"""
Package containing all pip commands
"""
from __future__ import absolute_import

from pip._internal.commands.completion import CompletionCommand
from pip._internal.commands.configuration import ConfigurationCommand
from pip._internal.commands.download import DownloadCommand
from pip._internal.commands.freeze import FreezeCommand
from pip._internal.commands.hash import HashCommand
from pip._internal.commands.help import HelpCommand
from pip._internal.commands.list import ListCommand
from pip._internal.commands.check import CheckCommand
from pip._internal.commands.search import SearchCommand
from pip._internal.commands.show import ShowCommand
from pip._internal.commands.install import InstallCommand
from pip._internal.commands.uninstall import UninstallCommand
from pip._internal.commands.wheel import WheelCommand

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Type  # noqa: F401
    from pip._internal.cli.base_command import Command  # noqa: F401

commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    ConfigurationCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]  # type: List[Type[Command]]

commands_dict = {c.name: c for c in commands_order}


def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""

    if ordered:
        cmditems = _sort_commands(commands_dict, commands_order)
    else:
        cmditems = commands_dict.items()

    for name, command_class in cmditems:
        yield (name, command_class.summary)


def get_similar_commands(name):
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return False


def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except ValueError:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
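get_similar_commands() is a thin wrapper over difflib.get_close_matches (default cutoff 0.6, closest match first); it simply returns the top candidate or False. Rerunning the same idea standalone:

from difflib import get_close_matches

commands = ['install', 'download', 'uninstall', 'freeze', 'list',
            'show', 'check', 'config', 'search', 'wheel', 'hash',
            'completion', 'help']

# closest match first; 'uninstall' also clears the 0.6 cutoff here
print(get_close_matches('instal', commands))   # ['install', 'uninstall']
print(get_close_matches('instal', commands)[0] or 'no guess')  # 'install'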
@@ -1,41 +0,0 @@
import logging

from pip._internal.cli.base_command import Command
from pip._internal.operations.check import (
    check_package_set, create_package_set_from_installed,
)

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting or parsing_probs:
            return 1
        else:
            logger.info("No broken requirements found.")
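run() above walks two mappings produced by the check operation: missing dependencies and version conflicts. The missing-dependency half in miniature, with invented data standing in for the installed package set:

installed = {'requests': '2.21.0', 'chardet': '3.0.4'}
requires = {'requests': ['chardet', 'urllib3', 'idna']}  # demo data

for project, deps in requires.items():
    for dep in deps:
        if dep not in installed:
            # same message shape as the logger.info call above
            print('%s %s requires %s, which is not installed.'
                  % (project, installed[project], dep))
# -> requests 2.21.0 requires urllib3, which is not installed.
# -> requests 2.21.0 requires idna, which is not installed.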
@ -1,94 +0,0 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.utils.misc import get_prog
|
||||
|
||||
BASE_COMPLETION = """
|
||||
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
|
||||
"""
|
||||
|
||||
COMPLETION_SCRIPTS = {
|
||||
'bash': """
|
||||
_pip_completion()
|
||||
{
|
||||
COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
|
||||
COMP_CWORD=$COMP_CWORD \\
|
||||
PIP_AUTO_COMPLETE=1 $1 ) )
|
||||
}
|
||||
complete -o default -F _pip_completion %(prog)s
|
||||
""",
|
||||
'zsh': """
|
||||
function _pip_completion {
|
||||
local words cword
|
||||
read -Ac words
|
||||
read -cn cword
|
||||
reply=( $( COMP_WORDS="$words[*]" \\
|
||||
COMP_CWORD=$(( cword-1 )) \\
|
||||
PIP_AUTO_COMPLETE=1 $words[1] ) )
|
||||
}
|
||||
compctl -K _pip_completion %(prog)s
|
||||
""",
|
||||
'fish': """
|
||||
function __fish_complete_pip
|
||||
set -lx COMP_WORDS (commandline -o) ""
|
||||
set -lx COMP_CWORD ( \\
|
||||
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
|
||||
)
|
||||
set -lx PIP_AUTO_COMPLETE 1
|
||||
string split \\ -- (eval $COMP_WORDS[1])
|
||||
end
|
||||
complete -fa "(__fish_complete_pip)" -c %(prog)s
|
||||
""",
|
||||
}
|
||||
|
||||
|
||||
class CompletionCommand(Command):
|
||||
"""A helper command to be used for command completion."""
|
||||
name = 'completion'
|
||||
summary = 'A helper command used for command completion.'
|
||||
ignore_require_venv = True
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(CompletionCommand, self).__init__(*args, **kw)
|
||||
|
||||
cmd_opts = self.cmd_opts
|
||||
|
||||
cmd_opts.add_option(
|
||||
'--bash', '-b',
|
||||
action='store_const',
|
||||
const='bash',
|
||||
dest='shell',
|
||||
help='Emit completion code for bash')
|
||||
cmd_opts.add_option(
|
||||
'--zsh', '-z',
|
||||
action='store_const',
|
||||
const='zsh',
|
||||
dest='shell',
|
||||
help='Emit completion code for zsh')
|
||||
cmd_opts.add_option(
|
||||
'--fish', '-f',
|
||||
action='store_const',
|
||||
const='fish',
|
||||
dest='shell',
|
||||
help='Emit completion code for fish')
|
||||
|
||||
self.parser.insert_option_group(0, cmd_opts)
|
||||
|
||||
def run(self, options, args):
|
||||
"""Prints the completion code of the given shell"""
|
||||
shells = COMPLETION_SCRIPTS.keys()
|
||||
shell_options = ['--' + shell for shell in sorted(shells)]
|
||||
if options.shell in shells:
|
||||
script = textwrap.dedent(
|
||||
COMPLETION_SCRIPTS.get(options.shell, '') % {
|
||||
'prog': get_prog(),
|
||||
}
|
||||
)
|
||||
print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
|
||||
else:
|
||||
sys.stderr.write(
|
||||
'ERROR: You must pass %s\n' % ' or '.join(shell_options)
|
||||
)
@@ -1,227 +0,0 @@
import logging
import os
import subprocess

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import Configuration, kinds
from pip._internal.exceptions import PipError
from pip._internal.locations import venv_config_file
from pip._internal.utils.misc import get_prog

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --venv are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen on the to the user file by
    default.
    """

    name = 'config'
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """

    summary = "Manage local and global configuration."

    def __init__(self, *args, **kwargs):
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--venv',
            dest='venv_file',
            action='store_true',
            default=False,
            help='Use the virtualenv configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        file_options = {
            kinds.USER: options.user_file,
            kinds.GLOBAL: options.global_file,
            kinds.VENV: options.venv_file
        }

        if sum(file_options.values()) == 0:
            if not need_value:
                return None
            # Default to user, unless there's a virtualenv file.
            elif os.path.exists(venv_config_file):
                return kinds.VENV
            else:
                return kinds.USER
        elif sum(file_options.values()) == 1:
            # There's probably a better expression for this.
            return [key for key in file_options if file_options[key]][0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --venv, --global) to perform."
        )

    def list_values(self, options, args):
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            logger.info("%s=%r", key, value)

    def get_name(self, options, args):
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        logger.info("%s", value)

    def set_name_value(self, options, args):
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def open_in_editor(self, options, args):
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.error(
                "Unable to save configuration. Please report this as a bug.",
                exc_info=1
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
@@ -1,176 +0,0 @@
from __future__ import absolute_import

import logging
import os

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmd_opts.add_option(cmdoptions.platform())
        cmd_opts.add_option(cmdoptions.python_version())
        cmd_opts.add_option(cmdoptions.implementation())
        cmd_opts.add_option(cmdoptions.abi())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        cmdoptions.check_dist_restriction(options)

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
@@ -1,96 +0,0 @@
from __future__ import absolute_import

import sys

from pip._internal.cache import WheelCache
from pip._internal.cli.base_command import Command
from pip._internal.models.format_control import FormatControl
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable package from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        format_control = FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        try:
            for line in freeze(**freeze_kwargs):
                sys.stdout.write(line + '\n')
        finally:
            wheel_cache.cleanup()
@@ -1,57 +0,0 @@
from __future__ import absolute_import

import hashlib
import logging
import sys

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            logger.info('%s:\n--hash=%s:%s',
                        path, algorithm, _hash_of_file(path, algorithm))


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
@@ -1,37 +0,0 @@
from __future__ import absolute_import

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        from pip._internal.commands import commands_dict, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
@@ -1,566 +0,0 @@
from __future__ import absolute_import

import errno
import logging
import operator
import os
import shutil
from optparse import SUPPRESS_HELP

from pip._vendor import pkg_resources

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.cli.status_codes import ERROR
from pip._internal.exceptions import (
    CommandError, InstallationError, PreviousBuildDirError,
)
from pip._internal.locations import distutils_scheme, virtualenv_no_global
from pip._internal.operations.check import check_install_conflicts
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import (
    ensure_dir, get_installed_version,
    protect_pip_from_modification_on_windows,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmd_opts.add_option(cmdoptions.platform())
        cmd_opts.add_option(cmdoptions.python_version())
        cmd_opts.add_option(cmdoptions.implementation())
        cmd_opts.add_option(cmdoptions.abi())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        cmdoptions.check_dist_restriction(options, check_target=True)

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                    check_supported_wheels=not options.target_dir,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    protect_pip_from_modification_on_windows(
                        modifying_pip=requirement_set.has_requirement("pip")
                    )

                    # Consider legacy and PEP517-using requirements separately
                    legacy_requirements = []
                    pep517_requirements = []
                    for req in requirement_set.requirements.values():
                        if req.use_pep517:
                            pep517_requirements.append(req)
                        else:
                            legacy_requirements.append(req)

                    # We don't build wheels for legacy requirements if we
                    # don't have wheel installed or we don't have a cache dir
                    try:
                        import wheel  # noqa: F401
                        build_legacy = bool(options.cache_dir)
                    except ImportError:
                        build_legacy = False

                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )

                    # Always build PEP 517 requirements
                    build_failures = wb.build(
                        pep517_requirements,
                        session=session, autobuilding=True
                    )

                    if build_legacy:
                        # We don't care about failures building legacy
                        # requirements, as we'll fall through to a direct
                        # install for those.
                        wb.build(
                            legacy_requirements,
                            session=session, autobuilding=True
                        )

                    # If we're using PEP 517, we cannot do a direct install
                    # so we fail here.
                    if build_failures:
                        raise InstallationError(
                            "Could not build wheels for {} which use"
                            " PEP 517 and cannot be installed directly".format(
                                ", ".join(r.name for r in build_failures)))

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    working_set = pkg_resources.WorkingSet(lib_locations)

                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, working_set=working_set
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        ddir = os.path.join(data_dir, item)
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )

    def _warn_about_conflicts(self, to_install):
        try:
            package_set, _dep_info = check_install_conflicts(to_install)
        except Exception:
            logger.error("Error checking for conflicts.", exc_info=True)
            return
        missing, conflicting = _dep_info

        # NOTE: There is some duplication here from pip check
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                logger.critical(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[1],
                )

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.critical(
                    "%s %s has requirement %s, but you'll have %s %s which is "
                    "incompatible.",
                    project_name, version, req, dep_name, dep_version,
                )


def get_lib_location_guesses(*args, **kwargs):
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme['purelib'], scheme['platlib']]


def create_env_error_message(error, show_traceback, using_user_site):
    """Format an error message for an EnvironmentError

    It may occur anytime during the execution of the install command.
    """
    parts = []

    # Mention the error if we are not going to show a traceback
    parts.append("Could not install packages due to an EnvironmentError")
    if not show_traceback:
        parts.append(": ")
        parts.append(str(error))
    else:
        parts.append(".")

    # Spilt the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if not using_user_site:
            parts.extend([
                user_option_part, " or ",
                permissions_part.lower(),
            ])
        else:
            parts.append(permissions_part)
        parts.append(".\n")

    return "".join(parts).strip() + "\n"
@@ -1,301 +0,0 @@
from __future__ import absolute_import

import json
import logging

from pip._vendor import six
from pip._vendor.six.moves import zip_longest

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)
from pip._internal.utils.packaging import get_installer

logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            session=session,
        )

    def run(self, options, args):
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
        )

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_package_listing(self, packages, options):
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)


def tabulate(vals):
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if options.verbose >= 1:
            row.append(get_installer(proj))

        data.append(row)

    return data, header


def format_for_json(packages, options):
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
@@ -1,135 +0,0 @@
from __future__ import absolute_import

import logging
import sys
import textwrap
from collections import OrderedDict

from pip._vendor import pkg_resources
from pip._vendor.packaging.version import parse as parse_version
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.download import PipXmlrpcTransport
from pip._internal.exceptions import CommandError
from pip._internal.models.index import PyPI
from pip._internal.utils.compat import get_terminal_size
from pip._internal.utils.logging import indent_log

logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())


def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    return max(versions, key=parse_version)
@@ -1,168 +0,0 @@
from __future__ import absolute_import

import logging
import os
from email.parser import FeedParser  # type: ignore

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
                results, list_files=options.files, verbose=options.verbose):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_files=False, verbose=False):
    """
    Print the informations from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            logger.info("---")

        name = dist.get('name', '')
        required_by = [
            pkg.project_name for pkg in pkg_resources.working_set
            if name in [required.name for required in pkg.requires()]
        ]

        logger.info("Name: %s", name)
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
|
|
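The metadata handling in search_packages_info() leans on the stdlib email parser, since PKG-INFO/METADATA are RFC 822-style header blocks. A minimal sketch with hypothetical metadata:

from email.parser import FeedParser

metadata = (
    "Metadata-Version: 2.1\n"
    "Name: example-pkg\n"
    "Summary: A demo package\n"
)
feed_parser = FeedParser()
feed_parser.feed(metadata)
msg = feed_parser.close()
# Header lookup on the resulting Message is case-insensitive.
print(msg.get('summary'))  # 'A demo package'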
@@ -1,78 +0,0 @@
from __future__ import absolute_import

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import install_req_from_line
from pip._internal.utils.misc import protect_pip_from_modification_on_windows


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            reqs_to_uninstall = {}
            for name in args:
                req = install_req_from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session):
                    if req.name:
                        reqs_to_uninstall[canonicalize_name(req.name)] = req
            if not reqs_to_uninstall:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )

            protect_pip_from_modification_on_windows(
                modifying_pip="pip" in reqs_to_uninstall
            )

            for req in reqs_to_uninstall.values():
                uninstall_pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                if uninstall_pathset:
                    uninstall_pathset.commit()
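Note why reqs_to_uninstall is keyed on canonicalize_name(): PEP 503 normalization collapses case and the '-', '_', '.' separators, so differently spelled requests for the same project dedupe to one entry. A minimal sketch, assuming the standalone packaging distribution:

from packaging.utils import canonicalize_name

for spelling in ('Foo_Bar', 'foo-bar', 'FOO.BAR'):
    print(canonicalize_name(spelling))  # all three print 'foo-bar'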
@@ -1,186 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )

                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    build_failures = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if len(build_failures) != 0:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()
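One subtlety in run() above: the temporary build directory is only auto-deleted when the user asked for neither --no-clean nor an explicit --build-dir. A minimal truth-table sketch of that expression, with hypothetical option values:

for no_clean, build_dir in [(False, None), (True, None), (False, '/tmp/build')]:
    build_delete = not (no_clean or build_dir)
    print(no_clean, build_dir, '->', build_delete)
# Only (False, None) yields True; any other combination keeps the directory.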
@@ -1,387 +0,0 @@
"""Configuration management setup

Some terminology:
- name
  As written in config files.
- value
  Value associated with a name
- key
  Name combined with its section (section.name)
- variant
  A single word describing where the configuration key-value pair came from
"""

import locale
import logging
import os

from pip._vendor import six
from pip._vendor.six.moves import configparser

from pip._internal.exceptions import (
    ConfigurationError, ConfigurationFileCouldNotBeLoaded,
)
from pip._internal.locations import (
    legacy_config_file, new_config_file, running_under_virtualenv,
    site_config_files, venv_config_file,
)
from pip._internal.utils.misc import ensure_dir, enum
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Any, Dict, Iterable, List, NewType, Optional, Tuple
    )

    RawConfigParser = configparser.RawConfigParser  # Shorthand
    Kind = NewType("Kind", str)

logger = logging.getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
    # type: (str) -> str
    """Make a name consistent regardless of source (environment or file)
    """
    name = name.lower().replace('_', '-')
    if name.startswith('--'):
        name = name[2:]  # only prefer long opts
    return name


def _disassemble_key(name):
    # type: (str) -> List[str]
    return name.split(".", 1)

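Taken together, _normalize_name() and _disassemble_key() define the "section.key-name" scheme the rest of this module relies on. A minimal sketch of the round trip:

def _normalize_name(name):
    name = name.lower().replace('_', '-')
    if name.startswith('--'):
        name = name[2:]  # only prefer long opts
    return name

key = 'global.' + _normalize_name('--Index_URL')
print(key)                # 'global.index-url'
print(key.split('.', 1))  # ['global', 'index-url'] -- what _disassemble_key returns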
# The kinds of configurations there are.
kinds = enum(
    USER="user",        # User Specific
    GLOBAL="global",    # System Wide
    VENV="venv",        # Virtual Environment Specific
    ENV="env",          # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)


class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        super(Configuration, self).__init__()

        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated  # type: bool
        self.load_only = load_only  # type: Optional[Kind]

        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
        ]

        self._ignore_env_names = ["version", "help"]

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "A file to edit needs to be specified"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.
        """
        self._ensure_have_load_only()

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.
        """
        self._ensure_have_load_only()

        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)

            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = six.next(section_iter)
                except StopIteration:
                    val = None

                if val is None:
                    parser.remove_section(section)

                self._mark_as_modified(fname, parser)
            else:
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)  # type: ignore

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        #       are not needed here.
        retval = {}

        for variant in self._override_order:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's a specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason="contains invalid {} characters".format(
                        locale.getpreferredencoding(False)
                    ),
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                yield key[4:].lower(), val

    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        # at the base we have any global configuration
        yield kinds.GLOBAL, list(site_config_files)

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, [legacy_config_file, new_config_file]

        # finally virtualenv configuration, trumping the others
        if running_under_virtualenv():
            yield kinds.VENV, [venv_config_file]

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
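The _dictionary property above is where the override order actually bites: later variants in _override_order overwrite earlier ones via dict.update(), so env-vars beat PIP_CONFIG_FILE, which beats venv, user, and global settings. A minimal sketch with hypothetical values:

config = {
    'global':  {'global.timeout': '5'},
    'user':    {'global.timeout': '15'},
    'venv':    {},
    'env':     {},
    'env-var': {'global.timeout': '60'},
}
retval = {}
for variant in ['global', 'user', 'venv', 'env', 'env-var']:
    retval.update(config[variant])
print(retval['global.timeout'])  # '60' -- the env-var value wins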
@@ -1,971 +0,0 @@
from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys

from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
#       why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL

import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
    display_path, format_size, get_installed_version, rmtree,
    split_auth_from_netloc, splitext, unpack_file,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Optional, Tuple, Dict, IO, Text, Union
    )
    from pip._internal.models.link import Link  # noqa: F401
    from pip._internal.utils.hashes import Hashes  # noqa: F401
    from pip._internal.vcs import AuthInfo  # noqa: F401

try:
    import ssl  # noqa
except ImportError:
    ssl = None

HAS_TLS = (ssl is not None) or IS_PYOPENSSL

__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )

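The format string at the end of user_agent() produces 'pip/<version> <json-blob>'. A minimal sketch of the same json.dumps settings (compact separators, sorted keys), with hypothetical data:

import json

data = {'installer': {'name': 'pip', 'version': '18.1'}, 'python': '3.7.0'}
ua = "{data[installer][name]}/{data[installer][version]} {json}".format(
    data=data,
    json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
print(ua)
# pip/18.1 {"installer":{"name":"pip","version":"18.1"},"python":"3.7.0"}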
class MultiDomainBasicAuth(AuthBase):

    def __init__(self, prompting=True):
        # type: (bool) -> None
        self.prompting = prompting
        self.passwords = {}  # type: Dict[str, AuthInfo]

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Split the credentials from the netloc.
        netloc, url_user_password = split_auth_from_netloc(parsed.netloc)

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Use the credentials embedded in the url if we have none stored
        if username is None:
            username, password = url_user_password

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp, **kwargs):
        # warn user that they provided incorrect credentials
        if resp.status_code == 401:
            logger.warning('401 Error, Credentials not correct for %s',
                           resp.request.url)

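MultiDomainBasicAuth.__call__() strips any user:password embedded in the URL before sending the request and re-attaches it as a Basic auth header. split_auth_from_netloc() is a pip internal; a rough, Python 3, stdlib-only sketch of the same split (not pip's exact implementation):

from urllib.parse import urlparse

parsed = urlparse('https://user:secret@pypi.example.org/simple/')
netloc = parsed.netloc.rsplit('@', 1)[-1]           # 'pypi.example.org'
user_password = (parsed.username, parsed.password)  # ('user', 'secret')
print(netloc, user_password)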
class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None

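LocalFSAdapter fakes an HTTP response for file:// URLs; the interesting part is deriving the response headers from the filesystem. A minimal stdlib sketch of those three headers, with a hypothetical local path:

import email.utils
import mimetypes
import os

pathname = '/tmp/example.whl'  # hypothetical local file
stats = os.stat(pathname)
headers = {
    "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
    "Content-Length": stats.st_size,
    "Last-Modified": email.utils.formatdate(stats.st_mtime, usegmt=True),
}
print(headers)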
class PipSession(requests.Session):

    timeout = None  # type: Optional[int]

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)

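The retry wiring in PipSession.__init__() is plain requests/urllib3 machinery and works the same outside pip. A minimal sketch:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retries = Retry(total=3, status_forcelist=[500, 503, 520, 527],
                backoff_factor=0.25)
session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retries))
session.mount("http://", HTTPAdapter(max_retries=retries))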
def get_file_content(url, comes_from=None, session=None):
    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    # type: (str) -> str
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    # type: (Union[str, Text]) -> str
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url


def is_archive_file(name):
    # type: (str) -> bool
    """Return True if `name` is considered an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False

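path_to_url() and url_to_path() above are near-inverses built on the stdlib pathname2url/url2pathname pair. A minimal Python 3 sketch of the round trip, with a hypothetical path:

from urllib.parse import urljoin, urlsplit
from urllib.request import pathname2url, url2pathname

path = '/tmp/some pkg/archive.tar.gz'  # hypothetical
url = urljoin('file:', pathname2url(path))
print(url)  # file:///tmp/some%20pkg/archive.tar.gz
_, netloc, urlpath, _, _ = urlsplit(url)
print(url2pathname(netloc + urlpath))  # /tmp/some pkg/archive.tar.gz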
def unpack_vcs_link(link, location):
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    # type: (Link) -> bool
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    # type: (Link) -> bool
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    # type: (Link) -> bool
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(
    resp,  # type: Response
    link,  # type: Link
    content_file,  # type: IO
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> None
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))

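hashes.check_against_chunks() in _download_url() verifies the file while it streams, without a second pass over the data. A rough stdlib sketch of the same idea (pip's Hashes class itself is richer):

import hashlib

def check_chunks(chunks, expected_sha256):
    h = hashlib.sha256()
    for chunk in chunks:
        h.update(chunk)  # hash while consuming; the chunks are written elsewhere
    if h.hexdigest() != expected_sha256:
        raise ValueError('hash mismatch')

check_chunks([b'hello ', b'world'],
             hashlib.sha256(b'hello world').hexdigest())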
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)


def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)

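Both unpack helpers above fall back on mimetypes.guess_type() to decide how to unpack a local file. For common sdist names it returns both a type and an encoding:

import mimetypes

print(mimetypes.guess_type('pkg-1.0.tar.gz'))  # ('application/x-tar', 'gzip')
print(mimetypes.guess_type('pkg-1.0.zip'))     # ('application/zip', None)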
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(
    link,  # type: Optional[Link]
    location,  # type: Optional[str]
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.

    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)


def _download_http_url(
    link,  # type: Link
    session,  # type: PipSession
    temp_dir,  # type: str
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type

def _check_download_dir(link, download_dir, hashes):
|
||||
# type: (Link, str, Hashes) -> Optional[str]
|
||||
""" Check download_dir for previously downloaded file with correct hash
|
||||
If a correct file is found return its path else None
|
||||
"""
|
||||
download_path = os.path.join(download_dir, link.filename)
|
||||
if os.path.exists(download_path):
|
||||
# If already downloaded, does its hash match?
|
||||
logger.info('File was already downloaded %s', download_path)
|
||||
if hashes:
|
||||
try:
|
||||
hashes.check_against_path(download_path)
|
||||
except HashMismatch:
|
||||
logger.warning(
|
||||
'Previously-downloaded file %s has bad hash. '
|
||||
'Re-downloading.',
|
||||
download_path
|
||||
)
|
||||
os.unlink(download_path)
|
||||
return None
|
||||
return download_path
|
||||
return None
|
|
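A minimal sketch of how the unpack_url() dispatch above is typically driven, assuming a PipSession and an illustrative archive URL (the URL and location below are hypothetical, not part of this diff):

    # Hypothetical caller, for illustration only.
    from pip._internal.download import PipSession, unpack_url
    from pip._internal.models.link import Link

    session = PipSession()
    link = Link('https://example.invalid/packages/example-1.0.tar.gz')
    # VCS links are unpacked/exported, file: URLs are copied locally,
    # and anything else falls through to unpack_http_url() with `session`.
    unpack_url(link, location='/tmp/example-build', session=session)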
@@ -1,274 +0,0 @@
"""Exceptions used throughout package"""
from __future__ import absolute_import

from itertools import chain, groupby, repeat

from pip._vendor.six import iteritems

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401


class PipError(Exception):
    """Base pip exception"""


class ConfigurationError(PipError):
    """General exception in configuration"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        self.errors.append(error)

    def __str__(self):
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return '\n'.join(lines)

    def __nonzero__(self):
        return bool(self.errors)

    def __bool__(self):
        return self.__nonzero__()


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # type: Optional[InstallRequirement]
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                         or 123451234512345123451234512345123451234512345
            Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            prefix = '    or'
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""


class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file
    """

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self):
        if self.fname is not None:
            message_part = " in {}.".format(self.fname)
        else:
            assert self.error is not None
            message_part = ".\n{}\n".format(self.error.message)
        return "Configuration file {}{}".format(self.reason, message_part)
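As a minimal sketch of how this hierarchy composes (illustrative, not part of the diff): HashErrors collects HashError subclasses, sorts them by their `order` class attribute, and renders each group under its `head` heading:

    # Illustration only.
    errs = HashErrors()
    errs.append(HashUnpinned())
    if errs:               # __bool__ is true once any error is collected
        print(str(errs))   # HashUnpinned.head, then '    unknown package'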
@@ -1,990 +0,0 @@
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import

import cgi
import itertools
import logging
import mimetypes
import os
import posixpath
import re
import sys
from collections import namedtuple

from pip._vendor import html5lib, requests, six
from pip._vendor.distlib.compat import unescape
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip._internal.exceptions import (
    BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
    UnsupportedWheel,
)
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported
from pip._internal.utils.compat import ipaddress
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path,
    redact_password_from_url,
)
from pip._internal.utils.packaging import check_requires_python
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import Wheel

if MYPY_CHECK_RUNNING:
    from logging import Logger  # noqa: F401
    from typing import (  # noqa: F401
        Tuple, Optional, Any, List, Union, Callable, Set, Sequence,
        Iterable, MutableMapping
    )
    from pip._vendor.packaging.version import _BaseVersion  # noqa: F401
    from pip._vendor.requests import Response  # noqa: F401
    from pip._internal.req import InstallRequirement  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401

    SecureOrigin = Tuple[str, str, Optional[str]]
    BuildTag = Tuple[Any, ...]  # either empty tuple or Tuple[int, str]
    CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]]

__all__ = ['FormatControl', 'PackageFinder']


SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]  # type: List[SecureOrigin]


logger = logging.getLogger(__name__)


def _match_vcs_scheme(url):
    # type: (str) -> Optional[str]
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    from pip._internal.vcs import VcsSupport
    for scheme in VcsSupport.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
            return scheme
    return None


def _is_url_like_archive(url):
    # type: (str) -> bool
    """Return whether the URL looks like an archive.
    """
    filename = Link(url).filename
    for bad_ext in ARCHIVE_EXTENSIONS:
        if filename.endswith(bad_ext):
            return True
    return False


class _NotHTML(Exception):
    def __init__(self, content_type, request_desc):
        # type: (str, str) -> None
        super(_NotHTML, self).__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc


def _ensure_html_header(response):
    # type: (Response) -> None
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
    """
    content_type = response.headers.get("Content-Type", "")
    if not content_type.lower().startswith("text/html"):
        raise _NotHTML(content_type, response.request.method)


class _NotHTTP(Exception):
    pass


def _ensure_html_response(url, session):
    # type: (str, PipSession) -> None
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
    if scheme not in {'http', 'https'}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    resp.raise_for_status()

    _ensure_html_header(resp)


def _get_html_response(url, session):
    # type: (str, PipSession) -> Response
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    """
    if _is_url_like_archive(url):
        _ensure_html_response(url, session=session)

    logger.debug('Getting page %s', url)

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly return cached data for
            # /simple/, because authors generally expect that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    resp.raise_for_status()

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp


def _handle_get_page_fail(
    link,  # type: Link
    reason,  # type: Union[str, Exception]
    meth=None  # type: Optional[Callable[..., None]]
):
    # type: (...) -> None
    if meth is None:
        meth = logger.debug
    meth("Could not fetch URL %s: %s - skipping", link, reason)


def _get_html_page(link, session=None):
    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib_parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)

    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP as exc:
        logger.debug(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by HEAD.', link,
        )
    except _NotHTML as exc:
        logger.debug(
            'Skipping page %s because the %s request got Content-Type: %s',
            link, exc.request_desc, exc.content_type,
        )
    except requests.HTTPError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, "connection error: %s" % exc)
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        return HTMLPage(resp.content, resp.url, resp.headers)
    return None


class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
        allow_all_prereleases=False,  # type: bool
        trusted_hosts=None,  # type: Optional[Iterable[str]]
        session=None,  # type: Optional[PipSession]
        format_control=None,  # type: Optional[FormatControl]
        platform=None,  # type: Optional[str]
        versions=None,  # type: Optional[List[str]]
        abi=None,  # type: Optional[str]
        implementation=None,  # type: Optional[str]
        prefer_binary=False  # type: bool
    ):
        # type: (...) -> None
        """Create a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        """
        if session is None:
            raise TypeError(
                "PackageFinder() missing 1 required keyword argument: "
                "'session'"
            )

        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        self.find_links = []  # type: List[str]
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            self.find_links.append(link)

        self.index_urls = index_urls

        # These are boring links that have already been logged somehow:
        self.logged_links = set()  # type: Set[Link]

        self.format_control = format_control or FormatControl(set(), set())

        # Domains that we won't emit warnings for when not using HTTPS
        self.secure_origins = [
            ("*", host, "*")
            for host in (trusted_hosts if trusted_hosts else [])
        ]  # type: List[SecureOrigin]

        # Do we want to allow _all_ pre-releases?
        self.allow_all_prereleases = allow_all_prereleases

        # The Session we'll use to make requests
        self.session = session

        # The valid tags to check potential found wheel candidates against
        self.valid_tags = get_supported(
            versions=versions,
            platform=platform,
            abi=abi,
            impl=implementation,
        )

        # Do we prefer old, but valid, binary dist over new source dist
        self.prefer_binary = prefer_binary

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not HAS_TLS:
            for link in itertools.chain(self.index_urls, self.find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

    def get_formatted_locations(self):
        # type: () -> str
        lines = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            lines.append(
                "Looking in indexes: {}".format(", ".join(
                    redact_password_from_url(url) for url in self.index_urls))
            )
        if self.find_links:
            lines.append(
                "Looking in links: {}".format(", ".join(self.find_links))
            )
        return "\n".join(lines)

    @staticmethod
    def _sort_locations(locations, expand_dir=False):
        # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files, urls)
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if os.path.isdir(path):
                    if expand_dir:
                        path = os.path.realpath(path)
                        for item in os.listdir(path):
                            sort_path(os.path.join(path, item))
                    elif is_file_url:
                        urls.append(url)
                    else:
                        logger.warning(
                            "Path '{0}' is ignored: "
                            "it is a directory.".format(path),
                        )
                elif os.path.isfile(path):
                    sort_path(path)
                else:
                    logger.warning(
                        "Url '%s' is ignored: it is neither a file "
                        "nor a directory.", url,
                    )
            elif is_url(url):
                # Only add url with clear scheme
                urls.append(url)
            else:
                logger.warning(
                    "Url '%s' is ignored. It is either a non-existing "
                    "path or lacks a specific scheme.", url,
                )

        return files, urls

    def _candidate_sort_key(self, candidate):
        # type: (InstallationCandidate) -> CandidateSortingKey
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        support_num = len(self.valid_tags)
        build_tag = tuple()  # type: BuildTag
        binary_preference = 0
        if candidate.location.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(candidate.location.filename)
            if not wheel.supported(self.valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            if self.prefer_binary:
                binary_preference = 1
            pri = -(wheel.support_index_min(self.valid_tags))
            if wheel.build_tag is not None:
                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            pri = -(support_num)
        return (binary_preference, candidate.version, build_tag, pri)

    def _validate_secure_origin(self, logger, location):
        # type: (Logger, Link) -> bool
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    # setting secure_origin[1] to proper Union[bytes, str]
                    # creates problems in other places
                    else secure_origin[1].decode("utf8")  # type: ignore
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False

    def _get_index_urls_locations(self, project_name):
        # type: (str) -> List[str]
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]

    def find_all_candidates(self, project_name):
        # type: (str) -> List[Optional[InstallationCandidate]]
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        """
        index_locations = self._get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True,
        )

        file_locations = (Link(url) for url in itertools.chain(
            index_file_loc, fl_file_loc,
        ))

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links.
        # We want to filter out anything which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)
        search = Search(project_name, canonical_name, formats)
        find_links_versions = self._package_versions(
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
            search
        )

        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(page.iter_links(), search)
                )

        file_versions = self._package_versions(file_locations, search)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.location.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return file_versions + find_links_versions + page_versions

    def find_requirement(self, req, upgrade):
        # type: (InstallRequirement, bool) -> Optional[Link]
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        all_candidates = self.find_all_candidates(req.name)

        # Filter out anything which doesn't match our specifier
        compatible_versions = set(
            req.specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                [str(c.version) for c in all_candidates],
                prereleases=(
                    self.allow_all_prereleases
                    if self.allow_all_prereleases else None
                ),
            )
        )
        applicable_candidates = [
            # Again, converting to str to deal with debundling.
            c for c in all_candidates if str(c.version) in compatible_versions
        ]

        if applicable_candidates:
            best_candidate = max(applicable_candidates,
                                 key=self._candidate_sort_key)
        else:
            best_candidate = None

        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
        else:
            installed_version = None

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                ', '.join(
                    sorted(
                        {str(c.version) for c in all_candidates},
                        key=parse_version,
                    )
                )
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and it's the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                ', '.join(sorted(compatible_versions, key=parse_version)) or
                "none",
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            ', '.join(sorted(compatible_versions, key=parse_version))
        )
        return best_candidate.location

    def _get_pages(self, locations, project_name):
        # type: (Iterable[Link], str) -> Iterable[HTMLPage]
        """
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        """
        seen = set()  # type: Set[Link]
        for location in locations:
            if location in seen:
                continue
            seen.add(location)

            page = _get_html_page(location, session=self.session)
            if page is None:
                continue

            yield page

    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    def _sort_links(self, links):
        # type: (Iterable[Link]) -> List[Link]
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen = set()  # type: Set[Link]
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(
        self,
        links,  # type: Iterable[Link]
        search  # type: Search
    ):
        # type: (...) -> List[Optional[InstallationCandidate]]
        result = []
        for link in self._sort_links(links):
            v = self._link_package_versions(link, search)
            if v is not None:
                result.append(v)
        return result

    def _log_skipped_link(self, link, reason):
        # type: (Link, str) -> None
        if link not in self.logged_links:
            logger.debug('Skipping link %s; %s', link, reason)
            self.logged_links.add(link)

    def _link_package_versions(self, link, search):
        # type: (Link, Search) -> Optional[InstallationCandidate]
        """Return an InstallationCandidate or None"""
        version = None
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                self._log_skipped_link(link, 'not a file')
                return None
            if ext not in SUPPORTED_EXTENSIONS:
                self._log_skipped_link(
                    link, 'unsupported archive format: %s' % ext,
                )
                return None
            if "binary" not in search.formats and ext == WHEEL_EXTENSION:
                self._log_skipped_link(
                    link, 'No binaries permitted for %s' % search.supplied,
                )
                return None
            if "macosx10" in link.path and ext == '.zip':
                self._log_skipped_link(link, 'macosx10 one')
                return None
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    self._log_skipped_link(link, 'invalid wheel filename')
                    return None
                if canonicalize_name(wheel.name) != search.canonical:
                    self._log_skipped_link(
                        link, 'wrong project name (not %s)' % search.supplied)
                    return None

                if not wheel.supported(self.valid_tags):
                    self._log_skipped_link(
                        link, 'it is not compatible with this Python')
                    return None

                version = wheel.version

        # This should be up by the search.ok_binary check, but see issue 2700.
        if "source" not in search.formats and ext != WHEEL_EXTENSION:
            self._log_skipped_link(
                link, 'No sources permitted for %s' % search.supplied,
            )
            return None

        if not version:
            version = _egg_info_matches(egg_info, search.canonical)
        if not version:
            self._log_skipped_link(
                link, 'Missing project version for %s' % search.supplied)
            return None

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                self._log_skipped_link(
                    link, 'Python version is incorrect')
                return None
        try:
            support_this_python = check_requires_python(link.requires_python)
        except specifiers.InvalidSpecifier:
            logger.debug("Package %s has an invalid Requires-Python entry: %s",
                         link.filename, link.requires_python)
            support_this_python = True

        if not support_this_python:
            logger.debug("The package %s is incompatible with the python "
                         "version in use. Acceptable python versions are: %s",
                         link, link.requires_python)
            return None
        logger.debug('Found link %s, version: %s', link, version)

        return InstallationCandidate(search.supplied, version, link)


def _find_name_version_sep(egg_info, canonical_name):
    # type: (str, str) -> int
    """Find the separator's index based on the package's canonical name.

    `egg_info` must be an egg info string for the given package, and
    `canonical_name` must be the package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> egg_info = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(egg_info, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Find all
    # occurrences of dashes; if the string in front of it matches the canonical
    # name, this is the one separating the name and version parts.
    for i, c in enumerate(egg_info):
        if c != "-":
            continue
        if canonicalize_name(egg_info[:i]) == canonical_name:
            return i
    raise ValueError("{} does not match {}".format(egg_info, canonical_name))


def _egg_info_matches(egg_info, canonical_name):
    # type: (str, str) -> Optional[str]
    """Pull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        version_start = _find_name_version_sep(egg_info, canonical_name) + 1
    except ValueError:
        return None
    version = egg_info[version_start:]
    if not version:
        return None
    return version


def _determine_base_url(document, page_url):
    """Determine the HTML document's base URL.

    This looks for a ``<base>`` tag in the HTML document. If present, its href
    attribute denotes the base URL of anchor tags in the document. If there is
    no such tag (or if it does not have a valid href attribute), the HTML
    file's URL is used as the base URL.

    :param document: An HTML document representation. The current
        implementation expects the result of ``html5lib.parse()``.
    :param page_url: The URL of the HTML document.
    """
    for base in document.findall(".//base"):
        href = base.get("href")
        if href is not None:
            return href
    return page_url


def _get_encoding_from_headers(headers):
    """Determine if we have any encoding information in our headers.
    """
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            return params['charset']
    return None


_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)


def _clean_link(url):
    # type: (str) -> str
    """Makes sure a link is fully encoded. That is, if a ' ' shows up in
    the link, it will be rewritten to %20 (while not over-quoting
    % or other characters)."""
    return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url)


class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(self, content, url, headers=None):
        # type: (bytes, str, MutableMapping[str, str]) -> None
        self.content = content
        self.url = url
        self.headers = headers

    def __str__(self):
        return redact_password_from_url(self.url)

    def iter_links(self):
        # type: () -> Iterable[Link]
        """Yields all links in the page"""
        document = html5lib.parse(
            self.content,
            transport_encoding=_get_encoding_from_headers(self.headers),
            namespaceHTMLElements=False,
        )
        base_url = _determine_base_url(document, self.url)
        for anchor in document.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                url = _clean_link(urllib_parse.urljoin(base_url, href))
                pyrequire = anchor.get('data-requires-python')
                pyrequire = unescape(pyrequire) if pyrequire else None
                yield Link(url, self.url, requires_python=pyrequire)


Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.

:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
    with 'binary' or 'source' or both in it.
"""
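For reference, a doctest-style sketch of the _clean_link() behaviour above (illustrative URL): any character outside the allowed set is percent-encoded, so a space becomes %20, while existing % escapes are left alone:

    >>> _clean_link('https://example.invalid/my package-1.0.tar.gz')
    'https://example.invalid/my%20package-1.0.tar.gz'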
@ -1,211 +0,0 @@
|
|||
"""Locations where we look for configs, install stuff, etc"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import platform
|
||||
import site
|
||||
import sys
|
||||
import sysconfig
|
||||
from distutils import sysconfig as distutils_sysconfig
|
||||
from distutils.command.install import SCHEME_KEYS # type: ignore
|
||||
|
||||
from pip._internal.utils import appdirs
|
||||
from pip._internal.utils.compat import WINDOWS, expanduser
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, Union, Dict, List, Optional # noqa: F401
|
||||
|
||||
|
||||
# Application Directories
|
||||
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
|
||||
|
||||
|
||||
DELETE_MARKER_MESSAGE = '''\
|
||||
This file is placed here by pip to indicate the source was put
|
||||
here by pip.
|
||||
|
||||
Once this package is successfully installed this source code will be
|
||||
deleted (unless you remove this file).
|
||||
'''
|
||||
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|
||||
|
||||
|
||||
def write_delete_marker_file(directory):
|
||||
# type: (str) -> None
|
||||
"""
|
||||
Write the pip delete marker file into this directory.
|
||||
"""
|
||||
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
|
||||
with open(filepath, 'w') as marker_fp:
|
||||
marker_fp.write(DELETE_MARKER_MESSAGE)
|
||||
|
||||
|
||||
def running_under_virtualenv():
|
||||
# type: () -> bool
|
||||
"""
|
||||
Return True if we're running inside a virtualenv, False otherwise.
|
||||
|
||||
"""
|
||||
if hasattr(sys, 'real_prefix'):
|
||||
return True
|
||||
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def virtualenv_no_global():
|
||||
# type: () -> bool
|
||||
"""
|
||||
Return True if in a venv and no system site packages.
|
||||
"""
|
||||
# this mirrors the logic in virtualenv.py for locating the
|
||||
# no-global-site-packages.txt file
|
||||
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
|
||||
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
|
||||
if running_under_virtualenv() and os.path.isfile(no_global_file):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
if running_under_virtualenv():
|
||||
src_prefix = os.path.join(sys.prefix, 'src')
|
||||
else:
|
||||
# FIXME: keep src in cwd for now (it is not a temporary folder)
|
||||
try:
|
||||
src_prefix = os.path.join(os.getcwd(), 'src')
|
||||
except OSError:
|
||||
# In case the current working directory has been renamed or deleted
|
||||
sys.exit(
|
||||
"The folder you are executing pip from can no longer be found."
|
||||
)
|
||||
|
||||
# under macOS + virtualenv sys.prefix is not properly resolved
|
||||
# it is something like /path/to/python/bin/..
|
||||
# Note: using realpath due to tmp dirs on OSX being symlinks
|
||||
src_prefix = os.path.abspath(src_prefix)
|
||||
|
||||
# FIXME doesn't account for venv linked to global site-packages
|
||||
|
||||
site_packages = sysconfig.get_path("purelib") # type: Optional[str]
|
||||
|
||||
# This is because of a bug in PyPy's sysconfig module, see
|
||||
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
|
||||
# for more information.
|
||||
if platform.python_implementation().lower() == "pypy":
|
||||
site_packages = distutils_sysconfig.get_python_lib()
|
||||
try:
|
||||
# Use getusersitepackages if this is present, as it ensures that the
|
||||
# value is initialised properly.
|
||||
user_site = site.getusersitepackages()
|
||||
except AttributeError:
|
||||
user_site = site.USER_SITE
|
||||
user_dir = expanduser('~')
|
||||
if WINDOWS:
|
||||
bin_py = os.path.join(sys.prefix, 'Scripts')
|
||||
bin_user = os.path.join(user_site, 'Scripts')
|
||||
# buildout uses 'bin' on Windows too?
|
||||
if not os.path.exists(bin_py):
|
||||
bin_py = os.path.join(sys.prefix, 'bin')
|
||||
bin_user = os.path.join(user_site, 'bin')
|
||||
|
||||
config_basename = 'pip.ini'
|
||||
|
||||
legacy_storage_dir = os.path.join(user_dir, 'pip')
|
||||
legacy_config_file = os.path.join(
|
||||
legacy_storage_dir,
|
||||
config_basename,
|
||||
)
|
||||
else:
|
||||
bin_py = os.path.join(sys.prefix, 'bin')
|
||||
bin_user = os.path.join(user_site, 'bin')
|
||||
|
||||
config_basename = 'pip.conf'
|
||||
|
||||
legacy_storage_dir = os.path.join(user_dir, '.pip')
|
||||
legacy_config_file = os.path.join(
|
||||
legacy_storage_dir,
|
||||
config_basename,
|
||||
)
|
||||
# Forcing to use /usr/local/bin for standard macOS framework installs
|
||||
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
|
||||
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
|
||||
bin_py = '/usr/local/bin'
|
||||
|
||||
site_config_files = [
|
||||
os.path.join(path, config_basename)
|
||||
for path in appdirs.site_config_dirs('pip')
|
||||
]
|
||||
|
||||
venv_config_file = os.path.join(sys.prefix, config_basename)
|
||||
new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
|
||||
|
||||
|
||||
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    # type:(str, bool, str, str, bool, str) -> dict
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    d.parse_config_files()
    # NOTE: Ignoring type since mypy can't find attributes on 'Command'
    i = d.get_command_obj('install', create=True)  # type: Any
    assert i is not None
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config

    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    if 'install_lib' in d.get_option_dict('install'):  # type: ignore
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
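
distutils_scheme() above returns one path per SCHEME_KEYS entry (imported
earlier in this module from distutils.command.install: purelib, platlib,
headers, scripts, data). A quick illustrative call, assuming a pip 19-era
install where the module lives at pip._internal.locations; 'example-pkg' is
a made-up name:

from pip._internal.locations import distutils_scheme

scheme = distutils_scheme('example-pkg')
for key, path in sorted(scheme.items()):
    print(key, '->', path)
# purelib/platlib point into site-packages, scripts into bin/ (or Scripts\)
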
@@ -1,2 +0,0 @@
"""A package that contains models that represent entities.
"""

@@ -1,31 +0,0 @@
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._vendor.packaging.version import _BaseVersion  # noqa: F401
    from pip._internal.models.link import Link  # noqa: F401
    from typing import Any, Union  # noqa: F401


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """

    def __init__(self, project, version, location):
        # type: (Any, str, Link) -> None
        self.project = project
        self.version = parse_version(version)  # type: _BaseVersion
        self.location = location

        super(InstallationCandidate, self).__init__(
            key=(self.project, self.version, self.location),
            defining_class=InstallationCandidate
        )

    def __repr__(self):
        # type: () -> str
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.project, self.version, self.location,
        )
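
Because InstallationCandidate parses its version with packaging's
parse_version and KeyBasedCompareMixin compares by the (project, version,
location) key, candidates order by real version semantics rather than by
string. A small sketch, assuming the pip 19-era module path (location is
simplified to None, so the tuple comparison never reaches it):

from pip._internal.models.candidate import InstallationCandidate

old = InstallationCandidate('demo', '1.9', None)
new = InstallationCandidate('demo', '1.10', None)
assert new > old  # plain string comparison would put '1.10' before '1.9'
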
@@ -1,73 +0,0 @@
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, FrozenSet  # noqa: F401


class FormatControl(object):
    """Helper for managing formats from which a package can be installed.
    """

    def __init__(self, no_binary=None, only_binary=None):
        # type: (Optional[Set], Optional[Set]) -> None
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other):
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value, target, other):
        # type: (str, Optional[Set], Optional[Set]) -> None
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name):
        # type: (str) -> FrozenSet
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
        return frozenset(result)

    def disallow_binaries(self):
        # type: () -> None
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
        )
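
A sketch of the :all:/:none: handling in handle_mutual_excludes() above,
assuming the pip 19-era module path. ':all:' wipes both sets, a later
':none:' clears the target again, and the remaining names are moved into
the target set:

from pip._internal.models.format_control import FormatControl

fc = FormatControl(no_binary=set(), only_binary=set())
fc.handle_mutual_excludes(':all:,:none:,numpy', fc.no_binary, fc.only_binary)
print(fc.no_binary)                        # {'numpy'}
print(fc.get_allowed_formats('numpy'))     # frozenset({'source'})
print(fc.get_allowed_formats('requests'))  # frozenset({'binary', 'source'})
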
@@ -1,31 +0,0 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class PackageIndex(object):
    """Represents a Package Index and provides easier access to endpoints
    """

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super(PackageIndex, self).__init__()
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        return urllib_parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
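
The two module-level instances above resolve their endpoints with urljoin,
so (assuming the pip 19-era module path):

from pip._internal.models.index import PyPI

print(PyPI.netloc)      # 'pypi.org'
print(PyPI.simple_url)  # 'https://pypi.org/simple'
print(PyPI.pypi_url)    # 'https://pypi.org/pypi'
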
@@ -1,163 +0,0 @@
import posixpath
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.download import path_to_url
from pip._internal.utils.misc import (
    WHEEL_EXTENSION, redact_password_from_url, splitext,
)
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Tuple, Union, Text  # noqa: F401
    from pip._internal.index import HTMLPage  # noqa: F401


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """

    def __init__(self, url, comes_from=None, requires_python=None):
        # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None
        """
        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None

        super(Link, self).__init__(
            key=(self.url),
            defining_class=Link
        )

    def __str__(self):
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        else:
            rp = ''
        if self.comes_from:
            return '%s (from %s)%s' % (redact_password_from_url(self.url),
                                       self.comes_from, rp)
        else:
            return redact_password_from_url(str(self.url))

    def __repr__(self):
        return '<Link %s>' % self

    @property
    def filename(self):
        # type: () -> str
        _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
        name = posixpath.basename(path.rstrip('/')) or netloc
        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        # type: () -> str
        return urllib_parse.urlsplit(self.url)[0]

    @property
    def netloc(self):
        # type: () -> str
        return urllib_parse.urlsplit(self.url)[1]

    @property
    def path(self):
        # type: () -> str
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])

    def splitext(self):
        # type: () -> Tuple[str, str]
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        # type: () -> str
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # type: () -> str
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        # type: () -> Optional[str]
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        # type: () -> Optional[str]
        match = self._subdirectory_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self.url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self.url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # type: () -> Optional[str]
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_wheel(self):
        # type: () -> bool
        return self.ext == WHEEL_EXTENSION

    @property
    def is_artifact(self):
        # type: () -> bool
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        from pip._internal.vcs import vcs

        if self.scheme in vcs.all_schemes:
            return False

        return True
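
A self-contained sketch of the fragment parsing above, reusing the same
regular expressions on a made-up wheel URL (the URL and project name are
hypothetical):

import re

egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')

url = ('https://files.example.test/demo-1.0-py2.py3-none-any.whl'
       '#sha256=0f00ba&egg=demo')
print(hash_re.search(url).group(1, 2))       # ('sha256', '0f00ba')
print(egg_fragment_re.search(url).group(1))  # 'demo'
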
@@ -1,155 +0,0 @@
"""Validation of dependencies of packages
"""

import logging
from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.operations.prepare import make_abstract_dist
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from typing import (  # noqa: F401
        Any, Callable, Dict, Optional, Set, Tuple, List
    )

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logging.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    if should_ignore is None:
        def should_ignore(name):
            return False

    missing = dict()
    conflicting = dict()

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install.
    """

    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        dist = make_abstract_dist(inst_req).dist()
        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[str], PackageSet) -> Set[str]
    packages_affected = set(would_be_installed)

    for package_name in package_set:
        if package_name in packages_affected:
            continue

        for req in package_set[package_name].requires:
            if canonicalize_name(req.name) in packages_affected:
                packages_affected.add(package_name)
                break

    return packages_affected
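
A sketch of check_package_set() on a hand-built PackageSet, assuming the
pip 19-era module path. The `requires` entries are pkg_resources
Requirement objects, matching what dist.requires() returns:

from pip._vendor.pkg_resources import Requirement
from pip._internal.operations.check import PackageDetails, check_package_set

package_set = {
    'a': PackageDetails('1.0', [Requirement.parse('b>=2.0')]),
    'b': PackageDetails('1.5', []),
    'c': PackageDetails('1.0', [Requirement.parse('d')]),
}
missing, conflicting = check_package_set(package_set)
print(missing)      # 'c' is missing its dependency 'd'
print(conflicting)  # 'a' conflicts: installed b 1.5 does not satisfy b>=2.0
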
@@ -1,247 +0,0 @@
from __future__ import absolute_import

import collections
import logging
import os
import re

from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
    )
    from pip._internal.cache import WheelCache  # noqa: F401
    from pip._vendor.pkg_resources import (  # noqa: F401
        Distribution, Requirement
    )

    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]


logger = logging.getLogger(__name__)


def freeze(
    requirement=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    local_only=None,  # type: Optional[bool]
    user_only=None,  # type: Optional[bool]
    skip_regex=None,  # type: Optional[str]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    for link in find_links:
        yield '-f %s' % link
    installations = {}  # type: Dict[str, FrozenRequirement]
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(dist)
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()  # type: Set[str]
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but "
                                "package %r is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(), line_req.name
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()


def get_requirement_info(dist):
    # type: (Distribution) -> RequirementInfo
    """
    Compute and return values (req, editable, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist_is_editable(dist):
        return (None, False, [])

    location = os.path.normcase(os.path.abspath(dist.location))

    from pip._internal.vcs import vcs, RemoteNotFoundError
    vc_type = vcs.get_backend_type(location)

    if not vc_type:
        req = dist.as_requirement()
        logger.debug(
            'No VCS found for editable requirement {!r} in: {!r}', req,
            location,
        )
        comments = [
            '# Editable install with no version control ({})'.format(req)
        ]
        return (location, True, comments)

    try:
        req = vc_type.get_src_requirement(location, dist.project_name)
    except RemoteNotFoundError:
        req = dist.as_requirement()
        comments = [
            '# Editable {} install with no remote ({})'.format(
                vc_type.__name__, req,
            )
        ]
        return (location, True, comments)

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            vc_type.name,
        )
        return (None, True, [])

    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
    else:
        if req is not None:
            return (req, True, [])

    logger.warning(
        'Could not determine repository location of %s', location
    )
    comments = ['## !! Could not determine repository location']

    return (None, False, comments)


class FrozenRequirement(object):
    def __init__(self, name, req, editable, comments=()):
        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> FrozenRequirement
        req, editable, comments = get_requirement_info(dist)
        if req is None:
            req = dist.as_requirement()

        return cls(dist.project_name, req, editable, comments=comments)

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
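
freeze() above is a generator of requirement lines; pip's own `pip freeze`
command is essentially this loop (assuming the pip 19-era module path):

from pip._internal.operations.freeze import freeze

for line in freeze(local_only=True):
    print(line)
# e.g. 'requests==2.21.0', with '-e <vcs-url>' lines for editable installs
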
@@ -1,413 +0,0 @@
"""Prepares a distribution for installation
"""

import logging
import os

from pip._vendor import pkg_resources, requests

from pip._internal.build_env import BuildEnvironment
from pip._internal.download import (
    is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
)
from pip._internal.exceptions import (
    DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
    PreviousBuildDirError, VcsHashUnsupported,
)
from pip._internal.utils.compat import expanduser
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import display_path, normalize_path
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import Any, Optional  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401
    from pip._internal.req.req_tracker import RequirementTracker  # noqa: F401

logger = logging.getLogger(__name__)


def make_abstract_dist(req):
    # type: (InstallRequirement) -> DistAbstraction
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req.editable:
        return IsSDist(req)
    elif req.link and req.link.is_wheel:
        return IsWheel(req)
    else:
        return IsSDist(req)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        # type: (InstallRequirement) -> None
        self.req = req  # type: InstallRequirement

    def dist(self):
        # type: () -> Any
        """Return a setuptools Dist object."""
        raise NotImplementedError

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError


class IsWheel(DistAbstraction):

    def dist(self):
        # type: () -> pkg_resources.Distribution
        return list(pkg_resources.find_distributions(
            self.req.source_dir))[0]

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self):
        return self.req.get_dist()

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        # Prepare for building. We need to:
        #   1. Load pyproject.toml (if it exists)
        #   2. Set up the build environment

        self.req.load_pyproject_toml()
        should_isolate = self.req.use_pep517 and build_isolation

        def _raise_conflicts(conflicting_with, conflicting_reqs):
            raise InstallationError(
                "Some build dependencies for %s conflict with %s: %s." % (
                    self.req, conflicting_with, ', '.join(
                        '%s is incompatible with %s' % (installed, wanted)
                        for installed, wanted in sorted(conflicting))))

        if should_isolate:
            # Isolate in a BuildEnvironment and install the build-time
            # requirements.
            self.req.build_env = BuildEnvironment()
            self.req.build_env.install_requirements(
                finder, self.req.pyproject_requires, 'overlay',
                "Installing build dependencies"
            )
            conflicting, missing = self.req.build_env.check_requirements(
                self.req.requirements_to_check
            )
            if conflicting:
                _raise_conflicts("PEP 517/518 supported requirements",
                                 conflicting)
            if missing:
                logger.warning(
                    "Missing build requirements in pyproject.toml for %s.",
                    self.req,
                )
                logger.warning(
                    "The project does not specify a build backend, and "
                    "pip cannot fall back to setuptools without %s.",
                    " and ".join(map(repr, sorted(missing)))
                )
            # Install any extra build dependencies that the backend requests.
            # This must be done in a second pass, as the pyproject.toml
            # dependencies must be installed before we can call the backend.
            with self.req.build_env:
                # We need to have the env active when calling the hook.
                self.req.spin_message = "Getting requirements to build wheel"
                reqs = self.req.pep517_backend.get_requires_for_build_wheel()
            conflicting, missing = self.req.build_env.check_requirements(reqs)
            if conflicting:
                _raise_conflicts("the backend dependencies", conflicting)
            self.req.build_env.install_requirements(
                finder, missing, 'normal',
                "Installing backend dependencies"
            )

        self.req.prepare_metadata()
        self.req.assert_source_matches_version()


class Installed(DistAbstraction):

    def dist(self):
        # type: () -> pkg_resources.Distribution
        return self.req.satisfied_by

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        pass


class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        progress_bar,  # type: str
        build_isolation,  # type: bool
        req_tracker  # type: RequirementTracker
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker

        # Where still packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation

    @property
    def _download_should_save(self):
        # type: () -> bool
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
        session,  # type: PipSession
        finder,  # type: PackageFinder
        upgrade_allowed,  # type: bool
        require_hashes  # type: bool
    ):
        # type: (...) -> DistAbstraction
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
        require_hashes,  # type: bool
        use_user_site,  # type: bool
        finder  # type: PackageFinder
    ):
        # type: (...) -> DistAbstraction
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(self, req, require_hashes, skip_reason):
        # type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to %r" % (req.satisfied_by,)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = Installed(req)

        return abstract_dist
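
The dispatch in make_abstract_dist() above only looks at `editable` and
`link.is_wheel`; editable requirements and sdists share IsSDist, and only
wheel links get IsWheel. A self-contained sketch with hypothetical stub
objects (not pip API):

from collections import namedtuple

FakeLink = namedtuple('FakeLink', 'is_wheel')
FakeReq = namedtuple('FakeReq', 'editable link')

def dispatch(req):
    # Mirrors the branches of make_abstract_dist(), returning names only.
    if req.editable:
        return 'IsSDist'
    elif req.link and req.link.is_wheel:
        return 'IsWheel'
    return 'IsSDist'

print(dispatch(FakeReq(True, None)))             # IsSDist
print(dispatch(FakeReq(False, FakeLink(True))))  # IsWheel
print(dispatch(FakeReq(False, FakeLink(False)))) # IsSDist
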