From ea5044dda26f3ef18d46f139c4d9eb97baec5ba8 Mon Sep 17 00:00:00 2001
From: Jono Yang
Date: Mon, 17 Jul 2023 13:28:38 -0700
Subject: [PATCH 01/81] Create script to update repo skeleton #80

Signed-off-by: Jono Yang

---
 etc/scripts/update_skeleton.py | 104 +++++++++++++++++++++++++++++++++
 1 file changed, 104 insertions(+)
 create mode 100644 etc/scripts/update_skeleton.py

diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py
new file mode 100644
index 0000000..635898b
--- /dev/null
+++ b/etc/scripts/update_skeleton.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) nexB Inc. and others. All rights reserved.
+# ScanCode is a trademark of nexB Inc.
+# SPDX-License-Identifier: Apache-2.0
+# See http://www.apache.org/licenses/LICENSE-2.0 for the license text.
+# See https://github.com/nexB/skeleton for support or download.
+# See https://aboutcode.org for more information about nexB OSS projects.
+#
+
+from pathlib import Path
+import os
+import subprocess
+
+import click
+
+
+NEXB_PUBLIC_REPO_NAMES = [
+    "aboutcode-toolkit",
+    "ahocode",
+    "bitcode",
+    "clearcode-toolkit",
+    "commoncode",
+    "container-inspector",
+    "debian-inspector",
+    "deltacode",
+    "elf-inspector",
+    "extractcode",
+    "fetchcode",
+    "gemfileparser2",
+    "gh-issue-sandbox",
+    "go-inspector",
+    "heritedcode",
+    "license-expression",
+    "license_copyright_pipeline",
+    "nuget-inspector",
+    "pip-requirements-parser",
+    "plugincode",
+    "purldb",
+    "pygmars",
+    "python-inspector",
+    "sanexml",
+    "saneyaml",
+    "scancode-analyzer",
+    "scancode-toolkit-contrib",
+    "scancode-toolkit-reference-scans",
+    "thirdparty-toolkit",
+    "tracecode-toolkit",
+    "tracecode-toolkit-strace",
+    "turbo-spdx",
+    "typecode",
+    "univers",
+]
+
+
+@click.command()
+@click.help_option("-h", "--help")
+def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES):
+    """
+    Update the project files of nexB projects that use the skeleton.
+
+    This script will:
+    - Clone the repo
+    - Add the skeleton repo as a new remote
+    - Create a new branch named "update-skeleton-files"
+    - Merge the new skeleton files into the "update-skeleton-files" branch
+
+    The user will need to save the merge commit messages that pop up when
+    running this script, and to resolve the merge conflicts on repos that
+    have them.
+ """ + + # Create working directory + work_dir_path = Path("/tmp/update_skeleton/") + if not os.path.exists(work_dir_path): + os.makedirs(work_dir_path, exist_ok=True) + + for repo_name in repo_names: + # Move to work directory + os.chdir(work_dir_path) + + # Clone repo + repo_git = f"git@github.com:nexB/{repo_name}.git" + subprocess.run(["git", "clone", repo_git]) + + # Go into cloned repo + os.chdir(work_dir_path / repo_name) + + # Add skeleton as an origin + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + + # Fetch skeleton files + subprocess.run(["git", "fetch", "skeleton"]) + + # Create and checkout new branch + subprocess.run(["git", "checkout", "-b", "update-skeleton-files"]) + + # Merge skeleton files into the repo + subprocess.run(["git", "merge", "skeleton/main", "--allow-unrelated-histories"]) + + +if __name__ == "__main__": + update_skeleton_files() From 5ab9b3a15e0095a8186ca3870cb9f9eade83833d Mon Sep 17 00:00:00 2001 From: Omkar Phansopkar Date: Wed, 18 Oct 2023 15:42:56 +0530 Subject: [PATCH 02/81] Added docs server script, dark mode & copybutton for docs Signed-off-by: Omkar Phansopkar --- .github/workflows/docs-ci.yml | 3 --- docs/Makefile | 8 ++++++++ docs/make.bat | 12 ++++++++++++ docs/scripts/doc8_style_check.sh | 0 docs/source/conf.py | 4 ++++ setup.cfg | 3 +++ 6 files changed, 27 insertions(+), 3 deletions(-) mode change 100644 => 100755 docs/scripts/doc8_style_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 511b7c2..ada779b 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -20,9 +20,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Give permission to run scripts - run: chmod +x ./docs/scripts/doc8_style_check.sh - - name: Install Dependencies run: pip install -e .[docs] diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf..788b039 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,6 +5,7 @@ # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build +SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source BUILDDIR = build @@ -14,6 +15,13 @@ help: .PHONY: help Makefile +# Run the development server using sphinx-autobuild +docs: + @echo + @echo "Starting up the docs server..." + @echo + $(SPHINXAUTOBUILD) --port 8000 --watch ${SOURCEDIR} $(SOURCEDIR) "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) + # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile diff --git a/docs/make.bat b/docs/make.bat index 6247f7e..4a3c1a4 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -7,11 +7,16 @@ REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) +if "%SPHINXAUTOBUILD%" == "" ( + set SPHINXAUTOBUILD=sphinx-autobuild +) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help +if "%1" == "docs" goto docs + %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. @@ -28,6 +33,13 @@ if errorlevel 9009 ( %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end +:docs +@echo +@echo Starting up the docs server... 
+@echo +%SPHINXAUTOBUILD% --port 8000 --watch %SOURCEDIR% %SOURCEDIR% %BUILDDIR%\html %SPHINXOPTS% %O% +goto end + :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh old mode 100644 new mode 100755 diff --git a/docs/source/conf.py b/docs/source/conf.py index 918d62c..54e5e66 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -30,6 +30,10 @@ extensions = [ "sphinx.ext.intersphinx", "sphinx_reredirects", + 'sphinx_rtd_theme', + "sphinx_rtd_dark_mode", + "sphinx.ext.extlinks", + "sphinx_copybutton", ] diff --git a/setup.cfg b/setup.cfg index d6c7da7..bd0e58a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -62,4 +62,7 @@ docs = sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 doc8>=0.11.2 + sphinx-autobuild + sphinx-rtd-dark-mode>=1.3.0 + sphinx-copybutton From 0a9d983650bf042a5bd2c277711b637979e566f1 Mon Sep 17 00:00:00 2001 From: "John M. Horan" Date: Mon, 20 Nov 2023 16:46:54 -0800 Subject: [PATCH 03/81] Update CSS to widen page and handle mobile #84 Reference: https://github.com/nexB/skeleton/issues/84 Signed-off-by: John M. Horan --- docs/source/_static/theme_overrides.css | 363 +----------------- .../_static/theme_overrides_SUPERSEDED.css | 353 +++++++++++++++++ docs/source/conf.py | 15 +- 3 files changed, 380 insertions(+), 351 deletions(-) create mode 100644 docs/source/_static/theme_overrides_SUPERSEDED.css diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css index 9662d63..de5ae43 100644 --- a/docs/source/_static/theme_overrides.css +++ b/docs/source/_static/theme_overrides.css @@ -1,353 +1,26 @@ -body { - color: #000000; -} - -p { - margin-bottom: 10px; -} - -.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { - margin-bottom: 10px; -} - -.custom_header_01 { - color: #cc0000; - font-size: 22px; - font-weight: bold; - line-height: 50px; -} - -h1, h2, h3, h4, h5, h6 { - margin-bottom: 20px; - margin-top: 20px; -} - -h5 { - font-size: 18px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -h6 { - font-size: 15px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -/* custom admonitions */ -/* success */ -.custom-admonition-success .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-success.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* important */ -.custom-admonition-important .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #000000; -} -div.custom-admonition-important.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* caution */ -.custom-admonition-caution .admonition-title { - color: #000000; - background: #ffff99; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #e8e8e8; -} -div.custom-admonition-caution.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* note */ -.custom-admonition-note .admonition-title { - color: #ffffff; - background: #006bb3; - border-radius: 5px 5px 0px 0px; -} 
-div.custom-admonition-note.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* todo */ -.custom-admonition-todo .admonition-title { - color: #000000; - background: #cce6ff; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #99ccff; -} -div.custom-admonition-todo.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #99ccff; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* examples */ -.custom-admonition-examples .admonition-title { - color: #000000; - background: #ffe6cc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #d8d8d8; -} -div.custom-admonition-examples.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - +/* this is the container for the pages */ .wy-nav-content { max-width: 100%; - padding-right: 100px; - padding-left: 100px; - background-color: #f2f2f2; -} - -div.rst-content { - background-color: #ffffff; - border: solid 1px #e5e5e5; - padding: 20px 40px 20px 40px; -} - -.rst-content .guilabel { - border: 1px solid #ffff99; - background: #ffff99; - font-size: 100%; - font-weight: normal; - border-radius: 4px; - padding: 2px 0px; - margin: auto 2px; - vertical-align: middle; -} - -.rst-content kbd { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - border: solid 1px #d8d8d8; - background-color: #f5f5f5; - padding: 0px 3px; - border-radius: 3px; -} - -.wy-nav-content-wrap a { - color: #0066cc; - text-decoration: none; -} -.wy-nav-content-wrap a:hover { - color: #0099cc; - text-decoration: underline; -} - -.wy-nav-top a { - color: #ffffff; -} - -/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ -.wy-table-responsive table td { - white-space: normal !important; -} - -.rst-content table.docutils td, -.rst-content table.docutils th { - padding: 5px 10px 5px 10px; -} -.rst-content table.docutils td p, -.rst-content table.docutils th p { - font-size: 14px; - margin-bottom: 0px; -} -.rst-content table.docutils td p cite, -.rst-content table.docutils th p cite { - font-size: 14px; - background-color: transparent; -} - -.colwidths-given th { - border: solid 1px #d8d8d8 !important; -} -.colwidths-given td { - border: solid 1px #d8d8d8 !important; -} - -/*handles single-tick inline code*/ -.wy-body-for-nav cite { - color: #000000; - background-color: transparent; - font-style: normal; - font-family: "Courier New"; - font-size: 13px; - padding: 3px 3px 3px 3px; -} - -.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - font-size: 13px; - overflow: visible; - white-space: pre-wrap; - color: #000000; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] { - background-color: #f8f8f8; - border: solid 1px #e8e8e8; -} - -/* This enables inline code to wrap. 
*/ -code, .rst-content tt, .rst-content code { - white-space: pre-wrap; - padding: 2px 3px 1px; - border-radius: 3px; - font-size: 13px; - background-color: #ffffff; -} - -/* use this added class for code blocks attached to bulleted list items */ -.highlight-top-margin { - margin-top: 20px !important; -} - -/* change color of inline code block */ -span.pre { - color: #e01e5a; -} - -.wy-body-for-nav blockquote { - margin: 1em 0; - padding-left: 1em; - border-left: 4px solid #ddd; - color: #000000; -} - -/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ -.rst-content .section ol p, .rst-content .section ul p { - margin-bottom: 0px; -} - -/* add spacing between bullets for legibility */ -.rst-content .section ol li, .rst-content .section ul li { - margin-bottom: 5px; -} - -.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { - margin-top: 5px; -} - -/* but exclude the toctree bullets */ -.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + padding: 0px 40px 0px 0px; margin-top: 0px; - margin-bottom: 0px; } -/* remove extra space at bottom of multine list-table cell */ -.rst-content .line-block { - margin-left: 0px; - margin-bottom: 0px; - line-height: 24px; +.wy-nav-content-wrap { + border-right: solid 1px; } -/* fix extra vertical spacing in page toctree */ -.rst-content .toctree-wrapper ul li ul, article ul li ul { - margin-top: 0; - margin-bottom: 0; -} - -/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ -.reference.internal.toc-index { - color: #d9d9d9; -} - -.reference.internal.toc-index.current { - background-color: #ffffff; - color: #000000; - font-weight: bold; -} - -.toc-index-div { - border-top: solid 1px #000000; - margin-top: 10px; - padding-top: 5px; -} - -.indextable ul li { - font-size: 14px; - margin-bottom: 5px; -} - -/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ -.indextable.genindextable { - margin-bottom: 20px; -} - -div.genindex-jumpbox { - margin-bottom: 10px; -} - -/* rst image classes */ - -.clear-both { - clear: both; - } - -.float-left { - float: left; - margin-right: 20px; -} - -img { - border: solid 1px #e8e8e8; -} - -/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ -.img-title { - color: #000000; - /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ - line-height: 3.0; - font-style: italic; - font-weight: 600; -} - -.img-title-para { - color: #000000; - margin-top: 20px; - margin-bottom: 0px; - font-style: italic; - font-weight: 500; -} - -.red { - color: red; +div.rst-content { + max-width: 1300px; + border: 0; + padding: 0px 80px 10px 80px; + margin-left: 50px; +} + +@media (max-width: 768px) { + div.rst-content { + max-width: 1300px; + border: 0; + padding: 0px 10px 10px 10px; + margin-left: 0px; + } } diff --git a/docs/source/_static/theme_overrides_SUPERSEDED.css b/docs/source/_static/theme_overrides_SUPERSEDED.css new file mode 100644 index 0000000..9662d63 --- /dev/null +++ b/docs/source/_static/theme_overrides_SUPERSEDED.css @@ -0,0 +1,353 @@ +body { + color: #000000; +} + +p { + margin-bottom: 10px; +} + +.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { + margin-bottom: 10px; +} + +.custom_header_01 { + color: #cc0000; + font-size: 22px; + font-weight: bold; + line-height: 50px; +} + +h1, h2, h3, h4, h5, h6 { + margin-bottom: 20px; + margin-top: 20px; +} + +h5 { + font-size: 18px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +h6 { + font-size: 15px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +/* custom admonitions */ +/* success */ +.custom-admonition-success .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-success.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* important */ +.custom-admonition-important .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #000000; +} +div.custom-admonition-important.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* caution */ +.custom-admonition-caution .admonition-title { + color: #000000; + background: #ffff99; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #e8e8e8; +} +div.custom-admonition-caution.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* note */ +.custom-admonition-note .admonition-title { + color: #ffffff; + background: #006bb3; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-note.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* todo */ +.custom-admonition-todo .admonition-title { + color: #000000; + background: #cce6ff; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #99ccff; +} +div.custom-admonition-todo.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #99ccff; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* examples */ +.custom-admonition-examples .admonition-title { + color: #000000; + background: #ffe6cc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #d8d8d8; +} +div.custom-admonition-examples.admonition { + color: #000000; + background: 
#ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +.wy-nav-content { + max-width: 100%; + padding-right: 100px; + padding-left: 100px; + background-color: #f2f2f2; +} + +div.rst-content { + background-color: #ffffff; + border: solid 1px #e5e5e5; + padding: 20px 40px 20px 40px; +} + +.rst-content .guilabel { + border: 1px solid #ffff99; + background: #ffff99; + font-size: 100%; + font-weight: normal; + border-radius: 4px; + padding: 2px 0px; + margin: auto 2px; + vertical-align: middle; +} + +.rst-content kbd { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + border: solid 1px #d8d8d8; + background-color: #f5f5f5; + padding: 0px 3px; + border-radius: 3px; +} + +.wy-nav-content-wrap a { + color: #0066cc; + text-decoration: none; +} +.wy-nav-content-wrap a:hover { + color: #0099cc; + text-decoration: underline; +} + +.wy-nav-top a { + color: #ffffff; +} + +/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ +.wy-table-responsive table td { + white-space: normal !important; +} + +.rst-content table.docutils td, +.rst-content table.docutils th { + padding: 5px 10px 5px 10px; +} +.rst-content table.docutils td p, +.rst-content table.docutils th p { + font-size: 14px; + margin-bottom: 0px; +} +.rst-content table.docutils td p cite, +.rst-content table.docutils th p cite { + font-size: 14px; + background-color: transparent; +} + +.colwidths-given th { + border: solid 1px #d8d8d8 !important; +} +.colwidths-given td { + border: solid 1px #d8d8d8 !important; +} + +/*handles single-tick inline code*/ +.wy-body-for-nav cite { + color: #000000; + background-color: transparent; + font-style: normal; + font-family: "Courier New"; + font-size: 13px; + padding: 3px 3px 3px 3px; +} + +.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + font-size: 13px; + overflow: visible; + white-space: pre-wrap; + color: #000000; +} + +.rst-content pre.literal-block, .rst-content div[class^='highlight'] { + background-color: #f8f8f8; + border: solid 1px #e8e8e8; +} + +/* This enables inline code to wrap. 
*/ +code, .rst-content tt, .rst-content code { + white-space: pre-wrap; + padding: 2px 3px 1px; + border-radius: 3px; + font-size: 13px; + background-color: #ffffff; +} + +/* use this added class for code blocks attached to bulleted list items */ +.highlight-top-margin { + margin-top: 20px !important; +} + +/* change color of inline code block */ +span.pre { + color: #e01e5a; +} + +.wy-body-for-nav blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid #ddd; + color: #000000; +} + +/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ +.rst-content .section ol p, .rst-content .section ul p { + margin-bottom: 0px; +} + +/* add spacing between bullets for legibility */ +.rst-content .section ol li, .rst-content .section ul li { + margin-bottom: 5px; +} + +.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { + margin-top: 5px; +} + +/* but exclude the toctree bullets */ +.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + margin-top: 0px; + margin-bottom: 0px; +} + +/* remove extra space at bottom of multine list-table cell */ +.rst-content .line-block { + margin-left: 0px; + margin-bottom: 0px; + line-height: 24px; +} + +/* fix extra vertical spacing in page toctree */ +.rst-content .toctree-wrapper ul li ul, article ul li ul { + margin-top: 0; + margin-bottom: 0; +} + +/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ +.reference.internal.toc-index { + color: #d9d9d9; +} + +.reference.internal.toc-index.current { + background-color: #ffffff; + color: #000000; + font-weight: bold; +} + +.toc-index-div { + border-top: solid 1px #000000; + margin-top: 10px; + padding-top: 5px; +} + +.indextable ul li { + font-size: 14px; + margin-bottom: 5px; +} + +/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ +.indextable.genindextable { + margin-bottom: 20px; +} + +div.genindex-jumpbox { + margin-bottom: 10px; +} + +/* rst image classes */ + +.clear-both { + clear: both; + } + +.float-left { + float: left; + margin-right: 20px; +} + +img { + border: solid 1px #e8e8e8; +} + +/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. role:: red' */ +.img-title { + color: #000000; + /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ + line-height: 3.0; + font-style: italic; + font-weight: 600; +} + +.img-title-para { + color: #000000; + margin-top: 20px; + margin-bottom: 0px; + font-style: italic; + font-weight: 500; +} + +.red { + color: red; +} diff --git a/docs/source/conf.py b/docs/source/conf.py index 54e5e66..7771ff0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -30,7 +30,7 @@ extensions = [ "sphinx.ext.intersphinx", "sphinx_reredirects", - 'sphinx_rtd_theme', + "sphinx_rtd_theme", "sphinx_rtd_dark_mode", "sphinx.ext.extlinks", "sphinx_copybutton", @@ -47,7 +47,10 @@ intersphinx_mapping = { "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), - "scancode-workbench": ("https://scancode-workbench.readthedocs.io/en/develop/", None), + "scancode-workbench": ( + "https://scancode-workbench.readthedocs.io/en/develop/", + None, + ), } @@ -82,7 +85,9 @@ "conf_py_path": "/docs/source/", # path in the checkout to the docs root } -html_css_files = ["_static/theme_overrides.css"] +html_css_files = [ + "theme_overrides.css", +] # If true, "Created using Sphinx" is shown in the HTML footer. 
Default is True. @@ -108,6 +113,4 @@ # -- Options for LaTeX output ------------------------------------------------- -latex_elements = { - 'classoptions': ',openany,oneside' -} \ No newline at end of file +latex_elements = {"classoptions": ",openany,oneside"} From 4e36fc601eaa17bde0d2a4bebfed70d7bde28e7c Mon Sep 17 00:00:00 2001 From: "John M. Horan" Date: Tue, 16 Jan 2024 12:22:54 -0800 Subject: [PATCH 04/81] Delete theme_overrides_SUPERSEDED.css as no longer needed #84 Reference: https://github.com/nexB/skeleton/issues/84 Signed-off-by: John M. Horan --- .../_static/theme_overrides_SUPERSEDED.css | 353 ------------------ 1 file changed, 353 deletions(-) delete mode 100644 docs/source/_static/theme_overrides_SUPERSEDED.css diff --git a/docs/source/_static/theme_overrides_SUPERSEDED.css b/docs/source/_static/theme_overrides_SUPERSEDED.css deleted file mode 100644 index 9662d63..0000000 --- a/docs/source/_static/theme_overrides_SUPERSEDED.css +++ /dev/null @@ -1,353 +0,0 @@ -body { - color: #000000; -} - -p { - margin-bottom: 10px; -} - -.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { - margin-bottom: 10px; -} - -.custom_header_01 { - color: #cc0000; - font-size: 22px; - font-weight: bold; - line-height: 50px; -} - -h1, h2, h3, h4, h5, h6 { - margin-bottom: 20px; - margin-top: 20px; -} - -h5 { - font-size: 18px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -h6 { - font-size: 15px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -/* custom admonitions */ -/* success */ -.custom-admonition-success .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-success.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* important */ -.custom-admonition-important .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #000000; -} -div.custom-admonition-important.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* caution */ -.custom-admonition-caution .admonition-title { - color: #000000; - background: #ffff99; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #e8e8e8; -} -div.custom-admonition-caution.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* note */ -.custom-admonition-note .admonition-title { - color: #ffffff; - background: #006bb3; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-note.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* todo */ -.custom-admonition-todo .admonition-title { - color: #000000; - background: #cce6ff; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #99ccff; -} -div.custom-admonition-todo.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #99ccff; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* examples */ -.custom-admonition-examples .admonition-title { - color: #000000; - background: #ffe6cc; - border-radius: 5px 5px 0px 0px; - 
border-bottom: solid 1px #d8d8d8; -} -div.custom-admonition-examples.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -.wy-nav-content { - max-width: 100%; - padding-right: 100px; - padding-left: 100px; - background-color: #f2f2f2; -} - -div.rst-content { - background-color: #ffffff; - border: solid 1px #e5e5e5; - padding: 20px 40px 20px 40px; -} - -.rst-content .guilabel { - border: 1px solid #ffff99; - background: #ffff99; - font-size: 100%; - font-weight: normal; - border-radius: 4px; - padding: 2px 0px; - margin: auto 2px; - vertical-align: middle; -} - -.rst-content kbd { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - border: solid 1px #d8d8d8; - background-color: #f5f5f5; - padding: 0px 3px; - border-radius: 3px; -} - -.wy-nav-content-wrap a { - color: #0066cc; - text-decoration: none; -} -.wy-nav-content-wrap a:hover { - color: #0099cc; - text-decoration: underline; -} - -.wy-nav-top a { - color: #ffffff; -} - -/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ -.wy-table-responsive table td { - white-space: normal !important; -} - -.rst-content table.docutils td, -.rst-content table.docutils th { - padding: 5px 10px 5px 10px; -} -.rst-content table.docutils td p, -.rst-content table.docutils th p { - font-size: 14px; - margin-bottom: 0px; -} -.rst-content table.docutils td p cite, -.rst-content table.docutils th p cite { - font-size: 14px; - background-color: transparent; -} - -.colwidths-given th { - border: solid 1px #d8d8d8 !important; -} -.colwidths-given td { - border: solid 1px #d8d8d8 !important; -} - -/*handles single-tick inline code*/ -.wy-body-for-nav cite { - color: #000000; - background-color: transparent; - font-style: normal; - font-family: "Courier New"; - font-size: 13px; - padding: 3px 3px 3px 3px; -} - -.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - font-size: 13px; - overflow: visible; - white-space: pre-wrap; - color: #000000; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] { - background-color: #f8f8f8; - border: solid 1px #e8e8e8; -} - -/* This enables inline code to wrap. 
*/ -code, .rst-content tt, .rst-content code { - white-space: pre-wrap; - padding: 2px 3px 1px; - border-radius: 3px; - font-size: 13px; - background-color: #ffffff; -} - -/* use this added class for code blocks attached to bulleted list items */ -.highlight-top-margin { - margin-top: 20px !important; -} - -/* change color of inline code block */ -span.pre { - color: #e01e5a; -} - -.wy-body-for-nav blockquote { - margin: 1em 0; - padding-left: 1em; - border-left: 4px solid #ddd; - color: #000000; -} - -/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ -.rst-content .section ol p, .rst-content .section ul p { - margin-bottom: 0px; -} - -/* add spacing between bullets for legibility */ -.rst-content .section ol li, .rst-content .section ul li { - margin-bottom: 5px; -} - -.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { - margin-top: 5px; -} - -/* but exclude the toctree bullets */ -.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { - margin-top: 0px; - margin-bottom: 0px; -} - -/* remove extra space at bottom of multine list-table cell */ -.rst-content .line-block { - margin-left: 0px; - margin-bottom: 0px; - line-height: 24px; -} - -/* fix extra vertical spacing in page toctree */ -.rst-content .toctree-wrapper ul li ul, article ul li ul { - margin-top: 0; - margin-bottom: 0; -} - -/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ -.reference.internal.toc-index { - color: #d9d9d9; -} - -.reference.internal.toc-index.current { - background-color: #ffffff; - color: #000000; - font-weight: bold; -} - -.toc-index-div { - border-top: solid 1px #000000; - margin-top: 10px; - padding-top: 5px; -} - -.indextable ul li { - font-size: 14px; - margin-bottom: 5px; -} - -/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ -.indextable.genindextable { - margin-bottom: 20px; -} - -div.genindex-jumpbox { - margin-bottom: 10px; -} - -/* rst image classes */ - -.clear-both { - clear: both; - } - -.float-left { - float: left; - margin-right: 20px; -} - -img { - border: solid 1px #e8e8e8; -} - -/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ -.img-title { - color: #000000; - /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ - line-height: 3.0; - font-style: italic; - font-weight: 600; -} - -.img-title-para { - color: #000000; - margin-top: 20px; - margin-bottom: 0px; - font-style: italic; - font-weight: 500; -} - -.red { - color: red; -} From 7d74b8a3c98761293cd133d543e4d58a525dc7bf Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 18 Jan 2024 17:11:14 +0530 Subject: [PATCH 05/81] Fix top padding for rst content Signed-off-by: Ayan Sinha Mahapatra --- docs/source/_static/theme_overrides.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css index de5ae43..5863ccf 100644 --- a/docs/source/_static/theme_overrides.css +++ b/docs/source/_static/theme_overrides.css @@ -12,7 +12,7 @@ div.rst-content { max-width: 1300px; border: 0; - padding: 0px 80px 10px 80px; + padding: 10px 80px 10px 80px; margin-left: 50px; } From 008d521aec51e5983f6d6a2adc4efa7fd92159cf Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Mon, 19 Feb 2024 15:21:45 +0530 Subject: [PATCH 06/81] Update CI runners and python version Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 2 +- .github/workflows/pypi-release.yml | 8 ++++---- azure-pipelines.yml | 22 +++++++++++++++------- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index ada779b..8c2abfe 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -4,7 +4,7 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: max-parallel: 4 diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 9585730..d2206c8 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,10 +21,10 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -47,7 +47,7 @@ jobs: name: Create GH release needs: - build-pypi-distribs - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Download built archives @@ -67,7 +67,7 @@ jobs: name: Create PyPI release needs: - create-gh-release - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Download built archives diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 764883d..373b78c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ jobs: parameters: job_name: ubuntu20_cpython image_name: ubuntu-20.04 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: macos11_cpython image_name: macOS-11 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos12_cpython image_name: macOS-12 - python_versions: 
['3.7', '3.8', '3.9', '3.10', '3.11']
+          python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
 
@@ -43,7 +43,15 @@ jobs:
       parameters:
           job_name: macos13_cpython
           image_name: macOS-13
-          python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11']
+          python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
+          test_suites:
+              all: venv/bin/pytest -n 2 -vvs
+
+    - template: etc/ci/azure-posix.yml
+      parameters:
+          job_name: macos14_cpython
+          image_name: macOS-14
+          python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
 
@@ -51,7 +59,7 @@ jobs:
       parameters:
           job_name: win2019_cpython
           image_name: windows-2019
-          python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11']
+          python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv\Scripts\pytest -n 2 -vvs
 
@@ -59,6 +67,6 @@ jobs:
       parameters:
           job_name: win2022_cpython
           image_name: windows-2022
-          python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11']
+          python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv\Scripts\pytest -n 2 -vvs

From 124da3dcef0d95a6f6aa76ed849f47ada25b83e2 Mon Sep 17 00:00:00 2001
From: Ayan Sinha Mahapatra
Date: Mon, 1 Jul 2024 15:11:21 +0530
Subject: [PATCH 07/81] Replace deprecated macos CI runners

Replace macos-11 runners with macos-14 runners.
Reference: https://github.com/actions/runner-images?tab=readme-ov-file#available-images
Reference: https://github.com/nexB/skeleton/issues/89

Signed-off-by: Ayan Sinha Mahapatra

---
 azure-pipelines.yml | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 373b78c..c2a3b52 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -25,24 +25,24 @@ jobs:
 
     - template: etc/ci/azure-posix.yml
       parameters:
-          job_name: macos11_cpython
-          image_name: macOS-11
+          job_name: macos12_cpython
+          image_name: macOS-12
           python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
 
     - template: etc/ci/azure-posix.yml
       parameters:
-          job_name: macos12_cpython
-          image_name: macOS-12
+          job_name: macos13_cpython
+          image_name: macOS-13
           python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
 
     - template: etc/ci/azure-posix.yml
       parameters:
-          job_name: macos13_cpython
-          image_name: macOS-13
+          job_name: macos14_cpython_arm64
+          image_name: macOS-14
           python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
@@ -50,7 +50,7 @@ jobs:
       parameters:
           job_name: macos14_cpython
-          image_name: macOS-14
+          image_name: macOS-14-large
           python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12']
           test_suites:
               all: venv/bin/pytest -n 2 -vvs
 

From be4e14d414cf4f7112b529dc71f7abccc9dcf24a Mon Sep 17 00:00:00 2001
From: Ayan Sinha Mahapatra
Date: Mon, 1 Jul 2024 16:00:40 +0530
Subject: [PATCH 08/81] Update minimum required python version to 3.8

Signed-off-by: Ayan Sinha Mahapatra

---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index bd0e58a..a8e20c5 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -38,7 +38,7 @@ zip_safe = false
 setup_requires = setuptools_scm[toml] >= 4
 
-python_requires = >=3.7
+python_requires = >=3.8
 
 install_requires =
 

From 9c57f340d22d8891a5614a93553b20d75e2f3136 Mon Sep 17 00:00:00 2001
From: Chin Yeung Li
Date: Tue, 20 Aug 2024 16:46:20 +0800
Subject: [PATCH 09/81] Update link
references of ownership from nexB to aboutcode-org Signed-off-by: Chin Yeung Li --- Makefile | 4 +- NOTICE | 2 +- configure | 2 +- configure.bat | 2 +- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 2 +- docs/source/skeleton-usage.rst | 2 +- etc/scripts/check_thirdparty.py | 5 +- etc/scripts/fetch_thirdparty.py | 19 ++++-- etc/scripts/gen_requirements.py | 2 +- etc/scripts/gen_requirements_dev.py | 2 +- etc/scripts/utils_dejacode.py | 11 ++-- etc/scripts/utils_requirements.py | 11 ++-- etc/scripts/utils_thirdparty.py | 89 +++++++++++++++++--------- setup.cfg | 2 +- tests/test_skeleton_codestyle.py | 2 +- 16 files changed, 100 insertions(+), 59 deletions(-) diff --git a/Makefile b/Makefile index cc36c35..94451b3 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -35,7 +35,7 @@ check: @echo "-> Run pycodestyle (PEP8) validation" @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . + @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . @echo "-> Run black validation" @${ACTIVATE} black --check --check -l 100 src tests setup.py diff --git a/NOTICE b/NOTICE index 65936b2..cbdaef7 100644 --- a/NOTICE +++ b/NOTICE @@ -2,7 +2,7 @@ # Copyright (c) nexB Inc. and others. # SPDX-License-Identifier: Apache-2.0 # -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Visit https://aboutcode.org and https://github.com/aboutcode-org/ for support and download. # ScanCode is a trademark of nexB Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/configure b/configure index 926a894..22d9288 100755 --- a/configure +++ b/configure @@ -3,7 +3,7 @@ # Copyright (c) nexB Inc. and others. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/ for support or download. +# See https://github.com/aboutcode-org/ for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # diff --git a/configure.bat b/configure.bat index 5e95b31..5b9a9d6 100644 --- a/configure.bat +++ b/configure.bat @@ -4,7 +4,7 @@ @rem Copyright (c) nexB Inc. and others. All rights reserved. @rem SPDX-License-Identifier: Apache-2.0 @rem See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -@rem See https://github.com/nexB/ for support or download. +@rem See https://github.com/aboutcode-org/ for support or download. @rem See https://aboutcode.org for more information about nexB OSS projects. 
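The renames in this patch only rewrite in-tree references; an existing local clone keeps its old nexB remote URL. A minimal sketch of repointing such a clone follows; the helper name and URL handling are illustrative assumptions, not part of this patch, and GitHub generally redirects the old organization URLs anyway, so this is hygiene rather than a hard requirement.

# Illustrative helper (not part of this patch): repoint an existing clone's
# "origin" remote from the old nexB organization to aboutcode-org.
import subprocess

def repoint_origin(repo_dir, old="nexB", new="aboutcode-org"):
    # Read the current URL, e.g. git@github.com:nexB/skeleton.git
    result = subprocess.run(
        ["git", "remote", "get-url", "origin"],
        capture_output=True, text=True, check=True, cwd=repo_dir,
    )
    # Handle both SSH (":nexB/") and HTTPS ("/nexB/") URL forms.
    url = result.stdout.strip()
    url = url.replace(f":{old}/", f":{new}/").replace(f"/{old}/", f"/{new}/")
    subprocess.run(["git", "remote", "set-url", "origin", url], check=True, cwd=repo_dir)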
diff --git a/docs/source/conf.py b/docs/source/conf.py index 7771ff0..8c88fa2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -43,7 +43,7 @@ # This points to aboutcode.readthedocs.io # In case of "undefined label" ERRORS check docs on intersphinx to troubleshoot -# Link was created at commit - https://github.com/nexB/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 +# Link was created at commit - https://github.com/aboutcode-org/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 intersphinx_mapping = { "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 13882e1..5640db2 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -12,7 +12,7 @@ To get started, create or identify a working directory on your local machine. Open that directory and execute the following command in a terminal session:: - git clone https://github.com/nexB/skeleton.git + git clone https://github.com/aboutcode-org/skeleton.git That will create an ``/skeleton`` directory in your working directory. Now you can install the dependencies in a virtualenv:: diff --git a/docs/source/skeleton-usage.rst b/docs/source/skeleton-usage.rst index cde23dc..6cb4cc5 100644 --- a/docs/source/skeleton-usage.rst +++ b/docs/source/skeleton-usage.rst @@ -118,7 +118,7 @@ corrected. You can check to see if your corrections are valid by running: Once the wheels are collected and the ABOUT files are generated and correct, upload them to thirdparty.aboutcode.org/pypi by placing the wheels and ABOUT files from the thirdparty directory to the pypi directory at -https://github.com/nexB/thirdparty-packages +https://github.com/aboutcode-org/thirdparty-packages Usage after project initialization diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index b052f25..2daded9 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import click @@ -17,7 +17,8 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, + path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index eedf05c..3f9ff52 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# @@ -55,7 +55,8 @@ "-d", "--dest", "dest_dir", - type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, + path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -224,7 +225,8 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os( + pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -260,13 +262,14 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append( + environment) if TRACE: print(f" NOT FOUND") if (sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + ): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -289,7 +292,8 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any( + d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -299,7 +303,8 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses( + dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py index 07e26f7..2b65ae8 100644 --- a/etc/scripts/gen_requirements.py +++ b/etc/scripts/gen_requirements.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import argparse diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py index 12cc06d..5db1c48 100644 --- a/etc/scripts/gen_requirements_dev.py +++ b/etc/scripts/gen_requirements_dev.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import argparse diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c42e6c9..652252d 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. 
# See https://aboutcode.org for more information about nexB OSS projects. # import io @@ -33,7 +33,8 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print( + "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,7 +69,8 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print( + f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -149,7 +151,8 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) + for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 0fc25a3..1c50239 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -102,7 +102,8 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join( + f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -113,10 +114,12 @@ def get_installed_reqs(site_packages_dir): as a text. """ if not os.path.exists(site_packages_dir): - raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception( + f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", + "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index addf8e5..46dc728 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# import email @@ -245,9 +245,11 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print( + f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list(package.get_supported_wheels(environment=environment)) + supported_wheels = list( + package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,7 +293,8 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print( + f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -300,7 +303,8 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print( + f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -533,7 +537,8 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version(name=self.name, version=self.version) + package = repo.get_package_version( + name=self.name, version=self.version) if not package: if TRACE: print( @@ -772,7 +777,8 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print(f"Failed to fetch NOTICE file: {self.notice_download_url}") + print( + f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -821,9 +827,11 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url(use_cached_index=use_cached_index).links + urls = LinksRepository.from_url( + use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] + extra_lic_names = [l.get("file") + for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] extra_lic_names = [ln for ln in extra_lic_names if ln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] @@ -834,7 +842,8 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename(filename=filename, urls=urls) + lic_url = get_license_link_for_filename( + filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -911,7 +920,8 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [c for c in classifiers if not c.startswith("License")] + other_classifiers = [ + c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -953,7 +963,8 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string(self.package_url) + purl_from_self = packageurl.PackageURL.from_string( + self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1003,7 +1014,8 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception( + f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1397,7 +1409,8 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) + for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1458,7 +1471,8 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith( + EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1476,7 +1490,8 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print(f" Skipping invalid distribution from: {path_or_url}") + print( + f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1525,7 +1540,8 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict(help="Python implementation supported by this environment."), + metadata=dict( + help="Python implementation supported by this environment."), repr=False, ) @@ -1539,7 +1555,8 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict(help="List of platform tags supported by this environment."), + metadata=dict( + help="List of platform 
tags supported by this environment."), repr=False, ) @@ -1623,7 +1640,8 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict(help="A set of already fetched package normalized names."), + metadata=dict( + help="A set of already fetched package normalized names."), ) use_cached_index = attr.ib( @@ -1654,10 +1672,12 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print( + f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print(f"WARNING: package {name} not found in repo: {self.index_url}") + print( + f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1842,7 +1862,8 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content(path_or_url=path_or_url, as_text=as_text) + content = get_file_content( + path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1864,7 +1885,8 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content( + url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1930,7 +1952,8 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException( + f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2021,7 +2044,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2033,7 +2057,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2044,7 +2069,8 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list(other_local_version.get_distributions()) + latest_local_dists = list( + other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2070,7 +2096,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - 
local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2111,7 +2138,8 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files( + dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2259,7 +2287,8 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join( + dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: diff --git a/setup.cfg b/setup.cfg index a8e20c5..ef7d369 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,7 +6,7 @@ license = Apache-2.0 description = skeleton long_description = file:README.rst long_description_content_type = text/x-rst -url = https://github.com/nexB/skeleton +url = https://github.com/aboutcode-org/skeleton author = nexB. Inc. and others author_email = info@aboutcode.org diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 2eb6e55..b4ce8c1 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -3,7 +3,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# From a92905297acf39ecd820bfb133f8670c39b40c97 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 17 Jan 2025 20:07:25 +0100 Subject: [PATCH 10/81] Drop deprecated macos-12 runner --- azure-pipelines.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index c2a3b52..39601e6 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -23,14 +23,6 @@ jobs: test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos12_cpython - image_name: macOS-12 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython From 4af4fce3cc57d001c6c26f77d477cd44cef2ffef Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Sat, 15 Feb 2025 00:09:49 +0530 Subject: [PATCH 11/81] Update CI/Actions runners Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 8 ++++---- .github/workflows/pypi-release.yml | 18 +++++++++--------- azure-pipelines.yml | 22 +++++++++++----------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 8c2abfe..621de4b 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -4,19 +4,19 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 strategy: max-parallel: 4 matrix: - python-version: [3.9] + python-version: [3.12] steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index d2206c8..a66c9c8 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,14 +21,14 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.12 - name: Install pypa/build run: python -m pip install build --user @@ -37,7 +37,7 @@ jobs: run: python -m build --sdist --wheel --outdir dist/ - name: Upload built archives - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pypi_archives path: dist/* @@ -47,17 +47,17 @@ jobs: name: Create GH release needs: - build-pypi-distribs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist - name: Create GH release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: draft: true files: dist/* @@ -67,11 +67,11 @@ jobs: name: Create PyPI release needs: - create-gh-release - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 39601e6..a220f2b 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -9,17 +9,17 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu20_cpython - image_name: ubuntu-20.04 - python_versions: 
['3.8', '3.9', '3.10', '3.11', '3.12'] + job_name: ubuntu22_cpython + image_name: ubuntu-22.04 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu22_cpython - image_name: ubuntu-22.04 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + job_name: ubuntu24_cpython + image_name: ubuntu-24.04 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos14_cpython_arm64 image_name: macOS-14 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14-large - python_versions: ['3.8', '3.8', '3.9', '3.10', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -59,6 +59,6 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 320ec21daa249ceae0c07787f9e52134b3ad06ab Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 12/81] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b3..1738b20 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde7907..01e60fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369..aaec643 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From d4e29c36c21ab81797604911cdeaea83d80e8088 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 13/81] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc..cea91bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 6c028f7219ae876ea62074ae435e574525e205d6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 14/81] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd..9e62736 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 233f3edabfbab390029fb9f1842bf43766b04583 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 15/81] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20..930e801 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 55545bf7a1a8f119a560c7f548ce5a460f39f37d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 16/81] Improve import sorting Signed-off-by: Philippe Ombredanne --- 
etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded9..62dbb14 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff52..30d376c 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c5..a33b8b3 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d..c71543f 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0c..de0ac95 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc728..b0295ec 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
diff --git a/pyproject.toml b/pyproject.toml
index 9e62736..ba55770 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,10 +76,15 @@ select = [
     "I", # isort
     "C9", # McCabe complexity
 ]
-ignore = ["D1", "D203", "D205", "D212", "D400", "D415"]
+ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"]
 
 [tool.ruff.lint.isort]
 force-single-line = true
+lines-after-imports = 1
+default-section = "first-party"
+known-first-party = ["src", "tests", "etc/scripts/**/*.py"]
+known-third-party = ["click", "pytest"]
+
 sections = { django = ["django"] }
 section-order = [
     "future",

From 0b63e5073b6b1cdc0960abe35060ad0fdb67b665 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sat, 29 Mar 2025 21:35:16 +0100
Subject: [PATCH 17/81] Apply small code updates

Signed-off-by: Philippe Ombredanne
---
 etc/scripts/utils_requirements.py | 20 ++++++++-----
 etc/scripts/utils_thirdparty.py   | 48 +++++++++++++++----------------
 2 files changed, 37 insertions(+), 31 deletions(-)

diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py
index 1c50239..a9ac223 100644
--- a/etc/scripts/utils_requirements.py
+++ b/etc/scripts/utils_requirements.py
@@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False):
     >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1")
     >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3")
     >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "")
-    >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2")
+    >>> expected = ("foo", "")
+    >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected
     >>> try:
     ...     assert not get_required_name_version("foo", with_unpinned=False)
     ... except Exception as e:
     ...     assert "Requirement version must be pinned" in str(e)
     """
     requirement = requirement and "".join(requirement.lower().split())
-    assert requirement, f"specifier is required is empty:{requirement!r}"
+    if not requirement:
+        raise ValueError(f"specifier is required but is empty: {requirement!r}")
     name, operator, version = split_req(requirement)
-    assert name, f"Name is required: {requirement}"
+    if not name:
+        raise ValueError(f"Name is required: {requirement}")
     is_pinned = operator == "=="
     if with_unpinned:
         version = ""
     else:
-        assert is_pinned and version, f"Requirement version must be pinned: {requirement}"
+        if not (is_pinned and version):
+            raise ValueError(f"Requirement version must be pinned: {requirement}")
     return name, version
 
 
@@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir):
     # setuptools, pip
     args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir]
 
-    return subprocess.check_output(args, encoding="utf-8")
+    return subprocess.check_output(args, encoding="utf-8")  # noqa: S603
 
 
 comparators = (
@@ -150,9 +154,11 @@ def split_req(req):
     >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ")
     >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2")
     """
-    assert req
+    if not req:
+        raise ValueError("req is required")
     # do not allow multiple constraints and tags
-    assert not any(c in req for c in ",;")
+    if any(c in req for c in ",;"):
+        raise Exception(f"complex requirements with , or ; are not supported: {req}")
     req = "".join(req.split())
     if not any(c in req for c in comparators):
         return req, "", ""
diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py
index b0295ec..6d5ffdc 100644
--- a/etc/scripts/utils_thirdparty.py
+++ b/etc/scripts/utils_thirdparty.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR):
         Download this distribution into `dest_dir` directory.
         Return the fetched filename.
         """
-        assert self.filename
+        if not self.filename:
+            raise ValueError(f"self.filename has no value but is required: {self.filename!r}")
         if TRACE_DEEP:
             print(
                 f"Fetching distribution of {self.name}=={self.version}:",
@@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False):
         urls = LinksRepository.from_url(
             use_cached_index=use_cached_index).links
         errors = []
-        extra_lic_names = [l.get("file")
-                           for l in self.extra_data.get("licenses", {})]
+        extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})]
         extra_lic_names += [self.extra_data.get("license_file")]
-        extra_lic_names = [ln for ln in extra_lic_names if ln]
+        extra_lic_names = [eln for eln in extra_lic_names if eln]
         lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()]
         for filename in lic_names + extra_lic_names:
             floc = os.path.join(dest_dir, filename)
@@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False):
                 if TRACE:
                     print(f"Fetched license from remote: {lic_url}")
 
-            except:
+            except Exception:
                 try:
                     # try licensedb second
                     lic_url = f"{LICENSEDB_API_URL}/{filename}"
@@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False):
                     if TRACE:
                         print(f"Fetched license from licensedb: {lic_url}")
 
-            except:
-                msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}'
+            except Exception:
+                msg = (f"No text for license {filename} in expression "
+                       f"{self.license_expression!r} from {self}")
                 print(msg)
                 errors.append(msg)
 
@@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls):
     exception if no link is found or if there are more than one link for that
     file name.
     """
-    path_or_url = [l for l in urls if l.endswith(f"/{filename}")]
+    path_or_url = [url for url in urls if url.endswith(f"/{filename}")]
     if not path_or_url:
         raise Exception(f"Missing link to file: {filename}")
     if not len(path_or_url) == 1:
@@ -1140,7 +1140,6 @@ def to_filename(self):
 
 @attr.attributes
 class Wheel(Distribution):
-
     """
     Represents a wheel file.
 
@@ -1301,7 +1300,7 @@ def is_pure(self):
 def is_pure_wheel(filename):
     try:
         return Wheel.from_filename(filename).is_pure()
-    except:
+    except Exception:
         return False
 
 
@@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls):
             )
         except InvalidDistributionFilename:
             if TRACE_DEEP:
-                print(
-                    f"  Skipping invalid distribution from: {path_or_url}")
+                print(f"  Skipping invalid distribution from: {path_or_url}")
             continue
 
     return dists
@@ -1500,8 +1498,7 @@ def get_distributions(self):
         """
         if self.sdist:
             yield self.sdist
-        for wheel in self.wheels:
-            yield wheel
+        yield from self.wheels
 
     def get_url_for_filename(self, filename):
         """
@@ -1632,7 +1629,8 @@ class PypiSimpleRepository:
         type=dict,
         default=attr.Factory(lambda: defaultdict(dict)),
         metadata=dict(
-            help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo"
+            help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} "
+            "available in this repo"
         ),
     )
 
@@ -1647,7 +1645,8 @@ class PypiSimpleRepository:
         type=bool,
         default=False,
         metadata=dict(
-            help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache."
+            help="If True, use any existing on-disk cached PyPI index files. "
+            "Otherwise, fetch and cache."
         ),
     )
 
@@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name):
         Return a mapping of all available PypiPackage version for this package name.
         The mapping may be empty.
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 092f545f5b87442ae22884cb4d5381883343a1c2 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 18/81] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14..1aa4e28 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c..c224683 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", "--dest", 
"dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90d..cfe68e6 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f..cd39cda 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac223..167bc9f 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdc..4ea1bab 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From d05665ad44a50b71f66b974ad24c81f7443e8180 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 19/81] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2..8aad829 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. |psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28..bb8347a 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. 
and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c224683..76a19a6 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e6..89d0626 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url):
     Return an HTML document as string which is a links index of all packages
     """
     document = []
-    header = f"""
+    header = """
 
     Links for all packages
@@ -177,13 +176,13 @@ def simple_index_entry(self, base_url):
 
 def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"):
     """
-    Using a ``directory`` directory of wheels and sdists, create the a PyPI
-    simple directory index at ``directory``/simple/ populated with the proper
-    PyPI simple index directory structure crafted using symlinks.
+    Create a PyPI simple directory index using a ``directory`` of wheels and sdists: the
+    directory at ``directory``/simple/ is populated with the proper PyPI simple index directory
+    structure crafted using symlinks.
 
-    WARNING: The ``directory``/simple/ directory is removed if it exists.
-    NOTE: in addition to the a PyPI simple index.html there is also a links.html
-    index file generated which is suitable to use with pip's --find-links
+    WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the
+    PyPI simple index.html there is also a links.html index file generated which is suitable to use
+    with pip's --find-links
     """
 
     directory = Path(directory)
diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py
index 2b65ae8..1b87944 100644
--- a/etc/scripts/gen_requirements.py
+++ b/etc/scripts/gen_requirements.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -34,7 +33,8 @@ def gen_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages",
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-r",
diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py
index 5db1c48..8548205 100644
--- a/etc/scripts/gen_requirements_dev.py
+++ b/etc/scripts/gen_requirements_dev.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -36,7 +35,8 @@ def gen_dev_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages',
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index a33b8b3..de4b706 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -1,4 +1,5 @@
-"""Generate and work with PEP 425 Compatibility Tags.
+"""
+Generate and work with PEP 425 Compatibility Tags.
 
 copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py
 download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py
@@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected):
     assert actual == expected
 
 
-class Testcompatibility_tags(object):
+class Testcompatibility_tags:
     def mock_get_config_var(self, **kwd):
         """
         Patch sysconfig.get_config_var for arbitrary keys.
@@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda..b6bff51 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95..dd954bc 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. 
:param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f..b9b2c0e 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1bab..aafc1d6 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 63bcbf507e8a25f22853d56605c107e47c3673cc Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 20/81] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196..8a93c94 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770..a872ab3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c1..8cd85c9 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. 
+
+        # TODO: update with switch to pyproject.toml
         setup_cfg = configparser.ConfigParser()
         setup_cfg.read("setup.cfg")
         if setup_cfg["metadata"]["name"] != "skeleton":
             return
 
-        args = "venv/bin/black --check -l 100 setup.py etc tests"
+        commands = [
+            ["venv/bin/ruff", "check"],
+            ["venv/bin/ruff", "format", "--check"],
+        ]
+        command = None
         try:
-            subprocess.check_output(args.split())
+            for command in commands:
+                subprocess.check_output(command)  # noqa: S603
         except subprocess.CalledProcessError as e:
             print("===========================================================")
             print(e.output)
             print("===========================================================")
             raise Exception(
-                "Black style check failed; please format the code using:\n"
-                "  python -m black -l 100 setup.py etc tests",
+                f"Code style and linting command check failed: {' '.join(command)!r}.\n"
+                "You can check and format the code using:\n"
+                "  make valid\n"
+                "OR:\n"
+                "  ruff format\n"
+                "  ruff check --fix\n",
                 e.output,
             ) from e

From 9d1393a85303bf8cf92c9a25aa5cc50bdfd080d1 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne <pombredanne@nexb.com>
Date: Sat, 29 Mar 2025 22:08:25 +0100
Subject: [PATCH 21/81] Format code

Signed-off-by: Philippe Ombredanne <pombredanne@nexb.com>
---
 etc/scripts/check_thirdparty.py                  | 1 +
 etc/scripts/test_utils_pip_compatibility_tags.py | 1 +
 tests/test_skeleton_codestyle.py                 | 1 +
 3 files changed, 3 insertions(+)

diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py
index bb8347a..65ae595 100644
--- a/etc/scripts/check_thirdparty.py
+++ b/etc/scripts/check_thirdparty.py
@@ -11,6 +11,7 @@
 import utils_thirdparty
 
 
+
 @click.command()
 @click.option(
     "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index de4b706..0e9c360 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -33,6 +33,7 @@
 
 import utils_pip_compatibility_tags
 
+
 @pytest.mark.parametrize(
     "version_info, expected",
     [
diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py
index 8cd85c9..7135ac0 100644
--- a/tests/test_skeleton_codestyle.py
+++ b/tests/test_skeleton_codestyle.py
@@ -11,6 +11,7 @@
 import subprocess
 import unittest
 
+
 class BaseTests(unittest.TestCase):
     def test_skeleton_codestyle(self):
         # This test shouldn't run in proliferated repositories.
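
A note on the codestyle test above: it shells out to each linting command in turn and
surfaces the captured output of the first failure. A minimal standalone sketch of the same
subprocess pattern, assuming a ruff executable on the PATH (the skeleton test uses
venv/bin/ruff instead):

    import subprocess

    def run_checks(commands):
        """Run each check command; raise with its output on the first failure."""
        for command in commands:
            try:
                subprocess.check_output(command, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                raise RuntimeError(
                    f"Check failed: {' '.join(command)!r}\n{e.output.decode()}"
                ) from e

    run_checks([["ruff", "check", "."], ["ruff", "format", "--check", "."]])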
From f10b783b6b6fe33032a7862352ed532294efdf14 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 22/81] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3..0f8bd58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 1d6c8f3bb8755aa7c9d2804240c01b0161417328 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 23/81] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc..16e2046 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index 6cbd839..3d6cb4e 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,6 @@ A Simple Python Project Skeleton ================================ + This repo attempts to standardize the structure of the Python-based project's repositories using modern Python packaging and configuration techniques. Using this `blog post`_ as inspiration, this repository serves as the base for @@ -47,16 +48,19 @@ Release Notes - 2022-03-04: - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. 
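
With the flake8-bandit ("S") rules enabled in the ruff configuration refinement above, bare
assert statements are flagged as S101, which is why the per-file-ignores added earlier
exempt test code. A sketch of how a project could extend that same pyproject.toml section
(the src/generated path and E501 entry are hypothetical examples, not part of the skeleton):

    [tool.ruff.lint.per-file-ignores]
    # asserts are idiomatic in pytest-style tests; silence flake8-bandit S101 there
    "tests/*" = ["S101"]
    "test_*.py" = ["S101"]
    # hypothetical: exempt generated code from line-length checks
    "src/generated/*.py" = ["E501"]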
From 0213c1ea9a15ab94a854b8d7af27a1a036e393f4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 24/81] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e801..debc404 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From c112f2a9c20d58e986424f5f32bd259814fc8e3f Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 25/81] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd58..51761ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 944b6c5371bea2ce0763fd26888de6436116d185 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:34:26 +0100 Subject: [PATCH 26/81] Add support for new OS versions Signed-off-by: Philippe Ombredanne --- README.rst | 50 +++++++++++++++++++++++++++++++++++++++++---- azure-pipelines.yml | 36 ++++++++++++++++++++++++++++++-- 2 files changed, 80 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 6cbd839..f848b4b 100644 --- a/README.rst +++ b/README.rst @@ -1,9 +1,11 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's -repositories using modern Python packaging and configuration techniques. -Using this `blog post`_ as inspiration, this repository serves as the base for -all new Python projects and is mergeable in existing repositories as well. + +This repo attempts to standardize the structure of the Python-based project's repositories using +modern Python packaging and configuration techniques that can then be applied to many repos. + +Using this `blog post`_ as inspiration, this repository serves as the base for all new Python +projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -13,6 +15,7 @@ Usage A brand new project ------------------- + .. code-block:: bash git init my-new-repo @@ -26,6 +29,7 @@ From here, you can make the appropriate changes to the files for your specific p Update an existing project --------------------------- + .. code-block:: bash cd my-existing-project @@ -41,17 +45,54 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. Release Notes ============= +- 2025-03-29: + + - Add support for beta macOS-15 + - Add support for beta windows-2025 + +- 2025-02-14: + + - Drop support for Python 3.8, add support in CI for Python 3.13, use Python 3.12 as default + version. 
+ +- 2025-01-17: + + - Drop support for macOS-12, add support for macOS-14 + - Add support in CI for ubuntu-24.04 + - Add support in CI for Python 3.12 + +- 2024-08-20: + + - Update references of ownership from nexB to aboutcode-org + +- 2024-07-01: + + - Drop support for Python 3.8 + - Drop support for macOS-11, add support for macOS-14 + +- 2024-02-19: + + - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 + +- 2023-10-18: + + - Add dark mode support in documentation + - 2023-07-18: + - Add macOS-13 job in azure-pipelines.yml - 2022-03-04: + - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` @@ -59,4 +100,5 @@ Release Notes as well as collecting dependencies as wheels and generating ABOUT files for them. - 2021-05-11: + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a220f2b..80ae45b 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,11 +26,27 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython + image_name: macOS-13-xlarge + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos14_cpython + image_name: macOS-14-large + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython_arm64 @@ -41,8 +57,16 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos14_cpython - image_name: macOS-14-large + job_name: macos15_cpython + image_name: macOS-15 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos15_cpython_arm64 + image_name: macOS-15-large python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -62,3 +86,11 @@ jobs: python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs + + - template: etc/ci/azure-win.yml + parameters: + job_name: win2025_cpython + image_name: windows-2025 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv\Scripts\pytest -n 2 -vvs From 136af3912336616fbd2431a96230961517a2c356 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 12:45:32 +0200 Subject: [PATCH 27/81] Update scripts aboutcode references Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/etc/scripts/update_skeleton.py 
b/etc/scripts/update_skeleton.py index 635898b..5705fc4 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. AboutCode, and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -16,7 +15,7 @@ import click -NEXB_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES=[ "aboutcode-toolkit", "ahocode", "bitcode", @@ -56,9 +55,9 @@ @click.command() @click.help_option("-h", "--help") -def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): +def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): """ - Update project files of nexB projects that use the skeleton + Update project files of AboutCode projects that use the skeleton This script will: - Clone the repo @@ -81,14 +80,14 @@ def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): os.chdir(work_dir_path) # Clone repo - repo_git = f"git@github.com:nexB/{repo_name}.git" + repo_git = f"git@github.com:aboutcode-org/{repo_name}.git" subprocess.run(["git", "clone", repo_git]) # Go into cloned repo os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From da8eff0383611df60311b8bac599657450eaeb52 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 28/81] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff..7d807eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." ] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 4f9e936d452acc3822df8d3f932cbd7071b31d72 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 29/81] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807eb..5e16b56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." 
] From a2809fb28c60b54aec0c367285acacdea1cb03a8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 30/81] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d9288..83fd203 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d6..18b3703 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643..ad8e0d8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From 43b96c28baaa1621d24b6f5791c6d915d2edc5f3 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 31/81] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404..d21a2f9 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b56..bfb1d35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From b7194c80c9425087f1d05e430bd9d6a14fb9c3a0 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 32/81] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 +++++------- Makefile | 10 +++++++--- 
docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b..10ba5fa 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f9..413399e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 docs/ *.rst + @${ACTIVATE} doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 9416323..0000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c542686..0000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad829..056ca6e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db2..041b358 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. 
What is checked:
 
@@ -169,11 +169,11 @@ What is checked:
 Interspinx
 ----------
 
-ScanCode toolkit documentation uses `Intersphinx `_
+ScanCode toolkit documentation uses `Intersphinx `_
 to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects.
 
 To link sections in the same documentation, standart reST labels are used. Refer
-`Cross-Referencing `_ for more information.
+`Cross-Referencing `_ for more information.
 
 For example::
 
@@ -230,7 +230,7 @@ Style Conventions for the Documentaion
 
 1. Headings
 
-   (`Refer `_)
+   (`Refer `_)
    Normally, there are no heading levels assigned to certain characters as the structure is
    determined from the succession of headings. However, this convention is used in Python’s Style
    Guide for documenting which you may follow:
diff --git a/pyproject.toml b/pyproject.toml
index bfb1d35..c9e6772 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,7 +122,5 @@ max-complexity = 10
 
 
 [tool.doc8]
-
 ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"]
 max-line-length=100
-verbose=0

From a5bcdbdd71d1542a0e9ec9b190a2e3d573c53744 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne <pombredanne@nexb.com>
Date: Sun, 30 Mar 2025 18:49:01 +0200
Subject: [PATCH 33/81] Add twine check to release publication

Signed-off-by: Philippe Ombredanne <pombredanne@nexb.com>
---
 .github/workflows/pypi-release.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index a66c9c8..cf0579a 100644
--- a/.github/workflows/pypi-release.yml
+++ b/.github/workflows/pypi-release.yml
@@ -30,12 +30,15 @@ jobs:
       with:
         python-version: 3.12
 
-      - name: Install pypa/build
-        run: python -m pip install build --user
+      - name: Install pypa/build and twine
+        run: python -m pip install --user build twine
 
       - name: Build a binary wheel and a source tarball
         run: python -m build --sdist --wheel --outdir dist/
 
+      - name: Validate wheel and sdist for PyPI
+        run: python -m twine check dist/*
+
       - name: Upload built archives
         uses: actions/upload-artifact@v4
         with:

From a6c25fb2a2fa35311d26621b9db400ca52bd376e Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne <pombredanne@nexb.com>
Date: Sun, 30 Mar 2025 19:16:31 +0200
Subject: [PATCH 34/81] Refine doc contribution docs

Signed-off-by: Philippe Ombredanne <pombredanne@nexb.com>
---
 docs/source/contribute/contrib_doc.rst | 119 ++++++++-----------------
 1 file changed, 38 insertions(+), 81 deletions(-)

diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst
index 041b358..dee9296 100644
--- a/docs/source/contribute/contrib_doc.rst
+++ b/docs/source/contribute/contrib_doc.rst
@@ -8,109 +8,59 @@ Contributing to the Documentation
 Setup Local Build
 -----------------
 
-To get started, create or identify a working directory on your local machine.
+To get started, check out and configure the repository for development::
 
-Open that directory and execute the following command in a terminal session::
+    git clone https://github.com/aboutcode-org/your-repo.git
 
-    git clone https://github.com/aboutcode-org/skeleton.git
+    cd your-repo
+    ./configure --dev
 
-That will create an ``/skeleton`` directory in your working directory.
-Now you can install the dependencies in a virtualenv::
-
-    cd skeleton
-    ./configure --docs
+(Or use "make dev")
 
 .. note::
 
-   In case of windows, run ``configure --docs`` instead of this.
-
-Now, this will install the following prerequisites:
-
-- Sphinx
-- sphinx_rtd_theme (the format theme used by ReadTheDocs)
-- docs8 (style linter)
+   In case of windows, run ``configure --dev``.
 
-These requirements are already present in setup.cfg and `./configure --docs` installs them.
+This will install and configure all requirements for development, including docs development.
 
-Now you can build the HTML documents locally::
+Now you can build the HTML documentation locally::
 
     source venv/bin/activate
-    cd docs
-    make html
-
-Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the
-documentation .html files::
-
-    open build/html/index.html
-
-.. note::
-
-    In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t
-    get a file descriptor referring to the console”, try:
-
-    ::
-
-        see build/html/index.html
+    make docs
 
-You now have a local build of the AboutCode documents.
+This will build a local instance of the ``docs/_build`` directory::
 
-.. _contrib_doc_share_improvements:
+    open docs/_build/index.html
 
-Share Document Improvements
----------------------------
-
-Ensure that you have the latest files::
-
-    git pull
-    git status
 
-Before commiting changes run Continious Integration Scripts locally to run tests. Refer
-:ref:`doc_ci` for instructions on the same.
+To validate the documentation style and content, use::
 
-Follow standard git procedures to upload your new and modified files. The following commands are
-examples::
-
-    git status
-    git add source/index.rst
-    git add source/how-to-scan.rst
-    git status
-    git commit -m "New how-to document that explains how to scan"
-    git status
-    git push
-    git status
-
-The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your
-Pull Request is Merged.
+    source venv/bin/activate
+    make doc8
+    make docs-check
 
-Refer the `Pro Git Book `_ available online for Git tutorials
-covering more complex topics on Branching, Merging, Rebasing etc.
 
 .. _doc_ci:
 
 Continuous Integration
 ----------------------
 
-The documentations are checked on every new commit through Travis-CI, so that common errors are
-avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects
-of the documentation :
+The documentation is checked on every new commit, so that common errors are avoided and
+documentation standards are enforced. We check for these aspects of the documentation:
 
 1. Successful Builds (By using ``sphinx-build``)
-2. No Broken Links (By Using ``link-check``)
-3. Linting Errors (By Using ``Doc8``)
+2. No Broken Links (By Using ``linkcheck``)
+3. Linting Errors (By Using ``doc8``)
 
-So run these scripts at your local system before creating a Pull Request::
+You must run these checks locally before creating a pull request::
 
-    cd docs
-    ./scripts/sphinx_build_link_check.sh
-    ./scripts/doc8_style_check.sh
+    make doc8
+    make docs-check
 
-If you don't have permission to run the scripts, run::
-
-    chmod u+x ./scripts/doc8_style_check.sh
 
 .. _doc_style_docs8:
 
-Style Checks Using ``Doc8``
+Style Checks Using ``doc8``
 ---------------------------
 
 How To Run Style Tests
@@ -118,8 +68,7 @@ How To Run Style Tests
 
 In the project root, run the following commands::
 
-    $ cd docs
-    $ ./scripts/doc8_style_check.sh
+    make doc8
 
 A sample output is::
 
@@ -143,11 +92,13 @@ A sample output is::
 
 Now fix the errors and run again till there isn't any style error in the documentation.
 
+
 What is Checked?
 ^^^^^^^^^^^^^^^^
 
 PyCQA is an Organization for code quality tools (and plugins) for the Python programming language.
-Doc8 is a sub-project of the same Organization. Refer this
+Doc8 is a sub-project of the same Organization. Refer this +`README `_ for more details. What is checked: @@ -164,16 +115,19 @@ What is checked: - no carriage returns (use UNIX newlines) - D004 - no newline at end of file - D005 + .. _doc_interspinx: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +AboutCode documentation uses +`Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From 68daae1e7e475a89568e353f64f29af13754ce9e Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 35/81] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b3..1738b20 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde7907..01e60fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369..aaec643 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From 6a8c9ae144a1985b59fb69a0b2c55e32831714b8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 36/81] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc..cea91bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 2fd31d54afa47418c764de0f1a30d67c7059ed7b Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 37/81] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd..9e62736 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From eb5fc82ab1cab8a4742a2b9028d1436956960e81 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 38/81] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20..930e801 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 529d51621c9e2af8e1ec2503044b8752c71c3ba7 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 39/81] Improve import sorting Signed-off-by: Philippe Ombredanne --- 
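
With force-single-line = true, Ruff's isort emulation rewrites grouped "from" imports into
one import per line, which is what most hunks below do. Illustratively (the module names
are taken from the diff; any others would be rewritten the same way):

    # before
    from packvers.tags import compatible_tags, cpython_tags, mac_platforms

    # after "ruff check --fix" with force-single-line enabled
    from packvers.tags import compatible_tags
    from packvers.tags import cpython_tags
    from packvers.tags import mac_platforms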
etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded9..62dbb14 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff52..30d376c 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c5..a33b8b3 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d..c71543f 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0c..de0ac95 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc728..b0295ec 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
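
The import moves above also follow isort-style section ordering: standard-library imports
first, then third-party, then local first-party modules, with a blank line between groups.
A small illustration using modules that appear in these scripts (grouping shown under that
convention, not an excerpt from the patch):

    import os                # standard library

    import click             # third-party
    import requests

    import utils_thirdparty  # local helper script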
diff --git a/pyproject.toml b/pyproject.toml
index 9e62736..ba55770 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,10 +76,15 @@ select = [
     "I", # isort
     "C9", # McCabe complexity
 ]
-ignore = ["D1", "D203", "D205", "D212", "D400", "D415"]
+ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"]
+
 
 [tool.ruff.lint.isort]
 force-single-line = true
+lines-after-imports = 1
+default-section = "first-party"
+known-first-party = ["src", "tests", "etc/scripts/**/*.py"]
+known-third-party = ["click", "pytest"]
+
 sections = { django = ["django"] }
 section-order = [
     "future",

From aae1a2847c0e493b0e8bea542da30dbdfb2be68e Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne <pombredanne@nexb.com>
Date: Sat, 29 Mar 2025 21:35:16 +0100
Subject: [PATCH 40/81] Apply small code updates

Signed-off-by: Philippe Ombredanne <pombredanne@nexb.com>
---
 etc/scripts/utils_requirements.py | 20 ++++++++-----
 etc/scripts/utils_thirdparty.py   | 48 +++++++++++++++----------------
 2 files changed, 37 insertions(+), 31 deletions(-)

diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py
index 1c50239..a9ac223 100644
--- a/etc/scripts/utils_requirements.py
+++ b/etc/scripts/utils_requirements.py
@@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False):
     >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1")
     >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3")
     >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "")
-    >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2")
+    >>> expected = ("foo", "")
+    >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected
     >>> try:
     ...     assert not get_required_name_version("foo", with_unpinned=False)
     ... except Exception as e:
     ...
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir): # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -150,9 +154,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index b0295ec..6d5ffdc 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - assert self.filename + if not self.filename: + raise ValueError(f"self.filename has no value but is required: {self.filename!r}") if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): urls = LinksRepository.from_url( use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") - for l in self.extra_data.get("licenses", {})] + extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [ln for ln in extra_lic_names if ln] + extra_lic_names = [eln for eln in extra_lic_names if eln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except: + except Exception: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except: - msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' + except Exception: + msg = f"No text for license {filename} in expression " + f"{self.license_expression!r} from {self}" print(msg) errors.append(msg) @@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [l for l in urls if l.endswith(f"/{filename}")] + path_or_url = [url for url in urls if url.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1140,7 +1140,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. @@ -1301,7 +1300,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except: + except Exception: return False @@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print( - f" Skipping invalid distribution from: {path_or_url}") + print(f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1500,8 +1498,7 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - for wheel in self.wheels: - yield wheel + yield from self.wheels def get_url_for_filename(self, filename): """ @@ -1632,7 +1629,8 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " + "available in this repo" ), ) @@ -1647,7 +1645,8 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. " + "Otherwise, fetch and cache." ), ) @@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 037f9fc1b03736eeac9e0eefac3e35acc916d193 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 41/81] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14..1aa4e28 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c..c224683 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", "--dest", 
"dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90d..cfe68e6 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f..cd39cda 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac223..167bc9f 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdc..4ea1bab 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From 1189dda52570ed018f52eacd38c4990e8be8ff1a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 42/81] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2..8aad829 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. |psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28..bb8347a 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. 
and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c224683..76a19a6 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e6..89d0626 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url):
     Return an HTML document as string which is a links index of all packages
     """
     document = []
-    header = f"""
+    header = """
     Links for all packages
 
@@ -177,13 +176,13 @@ def simple_index_entry(self, base_url):
 
 def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"):
     """
-    Using a ``directory`` directory of wheels and sdists, create the a PyPI
-    simple directory index at ``directory``/simple/ populated with the proper
-    PyPI simple index directory structure crafted using symlinks.
+    Create a PyPI simple directory index using a ``directory`` directory of wheels and sdists in
+    the directory at ``directory``/simple/ populated with the proper PyPI simple index directory
+    structure crafted using symlinks.
 
-    WARNING: The ``directory``/simple/ directory is removed if it exists.
-    NOTE: in addition to the a PyPI simple index.html there is also a links.html
-    index file generated which is suitable to use with pip's --find-links
+    WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the
+    PyPI simple index.html there is also a links.html index file generated which is suitable to use
+    with pip's --find-links
     """
 
     directory = Path(directory)
diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py
index 2b65ae8..1b87944 100644
--- a/etc/scripts/gen_requirements.py
+++ b/etc/scripts/gen_requirements.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -34,7 +33,8 @@ def gen_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages",
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-r",
diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py
index 5db1c48..8548205 100644
--- a/etc/scripts/gen_requirements_dev.py
+++ b/etc/scripts/gen_requirements_dev.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -36,7 +35,8 @@ def gen_dev_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages',
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index a33b8b3..de4b706 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -1,4 +1,5 @@
-"""Generate and work with PEP 425 Compatibility Tags.
+"""
+Generate and work with PEP 425 Compatibility Tags.
 
 copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py
 download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py
@@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected):
     assert actual == expected
 
 
-class Testcompatibility_tags(object):
+class Testcompatibility_tags:
     def mock_get_config_var(self, **kwd):
         """
         Patch sysconfig.get_config_var for arbitrary keys.
@@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda..b6bff51 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95..dd954bc 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. 
:param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f..b9b2c0e 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1bab..aafc1d6 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 84257fbe200e5780cb13536a5c8eb56a88539e6a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 43/81] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196..8a93c94 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770..a872ab3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c1..8cd85c9 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. 
+
+        # TODO: update with switch to pyproject.toml
         setup_cfg = configparser.ConfigParser()
         setup_cfg.read("setup.cfg")
         if setup_cfg["metadata"]["name"] != "skeleton":
             return
 
-        args = "venv/bin/black --check -l 100 setup.py etc tests"
+        commands = [
+            ["venv/bin/ruff", "--check"],
+            ["venv/bin/ruff", "format", "--check"],
+        ]
+        command = None
         try:
-            subprocess.check_output(args.split())
+            for command in commands:
+                subprocess.check_output(command)  # noqa: S603
         except subprocess.CalledProcessError as e:
             print("===========================================================")
             print(e.output)
             print("===========================================================")
             raise Exception(
-                "Black style check failed; please format the code using:\n"
-                "    python -m black -l 100 setup.py etc tests",
+                f"Code style and linting command check failed: {' '.join(command)!r}.\n"
+                "You can check and format the code using:\n"
+                "    make valid\n"
+                "OR:\n    ruff format\n"
+                "    ruff check --fix\n",
                 e.output,
             ) from e

From 00684f733a0873bd837af85471814907ba93f456 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sat, 29 Mar 2025 22:08:25 +0100
Subject: [PATCH 44/81] Format code

Signed-off-by: Philippe Ombredanne
---
 etc/scripts/check_thirdparty.py                  | 1 +
 etc/scripts/test_utils_pip_compatibility_tags.py | 1 +
 tests/test_skeleton_codestyle.py                 | 1 +
 3 files changed, 3 insertions(+)

diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py
index bb8347a..65ae595 100644
--- a/etc/scripts/check_thirdparty.py
+++ b/etc/scripts/check_thirdparty.py
@@ -11,6 +11,7 @@
 import utils_thirdparty
 
+
 @click.command()
 @click.option(
     "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index de4b706..0e9c360 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -33,6 +33,7 @@
 import utils_pip_compatibility_tags
 
+
 @pytest.mark.parametrize(
     "version_info, expected",
     [
diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py
index 8cd85c9..7135ac0 100644
--- a/tests/test_skeleton_codestyle.py
+++ b/tests/test_skeleton_codestyle.py
@@ -11,6 +11,7 @@
 import subprocess
 import unittest
 
+
 class BaseTests(unittest.TestCase):
     def test_skeleton_codestyle(self):
         # This test shouldn't run in proliferated repositories.
From 7c4278df4e8acf04888a188d115b4c687060f1e5 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 45/81] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3..0f8bd58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 47cb840db13d3e7328dba8d8e62197cda82e48ec Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 46/81] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 29 ++++++++++++++--------------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc..16e2046 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index f848b4b..01d0210 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,10 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's repositories using -modern Python packaging and configuration techniques that can then be applied to many repos. - -Using this `blog post`_ as inspiration, this repository serves as the base for all new Python -projects and is mergeable in existing repositories as well. +This repo attempts to standardize the structure of the Python-based project's +repositories using modern Python packaging and configuration techniques. +Using this `blog post`_ as inspiration, this repository serves as the base for +all new Python projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -69,7 +68,7 @@ Release Notes - Drop support for Python 3.8 - Drop support for macOS-11, add support for macOS-14 - + - 2024-02-19: - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 @@ -86,19 +85,19 @@ Release Notes - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. 
See etc/scripts/README.rst for details - 2021-09-03: - - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` - + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. From 7b29b5914a443069a7ff967eb4ef096034333248 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 47/81] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e801..debc404 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From 86c7ca45d3132e5f6873658ab1743b6e27cfeb58 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 48/81] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd58..51761ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 71583c5ecdfe5dd352b7f2bb9d26deaad971e151 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 49/81] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff..7d807eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." 
] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 0f1a40382bdcadf82512395787faab50927256f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 50/81] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807eb..5e16b56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." ] From 6bea6577864bf432438dabaed9e46a721aae2961 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 51/81] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d9288..83fd203 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d6..18b3703 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . 
--constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643..ad8e0d8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From c615589b54bfac74b6f17b2233b2afe60bf1d0f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 52/81] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404..d21a2f9 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b56..bfb1d35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From 04e0a89a3bbf5359f27b6e3ef9a5026638e63de8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 53/81] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 +++++------- Makefile | 10 +++++++--- docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b..10ba5fa 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f9..413399e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 docs/ *.rst + @${ACTIVATE} 
doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 9416323..0000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c542686..0000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad829..056ca6e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db2..041b358 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -169,11 +169,11 @@ What is checked: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +ScanCode toolkit documentation uses `Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ for more information. For example:: @@ -230,7 +230,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. 
However, this convention is used in Python’s Style Guide for documenting which you may follow:
diff --git a/pyproject.toml b/pyproject.toml
index bfb1d35..c9e6772 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,7 +122,5 @@ max-complexity = 10
 
 [tool.doc8]
-
 ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"]
 max-line-length=100
-verbose=0

From 8897cc63eb9ef9b06a1fdc77ebfe21289c69961b Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sun, 30 Mar 2025 18:49:01 +0200
Subject: [PATCH 54/81] Add twine check to release publication

Signed-off-by: Philippe Ombredanne
---
 .github/workflows/pypi-release.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index a66c9c8..cf0579a 100644
--- a/.github/workflows/pypi-release.yml
+++ b/.github/workflows/pypi-release.yml
@@ -30,12 +30,15 @@ jobs:
         with:
           python-version: 3.12
 
-      - name: Install pypa/build
-        run: python -m pip install build --user
+      - name: Install pypa/build and twine
+        run: python -m pip install --user build twine
 
       - name: Build a binary wheel and a source tarball
         run: python -m build --sdist --wheel --outdir dist/
 
+      - name: Validate wheel and sdist for PyPI
+        run: python -m twine check dist/*
+
       - name: Upload built archives
         uses: actions/upload-artifact@v4
         with:

From 3d42985990860188c5fbf9f64f3fd4d14c590a65 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sun, 30 Mar 2025 19:16:31 +0200
Subject: [PATCH 55/81] Refine doc contribution docs

Signed-off-by: Philippe Ombredanne
---
 docs/source/contribute/contrib_doc.rst | 119 ++++++++-----------------
 1 file changed, 38 insertions(+), 81 deletions(-)

diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst
index 041b358..dee9296 100644
--- a/docs/source/contribute/contrib_doc.rst
+++ b/docs/source/contribute/contrib_doc.rst
@@ -8,109 +8,59 @@ Contributing to the Documentation
 Setup Local Build
 -----------------
 
-To get started, create or identify a working directory on your local machine.
+To get started, check out and configure the repository for development::
 
-Open that directory and execute the following command in a terminal session::
+    git clone https://github.com/aboutcode-org/.git
 
-    git clone https://github.com/aboutcode-org/skeleton.git
+    cd your-repo
+    ./configure --dev
 
-That will create an ``/skeleton`` directory in your working directory.
-Now you can install the dependencies in a virtualenv::
-
-    cd skeleton
-    ./configure --docs
+(Or use "make dev")
 
 .. note::
 
-    In case of windows, run ``configure --docs`` instead of this.
-
-Now, this will install the following prerequisites:
-
-- Sphinx
-- sphinx_rtd_theme (the format theme used by ReadTheDocs)
-- docs8 (style linter)
+    On Windows, run ``configure --dev``.
 
-These requirements are already present in setup.cfg and `./configure --docs` installs them.
+This will install and configure all requirements for development, including for docs development.
 
-Now you can build the HTML documents locally::
+Now you can build the HTML documentation locally::
 
     source venv/bin/activate
-    cd docs
-    make html
+    make docs
 
-Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the
-documentation .html files::
-
-    open build/html/index.html
+This will build a local instance of the ``docs/_build`` directory::
 
+    open docs/_build/index.html
 
-.. note::
-
-    In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t
-    get a file descriptor referring to the console”, try:
-
-    ::
-
-       see build/html/index.html
-
-You now have a local build of the AboutCode documents.
 
-.. _contrib_doc_share_improvements:
-
-Share Document Improvements
----------------------------
-
-Ensure that you have the latest files::
-
-    git pull
-    git status
 
-Before commiting changes run Continious Integration Scripts locally to run tests. Refer
-:ref:`doc_ci` for instructions on the same.
+To validate the documentation style and content, use::
 
-Follow standard git procedures to upload your new and modified files. The following commands are
-examples::
-
-    git status
-    git add source/index.rst
-    git add source/how-to-scan.rst
-    git status
-    git commit -m "New how-to document that explains how to scan"
-    git status
-    git push
-    git status
+    source venv/bin/activate
+    make doc8
+    make docs-check
 
-The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your
-Pull Request is Merged.
 
-Refer the `Pro Git Book `_ available online for Git tutorials
-covering more complex topics on Branching, Merging, Rebasing etc.
 
 .. _doc_ci:
 
 Continuous Integration
 ----------------------
 
-The documentations are checked on every new commit through Travis-CI, so that common errors are
-avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects
-of the documentation :
+The documentation is checked on every new commit, so that common errors are avoided and
+documentation standards are enforced. We check for these aspects of the documentation:
 
 1. Successful Builds (By using ``sphinx-build``)
-2. No Broken Links (By Using ``link-check``)
-3. Linting Errors (By Using ``Doc8``)
+2. No Broken Links (By Using ``linkcheck``)
+3. Linting Errors (By Using ``doc8``)
 
-So run these scripts at your local system before creating a Pull Request::
+You must run these scripts locally before creating a pull request::
 
-    cd docs
-    ./scripts/sphinx_build_link_check.sh
-    ./scripts/doc8_style_check.sh
+    make doc8
+    make docs-check
 
-If you don't have permission to run the scripts, run::
-
-    chmod u+x ./scripts/doc8_style_check.sh
 
 .. _doc_style_docs8:
 
 Style Checks Using ``doc8``
 ---------------------------
 
 How To Run Style Tests
 ^^^^^^^^^^^^^^^^^^^^^^
 
 In the project root, run the following commands::
 
-    $ cd docs
-    $ ./scripts/doc8_style_check.sh
+    make doc8
 
 A sample output is::
 
@@ -143,11 +92,13 @@ A sample output is::
 
 Now fix the errors and run again till there isn't any style error in the documentation.
 
+
 What is Checked?
 ^^^^^^^^^^^^^^^^
 
 PyCQA is an Organization for code quality tools (and plugins) for the Python programming language.
-Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details.
+Doc8 is a sub-project of the same Organization. Refer this
+`README `_ for more details.
 
 What is checked:
 
@@ -164,16 +115,19 @@ What is checked:
 - no carriage returns (use UNIX newlines) - D004
 - no newline at end of file - D005
 
+
 .. _doc_interspinx:
 
 Interspinx
 ----------
 
-ScanCode toolkit documentation uses `Intersphinx `_
+AboutCode documentation uses
+`Intersphinx `_
 to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects.
 
 To link sections in the same documentation, standart reST labels are used.
Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From f428366859a143add93160bd0b1ae685be89fcbb Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:35:36 -0700 Subject: [PATCH 56/81] Update codestyle command * Remove trailing whitespace Signed-off-by: Jono Yang --- docs/source/contribute/contrib_doc.rst | 4 ++-- tests/test_skeleton_codestyle.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296..2a719a5 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 7135ac0..6060c08 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -23,7 +23,7 @@ def test_skeleton_codestyle(self): return commands = [ - ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "check"], ["venv/bin/ruff", "format", "--check"], ] command = None From f0d0e21d5e6f98645b02ff9a8fee6ee3def1be75 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:44:30 -0700 Subject: [PATCH 57/81] Update README.rst Signed-off-by: Jono Yang --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 01d0210..11a4dfb 100644 --- a/README.rst +++ b/README.rst @@ -44,6 +44,10 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. 
Release Notes ============= +- 2025-03-31: + + - Use ruff as the main code formatting tool, add ruff rules to pyproject.toml + - 2025-03-29: - Add support for beta macOS-15 From f3a8aa6cee5f645a668750ab7e6bf0cdc774e041 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 14:31:37 -0700 Subject: [PATCH 58/81] Update BUILDDIR envvar in docs/Makefile Signed-off-by: Jono Yang --- docs/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Makefile b/docs/Makefile index 788b039..94f686b 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -7,7 +7,7 @@ SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source -BUILDDIR = build +BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: From 5b0f4d6b4079719caa9ed97efb2ba776bc2bbac1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:42:52 +0200 Subject: [PATCH 59/81] Fix doc line length Signed-off-by: Philippe Ombredanne --- docs/source/contribute/contrib_doc.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296..2a719a5 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. 
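
For context on the ruff commands corrected in PATCH 56 above: ``ruff --check`` is not a valid
invocation; ``check`` is the lint subcommand, while ``format --check`` verifies formatting
without rewriting files. A minimal sketch of driving both from Python, mirroring the skeleton
codestyle test (this assumes ruff is installed in the project virtualenv at ``venv/bin/ruff``)::

    import subprocess

    # Lint first, then verify formatting; each command exits non-zero on violations.
    commands = [
        ["venv/bin/ruff", "check"],
        ["venv/bin/ruff", "format", "--check"],
    ]
    for command in commands:
        try:
            subprocess.check_output(command)
        except subprocess.CalledProcessError as e:
            print(e.output)
            raise

Most reported issues can then be fixed in place with ``ruff format`` and ``ruff check --fix``
(or ``make valid``).
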
From e776fef5ad595378752d39425109a4cbd2cb5175 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:49:40 +0200 Subject: [PATCH 60/81] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py index 5705fc4..374c06f 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -15,7 +15,7 @@ import click -ABOUTCODE_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES = [ "aboutcode-toolkit", "ahocode", "bitcode", @@ -87,7 +87,9 @@ def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) + subprocess.run( + ["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"] + ) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From 2a43f4cdc8105473b279eea873db00addaa14551 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:59:05 +0200 Subject: [PATCH 61/81] Correct supported runner on Azure See for details: https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml macOS ARM images do not seem to be supported there Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 80ae45b..fb03c09 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,14 +26,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython - image_name: macOS-13-xlarge - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -42,14 +34,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython - image_name: macOS-14-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos14_cpython_arm64 image_name: macOS-14 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -63,14 +47,6 @@ jobs: test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos15_cpython_arm64 - image_name: macOS-15-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2019_cpython From 4a15550b7bcea5ec949a5049fe1a501d3bb888ff Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 19:34:14 +0200 Subject: [PATCH 62/81] Add code checks to CI Remove running "make check" as a test Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 8 ++++++ tests/test_skeleton_codestyle.py | 44 -------------------------------- 2 files changed, 8 insertions(+), 44 deletions(-) delete mode 100644 tests/test_skeleton_codestyle.py diff --git a/azure-pipelines.yml b/azure-pipelines.yml index fb03c09..ad18b28 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -7,6 +7,14 @@ jobs: + - template: etc/ci/azure-posix.yml + parameters: + job_name: run_code_checks + 
image_name: ubuntu-24.04 + python_versions: ['3.12'] + test_suites: + all: make check + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu22_cpython diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py deleted file mode 100644 index 6060c08..0000000 --- a/tests/test_skeleton_codestyle.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# ScanCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/aboutcode-org/skeleton for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - -import configparser -import subprocess -import unittest - - -class BaseTests(unittest.TestCase): - def test_skeleton_codestyle(self): - # This test shouldn't run in proliferated repositories. - - # TODO: update with switch to pyproject.toml - setup_cfg = configparser.ConfigParser() - setup_cfg.read("setup.cfg") - if setup_cfg["metadata"]["name"] != "skeleton": - return - - commands = [ - ["venv/bin/ruff", "check"], - ["venv/bin/ruff", "format", "--check"], - ] - command = None - try: - for command in commands: - subprocess.check_output(command) # noqa: S603 - except subprocess.CalledProcessError as e: - print("===========================================================") - print(e.output) - print("===========================================================") - raise Exception( - f"Code style and linting command check failed: {' '.join(command)!r}.\n" - "You can check and format the code using:\n" - " make valid\n", - "OR:\n ruff format\n", - " ruff check --fix\n", - e.output, - ) from e From b2d7512735ca257088d2cac4b55590fc5d7b20b4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 20:15:18 +0200 Subject: [PATCH 63/81] Revert support for Python 3.13 This is not yet supported everywhere Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ad18b28..7a2d4d9 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -59,7 +59,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', 
'3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -67,7 +67,7 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +75,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 2e3464b79811bcf505d93692da2418e2444150ed Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:52:32 +0200 Subject: [PATCH 64/81] Ignore local .env file Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8a93c94..4818bb3 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ /.ruff_cache/ +.env \ No newline at end of file From d4af79f0da82ab0d16dcf60b363a6a5290cd9403 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:54:29 +0200 Subject: [PATCH 65/81] Add correct extras for documentation Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 8ab2368..7e399c8 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - docs + - testing From 49bfd37c7273f2118d35585c67e53f0cf7642f43 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:58:37 +0200 Subject: [PATCH 66/81] Improve MANIFEST Signed-off-by: Philippe Ombredanne --- MANIFEST.in | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index ef3721e..0f19707 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,6 @@ graft src +graft docs +graft etc include *.LICENSE include NOTICE @@ -6,10 +8,18 @@ include *.ABOUT include *.toml include *.yml include *.rst +include *.png include setup.* include configure* include requirements* -include .git* +include .dockerignore +include .gitignore +include .readthedocs.yml +include manage.py +include Dockerfile* +include Makefile +include MANIFEST.in -global-exclude *.py[co] __pycache__ *.*~ +include .VERSION +global-exclude *.py[co] __pycache__ *.*~ From 5bc987a16cb3ae0f6de101a9c9e277df431f4317 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:12:58 +0200 Subject: [PATCH 67/81] Improve cleaning on POSIX Signed-off-by: Philippe Ombredanne --- configure | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/configure b/configure index 83fd203..3dd9a0a 100755 --- a/configure +++ b/configure @@ -35,7 +35,7 @@ DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constrai VIRTUALENV_DIR=venv # Cleanable files and directories to delete with the --clean option -CLEANABLE="build dist venv .cache .eggs" +CLEANABLE="build dist venv .cache .eggs *.egg-info docs/_build/ pip-selfcheck.json" # extra arguments passed to pip PIP_EXTRA_ARGS=" " @@ -167,6 +167,7 @@ clean() { for cln in $CLEANABLE; do rm -rf "${CFG_ROOT_DIR:?}/${cln:?}"; done + find . 
-type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete set +e exit } From 887779a9bd36650ffc6751f0069b492e80dd2f08 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:19:44 +0200 Subject: [PATCH 68/81] Rename dev extra to "dev" Instead of testing ... and update references accordingly Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- Makefile | 7 ++++++- configure | 2 +- configure.bat | 2 +- setup.cfg | 2 +- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7e399c8..683f3a8 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - testing + - dev diff --git a/Makefile b/Makefile index 413399e..3041547 100644 --- a/Makefile +++ b/Makefile @@ -13,8 +13,13 @@ PYTHON_EXE?=python3 VENV=venv ACTIVATE?=. ${VENV}/bin/activate; + +conf: + @echo "-> Install dependencies" + ./configure + dev: - @echo "-> Configure the development envt." + @echo "-> Configure and install development dependencies" ./configure --dev doc8: diff --git a/configure b/configure index 3dd9a0a..5ef0e06 100755 --- a/configure +++ b/configure @@ -29,7 +29,7 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" -DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +DEV_REQUIREMENTS="--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" # where we create a virtualenv VIRTUALENV_DIR=venv diff --git a/configure.bat b/configure.bat index 18b3703..3e9881f 100644 --- a/configure.bat +++ b/configure.bat @@ -27,7 +27,7 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" -set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +set "DEV_REQUIREMENTS=--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" diff --git a/setup.cfg b/setup.cfg index ad8e0d8..99ba260 100644 --- a/setup.cfg +++ b/setup.cfg @@ -48,7 +48,7 @@ where = src [options.extras_require] -testing = +dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 From 209231f0d27de0b0cfcc51eeb0fbaf9393d3df1c Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:50:44 +0200 Subject: [PATCH 69/81] Add more excludes from tests Signed-off-by: Philippe Ombredanne --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c9e6772..bcca1a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ norecursedirs = [ "dist", "build", "_build", - "dist", "etc", "local", "ci", @@ -34,7 +33,9 @@ norecursedirs = [ "thirdparty", "tmp", "venv", + ".venv", "tests/data", + "*/tests/test_data", ".eggs", "src/*/data", "tests/*/data" From 47bce2da33db6b3ce3bb16831ddca89a65494e23 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:54:23 +0200 Subject: [PATCH 70/81] Do not lint django migrations Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bcca1a8..d79574e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ include = [ "docs/**/*.py", "*.py", "." 
+
 ]
 # ignore test data and testfiles: they should never be linted nor formatted
 exclude = [
@@ -78,9 +79,10 @@ exclude = [
 # vulnerablecode, fetchcode
     "**/tests/*/test_data/**/*",
     "**/tests/test_data/**/*",
+# django migrations
+    "**/migrations/**/*"
 ]
 
-
 [tool.ruff.lint]
 # Rules: https://docs.astral.sh/ruff/rules/
 select = [

From 5025cfb59f0555bf4b40cd75e75ce41188e19e11 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Wed, 2 Apr 2025 17:58:07 +0200
Subject: [PATCH 71/81] Add README.rst to list of "license files"

Signed-off-by: Philippe Ombredanne
---
 setup.cfg | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.cfg b/setup.cfg
index 99ba260..e5b56da 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -28,6 +28,7 @@ license_files =
     AUTHORS.rst
     CHANGELOG.rst
     CODE_OF_CONDUCT.rst
+    README.rst
 
 [options]
 package_dir =

From 548a72eac69e4400e4b01f22941d38fe1cb4648d Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Wed, 2 Apr 2025 18:59:08 +0200
Subject: [PATCH 72/81] Use Python 3.9 as lowest supported version

Signed-off-by: Philippe Ombredanne
---
 setup.cfg | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index e5b56da..a9c5dbc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,6 +31,8 @@ license_files =
     README.rst
 
 [options]
+python_requires = >=3.9
+
 package_dir =
     =src
 packages = find:
@@ -39,7 +41,6 @@ zip_safe = false
 
 setup_requires = setuptools_scm[toml] >= 4
 
-python_requires = >=3.8
 
 
 install_requires =

From 3d256b4ac7976b46c23424e86bb62a38f0e4a095 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Wed, 2 Apr 2025 19:01:22 +0200
Subject: [PATCH 73/81] Drop pycodestyle

Not used anymore

Signed-off-by: Philippe Ombredanne
---
 setup.cfg | 1 -
 1 file changed, 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index a9c5dbc..6d0b648 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -54,7 +54,6 @@ dev =
     pytest >= 6, != 7.0.0
     pytest-xdist >= 2
     aboutcode-toolkit >= 7.0.2
-    pycodestyle >= 2.8.0
     twine
     ruff
     Sphinx>=5.0.2

From 645052974bf7e6f45c1e55a24a2acaa0cee24523 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Wed, 2 Apr 2025 19:26:17 +0200
Subject: [PATCH 74/81] Bump pytest minimal version

Signed-off-by: Philippe Ombredanne
---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index 6d0b648..69f850c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -51,7 +51,7 @@ where = src
 
 [options.extras_require]
 dev =
-    pytest >= 6, != 7.0.0
+    pytest >= 7.0.1
     pytest-xdist >= 2
    aboutcode-toolkit >= 7.0.2
     twine

From af87cfab2d06fb034a90412a87e0d4e660e214ee Mon Sep 17 00:00:00 2001
From: Ayan Sinha Mahapatra
Date: Wed, 25 Jun 2025 00:40:47 +0530
Subject: [PATCH 75/81] Update CI runners and scripts

Signed-off-by: Ayan Sinha Mahapatra
---
 .github/workflows/docs-ci.yml      |  2 +-
 .github/workflows/pypi-release.yml |  4 +-
 azure-pipelines.yml                | 24 ++++--------
 configure                          |  2 +-
 configure.bat                      |  4 +-
 etc/ci/azure-container-deb.yml     |  2 +-
 etc/ci/azure-container-rpm.yml     |  2 +-
 etc/scripts/utils_thirdparty.py    | 61 +++++++++++++++---------------
 8 files changed, 46 insertions(+), 55 deletions(-)

diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml
index 10ba5fa..8d8aa55 100644
--- a/.github/workflows/docs-ci.yml
+++ b/.github/workflows/docs-ci.yml
@@ -9,7 +9,7 @@ jobs:
     strategy:
       max-parallel: 4
       matrix:
-        python-version: [3.12]
+        python-version: [3.13]
 
     steps:
       - name: Checkout code
diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index cf0579a..7f81361 100644
--- 
a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -31,10 +31,10 @@ jobs: python-version: 3.12 - name: Install pypa/build and twine - run: python -m pip install --user build twine + run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --wheel --outdir dist/ + run: python -m build --sdist --outdir dist/ - name: Validate wheel and sdis for Pypi run: python -m twine check dist/* diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7a2d4d9..4d347b7 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ jobs: parameters: job_name: run_code_checks image_name: ubuntu-24.04 - python_versions: ['3.12'] + python_versions: ['3.13'] test_suites: all: make check @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,23 +51,15 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml - parameters: - job_name: win2019_cpython - image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', '3.12'] - test_suites: - all: venv\Scripts\pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +67,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs diff --git a/configure b/configure index 5ef0e06..6d317d4 100755 --- a/configure +++ b/configure @@ -110,7 +110,7 @@ create_virtualenv() { fi $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ - --wheel embed --pip embed --setuptools embed \ + --pip embed --setuptools embed \ --seeder pip \ --never-download \ --no-periodic-update \ diff --git a/configure.bat b/configure.bat index 3e9881f..15ab701 100644 --- a/configure.bat +++ b/configure.bat @@ -110,7 +110,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( if exist "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ( %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ @@ -126,7 +126,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( ) ) 
%PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ diff --git a/etc/ci/azure-container-deb.yml b/etc/ci/azure-container-deb.yml index 85b611d..d80e8df 100644 --- a/etc/ci/azure-container-deb.yml +++ b/etc/ci/azure-container-deb.yml @@ -21,7 +21,7 @@ jobs: - job: ${{ parameters.job_name }} pool: - vmImage: 'ubuntu-16.04' + vmImage: 'ubuntu-22.04' container: image: ${{ parameters.container }} diff --git a/etc/ci/azure-container-rpm.yml b/etc/ci/azure-container-rpm.yml index 1e6657d..a64138c 100644 --- a/etc/ci/azure-container-rpm.yml +++ b/etc/ci/azure-container-rpm.yml @@ -1,6 +1,6 @@ parameters: job_name: '' - image_name: 'ubuntu-16.04' + image_name: 'ubuntu-22.04' container: '' python_path: '' python_version: '' diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index aafc1d6..6f812f0 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -24,13 +25,14 @@ import packageurl import requests import saneyaml -import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version +import utils_pip_compatibility_tags + """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. @@ -91,8 +93,7 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as - {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: @@ -114,13 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "37", "38", "39", "310" +PYTHON_VERSIONS = "39", "310", "311", "312", "313" PYTHON_DOT_VERSIONS_BY_VER = { - "37": "3.7", - "38": "3.8", "39": "3.9", "310": "3.10", + "311": "3.11", + "312": "3.12", + "313": "3.13", } @@ -132,10 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "37": ["cp37", "cp37m", "abi3"], - "38": ["cp38", "cp38m", "abi3"], "39": ["cp39", "cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], + "311": ["cp311", "cp311m", "abi3"], + "312": ["cp312", "cp312m", "abi3"], + "313": ["cp313", "cp313m", "abi3"], } PLATFORMS_BY_OS = { @@ -553,8 +556,7 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - if not self.filename: - raise ValueError(f"self.filename has no value but is required: {self.filename!r}") + assert self.filename if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -822,9 +824,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): """ urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] + extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [eln for eln in extra_lic_names if eln] + extra_lic_names = [ln for ln in extra_lic_names if ln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -844,7 +846,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except Exception: + except: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -857,9 +859,8 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except Exception: - msg = f"No text for license {filename} in expression " - f"{self.license_expression!r} from {self}" + except: + msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' print(msg) errors.append(msg) @@ -999,7 +1000,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [url for url in urls if url.endswith(f"/{filename}")] + path_or_url = [l for l in urls if l.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1288,7 +1289,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except Exception: + except: return False @@ -1484,7 +1485,8 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - yield from self.wheels + for wheel in self.wheels: + yield wheel def get_url_for_filename(self, filename): """ @@ -1613,8 +1615,7 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " - "available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" ), ) @@ -1628,8 +1629,7 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. " - "Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." ), ) @@ -1638,8 +1638,7 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - if not name: - raise ValueError(f"name is required: {name!r}") + assert name normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1694,7 +1693,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [link.partition("#sha256=") for link in links] + links = [l.partition("#sha256=") for l in links] links = [url for url, _, _sha256 in links] return links @@ -1915,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2134,7 +2133,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: stdouts = [] @@ -2199,7 +2198,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print("Downloading wheels using command:", " ".join(cli_args)) + print(f"Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2232,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2283,5 +2282,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(lic).lower() for lic in declared_licenses] + lics = [python_safe_name(l).lower() for l in declared_licenses] return " AND ".join(lics).lower() From 72c7d266275a472073d2829a25d730ada9436ab3 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 18:16:30 +0530 Subject: [PATCH 76/81] Add missing wheel builds on release CI Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/pypi-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 7f81361..d41fbf2 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -34,7 +34,7 @@ jobs: run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --outdir dist/ + run: python -m build --wheel --sdist --outdir dist/ - name: Validate wheel and sdis for Pypi run: python -m twine check dist/* From 265e6121c9bf0eb331e8465a08efb2cc9a169500 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Tue, 21 Oct 2025 20:07:02 +0530 Subject: [PATCH 77/81] Drop python3.9 support and add python 3.14 Signed-off-by: Ayan Sinha Mahapatra --- azure-pipelines.yml | 14 +++++++------- etc/scripts/utils_thirdparty.py | 6 +++--- setup.cfg | 2 +- 3 files changed, 11 insertions(+), 
11 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 4d347b7..7230c41 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -59,7 +59,7 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -67,6 +67,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] test_suites: all: venv\Scripts\pytest -n 2 -vvs diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6f812f0..bc68ac7 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -115,14 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "39", "310", "311", "312", "313" +PYTHON_VERSIONS = "310", "311", "312", "313", "314" PYTHON_DOT_VERSIONS_BY_VER = { - "39": "3.9", "310": "3.10", "311": "3.11", "312": "3.12", "313": "3.13", + "314": "3.14", } @@ -134,11 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "39": ["cp39", "cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], "311": ["cp311", "cp311m", "abi3"], "312": ["cp312", "cp312m", "abi3"], "313": ["cp313", "cp313m", "abi3"], + "314": ["cp314", "cp314m", "abi3"], } PLATFORMS_BY_OS = { diff --git a/setup.cfg b/setup.cfg index 69f850c..fa111c2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ license_files = README.rst [options] -python_requires = >=3.9 +python_requires = >=3.10 package_dir = =src From fc4fe3addd98f64684d146926d7f318a9fd469c4 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 22 Oct 2025 18:44:23 +0530 Subject: [PATCH 78/81] Support trusted-publishing for package releases Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/pypi-release.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index d41fbf2..7da0a40 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -71,6 +71,9 @@ jobs: needs: - create-gh-release runs-on: ubuntu-24.04 + 
environment: pypi-publish + permissions: + id-token: write steps: - name: Download built archives @@ -81,6 +84,4 @@ jobs: - name: Publish to PyPI if: startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_API_TOKEN }} + uses: pypa/gh-action-pypi-publish@release/v1 \ No newline at end of file From e81ff6d37248d93689f9947581a7a2148c96785d Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 24 Oct 2025 16:43:15 +0530 Subject: [PATCH 79/81] Update RTD build python version Signed-off-by: Ayan Sinha Mahapatra --- .readthedocs.yml | 2 +- pyproject.toml | 2 +- setup.cfg | 3 --- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 683f3a8..27c1595 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -9,7 +9,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.11" + python: "3.13" # Build PDF & ePub formats: diff --git a/pyproject.toml b/pyproject.toml index d79574e..f106e69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 6"] +requires = ["setuptools >= 50", "wheel"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] diff --git a/setup.cfg b/setup.cfg index fa111c2..a0f2985 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,9 +39,6 @@ packages = find: include_package_data = true zip_safe = false -setup_requires = setuptools_scm[toml] >= 4 - - install_requires = From fe5264beb35b0fd757c2567373dc29576ebe2f47 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 31 Oct 2025 16:42:45 +0530 Subject: [PATCH 80/81] Update requirements and python version in manifests Signed-off-by: Ayan Sinha Mahapatra --- configure | 2 +- requirements.txt | 40 ++++++++++++++++++++-------------------- setup.cfg | 2 +- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/configure b/configure index eff6d02..8765e78 100755 --- a/configure +++ b/configure @@ -84,7 +84,7 @@ find_python() { if [ -f "$CFG_ROOT_DIR/PYTHON_EXECUTABLE" ]; then PYTHON_EXECUTABLE=$(cat "$CFG_ROOT_DIR/PYTHON_EXECUTABLE") else - PYTHON_EXECUTABLE=python3 + PYTHON_EXECUTABLE=python3.14 fi fi } diff --git a/requirements.txt b/requirements.txt index 2b171e0..80b4f98 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,18 @@ -attrs==22.1.0 +attrs==25.4.0 banal==1.0.6 -beautifulsoup4==4.11.1 +beautifulsoup4==4.14.2 binaryornot==0.4.4 boolean.py==4.0 -certifi==2022.6.15 -cffi==1.15.1 -chardet==5.0.0 -charset-normalizer==2.1.0 -click==8.1.3 +certifi==2025.10.5 +cffi==2.0.0 +chardet==5.2.0 +charset-normalizer==3.4.4 +click==8.3.0 colorama==0.4.5 -commoncode==31.0.2 +commoncode==32.4.0 construct==2.10.68 container-inspector==31.1.0 -cryptography==37.0.4 +cryptography==46.0.3 debian-inspector==31.0.0 dockerfile-parse==1.2.0 dparse2==0.7.0 @@ -24,7 +24,7 @@ ftfy==6.1.1 future==0.18.2 gemfileparser2==0.9.0 html5lib==1.1 -idna==3.3 +idna==3.11 importlib-metadata==4.12.0 inflection==0.5.1 intbitset==3.0.2 @@ -42,34 +42,34 @@ packageurl-python==0.10.0 packaging==21.3 packvers==21.5 parameter-expansion-patched==0.3.1 -pdfminer.six==20220524 +pdfminer.six==20250506 pefile==2022.5.30 pip-requirements-parser==32.0.1 pkginfo2==30.0.0 -pluggy==1.0.0 +pluggy==1.6.0 plugincode==32.0.0 ply==3.11 publicsuffix2==2.20191221 pyahocorasick==2.0.0 -pycparser==2.21 +pycparser==2.23 pygmars==0.7.0 Pygments==2.13.0 pymaven-patch==0.3.0 pyparsing==3.0.9 pytz==2022.1 -PyYAML==6.0 +PyYAML==6.0.3 rdflib==6.2.0 
-requests==2.28.1 -saneyaml==0.6.0 +requests==2.32.5 +saneyaml==0.6.1 six==1.16.0 -soupsieve==2.3.2.post1 +soupsieve==2.8 spdx-tools==0.7.0rc0 text-unidecode==1.3 toml==0.10.2 -typecode==30.0.1 +typecode==30.0.2 typecode-libmagic==5.39.210531 -typing-extensions==4.3.0 -urllib3==1.26.11 +typing_extensions==4.15.0 +urllib3==2.5.0 urlpy==0.5 wcwidth==0.2.5 webencodings==0.5.1 diff --git a/setup.cfg b/setup.cfg index cae2e90..c495104 100644 --- a/setup.cfg +++ b/setup.cfg @@ -62,7 +62,7 @@ packages = find: include_package_data = true zip_safe = false -python_requires = >=3.7 +python_requires = >=3.10 install_requires = attrs >= 18.1,!=20.1.0;python_version<'3.11' From f1041dcc0f330ce5806a0056182068d677f335ee Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 31 Oct 2025 17:12:40 +0530 Subject: [PATCH 81/81] Fix test failures on CI Signed-off-by: Ayan Sinha Mahapatra --- setup.cfg | 3 +- src/extractcode/__init__.py | 60 +- src/extractcode/api.py | 1 + src/extractcode/archive.py | 860 +++++++----- src/extractcode/cli.py | 160 +-- src/extractcode/extract.py | 47 +- src/extractcode/libarchive2.py | 96 +- src/extractcode/patch.py | 23 +- src/extractcode/sevenzip.py | 238 ++-- src/extractcode/uncompress.py | 19 +- src/extractcode/vmimage.py | 141 +- tests/extractcode_assert_utils.py | 65 +- tests/test_archive.py | 2185 +++++++++++++++-------------- tests/test_extract.py | 1165 +++++++-------- tests/test_extractcode.py | 52 +- tests/test_extractcode_api.py | 24 +- tests/test_extractcode_cli.py | 219 +-- tests/test_libarchive2.py | 15 +- tests/test_patch.py | 1520 +++++++++++++------- tests/test_sevenzip.py | 171 ++- tests/test_vmimage.py | 26 +- 21 files changed, 3980 insertions(+), 3110 deletions(-) diff --git a/setup.cfg b/setup.cfg index 033a88d..c8b1be7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -92,11 +92,12 @@ dev = twine black isort + ruff Sphinx == 5.1.0 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 doc8>=0.11.2 [options.entry_points] -console-scripts = +console_scripts = extractcode = extractcode.cli:extractcode diff --git a/src/extractcode/__init__.py b/src/extractcode/__init__.py index fb6095d..f0c0fe6 100644 --- a/src/extractcode/__init__.py +++ b/src/extractcode/__init__.py @@ -32,7 +32,7 @@ logger.setLevel(logging.DEBUG) # Suffix added to extracted target_dir paths -EXTRACT_SUFFIX = '-extract' +EXTRACT_SUFFIX = "-extract" # high level archive "kinds" docs = 1 @@ -44,13 +44,13 @@ special_package = 7 kind_labels = { - 1: 'docs', - 2: 'regular', - 3: 'regular_nested', - 4: 'package', - 5: 'file_system', - 6: 'patches', - 7: 'special_package', + 1: "docs", + 2: "regular", + 3: "regular_nested", + 4: "package", + 5: "file_system", + 6: "patches", + 7: "special_package", } # note: we do not include special_package in all_kinds by default @@ -72,13 +72,13 @@ # map user-visible extract types to tuples of "kinds" extract_types = { - 'default': default_kinds, - 'all': all_kinds, - 'package': (package,), - 'filesystem': (file_system,), - 'doc': (docs,), - 'patch': (patches,), - 'special_package': (special_package,), + "default": default_kinds, + "all": all_kinds, + "package": (package,), + "filesystem": (file_system,), + "doc": (docs,), + "patch": (patches,), + "special_package": (special_package,), } @@ -86,7 +86,7 @@ def is_extraction_path(path): """ Return True is the path points to an extraction path. 
""" - return path and path.rstrip('\\/').endswith(EXTRACT_SUFFIX) + return path and path.rstrip("\\/").endswith(EXTRACT_SUFFIX) def is_extracted(location): @@ -101,14 +101,14 @@ def get_extraction_path(path): """ Return a path where to extract. """ - return path.rstrip('\\/') + EXTRACT_SUFFIX + return path.rstrip("\\/") + EXTRACT_SUFFIX def remove_archive_suffix(path): """ Remove all the extracted suffix from a path. """ - return re.sub(EXTRACT_SUFFIX, '', path) + return re.sub(EXTRACT_SUFFIX, "", path) def remove_backslashes_and_dotdots(directory): @@ -119,13 +119,13 @@ def remove_backslashes_and_dotdots(directory): errors = [] for top, _, files in os.walk(directory): for filename in files: - if not ('\\' in filename or '..' in filename): + if not ("\\" in filename or ".." in filename): continue try: - new_path = as_posixpath(filename).strip('/') - new_path = posixpath.normpath(new_path).replace('..', '/').strip('/') + new_path = as_posixpath(filename).strip("/") + new_path = posixpath.normpath(new_path).replace("..", "/").strip("/") new_path = posixpath.normpath(new_path) - segments = new_path.split('/') + segments = new_path.split("/") directory = join(top, *segments[:-1]) create_dir(directory) shutil.move(join(top, filename), join(top, *segments)) @@ -149,7 +149,7 @@ def new_name(location, is_dir=False): the extension unchanged. """ assert location - location = location.rstrip('\\/') + location = location.rstrip("\\/") assert location parent = parent_directory(location) @@ -160,8 +160,8 @@ def new_name(location, is_dir=False): filename = file_name(location) # corner case - if filename in ('.', '..'): - filename = '_' + if filename in (".", ".."): + filename = "_" # if unique, return this if filename.lower() not in siblings_lower: @@ -171,19 +171,19 @@ def new_name(location, is_dir=False): if is_dir: # directories do not have an "extension" base_name = filename - ext = '' + ext = "" else: - base_name, dot, ext = filename.partition('.') + base_name, dot, ext = filename.partition(".") if dot: - ext = f'.{ext}' + ext = f".{ext}" else: base_name = filename - ext = '' + ext = "" # find a unique filename, adding a counter int to the base_name counter = 1 while 1: - filename = f'{base_name}_{counter}{ext}' + filename = f"{base_name}_{counter}{ext}" if filename.lower() not in siblings_lower: break counter += 1 diff --git a/src/extractcode/api.py b/src/extractcode/api.py index 38d83b2..67c5758 100644 --- a/src/extractcode/api.py +++ b/src/extractcode/api.py @@ -68,6 +68,7 @@ def extract_archive(location, target, verbose=False): from extractcode.extract import extract_file from extractcode import all_kinds + return extract_file( location=location, target=target, diff --git a/src/extractcode/archive.py b/src/extractcode/archive.py index d52399e..4ae35c2 100644 --- a/src/extractcode/archive.py +++ b/src/extractcode/archive.py @@ -40,6 +40,7 @@ if TRACE: import sys + logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) @@ -71,16 +72,16 @@ # if strict, all handlers criteria must be matched for a handler to be selected Handler = namedtuple( - 'Handler', + "Handler", [ - 'name', - 'filetypes', - 'mimetypes', - 'extensions', - 'kind', - 'extractors', - 'strict', - ] + "name", + "filetypes", + "mimetypes", + "extensions", + "kind", + "extractors", + "strict", + ], ) @@ -102,17 +103,13 @@ def should_extract(location, kinds, ignore_pattern=()): """ location = os.path.abspath(os.path.expanduser(location)) ignore_pattern = { - extension : 'User ignore: Supplied by --ignore' - for extension 
in ignore_pattern + extension: "User ignore: Supplied by --ignore" for extension in ignore_pattern } should_ignore = is_ignored(location, ignore_pattern) extractor = get_extractor(location, kinds=kinds) if TRACE_DEEP: - logger.debug( - f' should_extract: extractor: {extractor}, ' - f'should_ignore: {should_ignore}' - ) + logger.debug(f" should_extract: extractor: {extractor}, should_ignore: {should_ignore}") if extractor and not should_ignore: return True @@ -137,7 +134,7 @@ def get_extractor(location, kinds=all_kinds): extractors = get_extractors(location, kinds=kinds) if not extractors: if TRACE_DEEP: - logger.debug(f' get_extractor: not extractors: {extractors}') + logger.debug(f" get_extractor: not extractors: {extractors}") return None if len(extractors) == 2: @@ -161,7 +158,7 @@ def get_extractors(location, kinds=all_kinds): """ handler = get_best_handler(location, kinds) if TRACE_DEEP: - logger.debug(f' get_extractors: handler: {handler}') + logger.debug(f" get_extractors: handler: {handler}") return handler and handler.extractors or [] @@ -176,21 +173,21 @@ def get_best_handler(location, kinds=all_kinds): handlers = list(get_handlers(location)) if TRACE_DEEP: - logger.debug(f' get_best_handler: handlers: {handlers}') + logger.debug(f" get_best_handler: handlers: {handlers}") if not handlers: return candidates = list(score_handlers(handlers)) if TRACE_DEEP: - logger.debug(f' get_best_handler: candidates: {candidates}') + logger.debug(f" get_best_handler: candidates: {candidates}") if not candidates: if TRACE_DEEP: - logger.debug(f' get_best_handler: candidates: {candidates}') + logger.debug(f" get_best_handler: candidates: {candidates}") return picked = pick_best_handler(candidates, kinds=kinds) if TRACE_DEEP: - logger.debug(f' get_best_handler: picked: {picked}') + logger.debug(f" get_best_handler: picked: {picked}") return picked @@ -200,27 +197,27 @@ def get_handlers(location): extension_matched,) for this `location`. 
""" if filetype.is_file(location): - T = contenttype.get_type(location) ftype = T.filetype_file.lower() mtype = T.mimetype_file if TRACE_DEEP: logger.debug( - 'get_handlers: processing %(location)s: ' - 'ftype: %(ftype)s, mtype: %(mtype)s ' % locals()) + "get_handlers: processing %(location)s: " + "ftype: %(ftype)s, mtype: %(mtype)s " % locals() + ) for handler in archive_handlers: if not handler.extractors: continue extractor_count = len(handler.extractors) if extractor_count > 2: - raise Exception('Maximum level of archive nesting is two.') + raise Exception("Maximum level of archive nesting is two.") # default to False type_matched = handler.filetypes and any(t in ftype for t in handler.filetypes) if TRACE_DEEP: - logger.debug(f' get_handlers: handler.filetypes={handler.filetypes}') + logger.debug(f" get_handlers: handler.filetypes={handler.filetypes}") mime_matched = handler.mimetypes and any(m in mtype for m in handler.mimetypes) exts = handler.extensions if exts: @@ -228,26 +225,19 @@ def get_handlers(location): if TRACE_DEEP: print( - f' get_handlers: matched type: {type_matched}, ' - f'mime: {mime_matched}, ext: {extension_matched}' % locals() - ) - - if ( - handler.strict - and not ( - type_matched - and mime_matched - and extension_matched + f" get_handlers: matched type: {type_matched}, " + f"mime: {mime_matched}, ext: {extension_matched}" % locals() ) - ): + + if handler.strict and not (type_matched and mime_matched and extension_matched): if TRACE_DEEP: - print(f' get_handlers: skip strict: {handler.name}') + print(f" get_handlers: skip strict: {handler.name}") continue if type_matched or mime_matched or extension_matched: if TRACE_DEEP: handler_name = handler.name - logger.debug(' get_handlers: yielding handler: %(handler_name)r' % locals()) + logger.debug(" get_handlers: yielding handler: %(handler_name)r" % locals()) yield handler, type_matched, mime_matched, extension_matched @@ -258,16 +248,17 @@ def score_handlers(handlers): for handler, type_matched, mime_matched, extension_matched in handlers: if TRACE_DEEP: logger.debug( - f' score_handlers: handler={handler}, ' - f'type_matched={type_matched}, ' - f'mime_matched={mime_matched}, ' - f'extension_matched={extension_matched}' + f" score_handlers: handler={handler}, " + f"type_matched={type_matched}, " + f"mime_matched={mime_matched}, " + f"extension_matched={extension_matched}" ) score = 0 # increment kind value: higher kinds numerical values are more # specific by design score += handler.kind - if TRACE_DEEP: logger.debug(f' score_handlers: score += handler.kind {score}') + if TRACE_DEEP: + logger.debug(f" score_handlers: score += handler.kind {score}") # increment score based on matched criteria if type_matched and mime_matched and extension_matched: @@ -300,8 +291,9 @@ def score_handlers(handlers): if TRACE_DEEP: handler_name = handler.name logger.debug( - ' score_handlers: yielding handler: %(handler_name)r, ' - 'score: %(score)d, extension_matched: %(extension_matched)r' % locals()) + " score_handlers: yielding handler: %(handler_name)r, " + "score: %(score)d, extension_matched: %(extension_matched)r" % locals() + ) if score > 0: yield score, handler, extension_matched @@ -320,7 +312,7 @@ def pick_best_handler(candidates, kinds): scored = sorted(candidates, reverse=True) if TRACE_DEEP: - logger.debug(f' pick_best_handler: scored: {scored}') + logger.debug(f" pick_best_handler: scored: {scored}") if not scored: return @@ -377,20 +369,20 @@ def extract_twice(location, target_dir, extractor1, extractor2): abs_location 
= os.path.abspath(os.path.expanduser(location)) abs_target_dir = str(os.path.abspath(os.path.expanduser(target_dir))) # extract first the intermediate payload to a temp dir - temp_target = str(fileutils.get_temp_dir(prefix='extractcode-extract-')) + temp_target = str(fileutils.get_temp_dir(prefix="extractcode-extract-")) warnings = extractor1(abs_location, temp_target) if TRACE: - logger.debug('extract_twice: temp_target: %(temp_target)r' % locals()) + logger.debug("extract_twice: temp_target: %(temp_target)r" % locals()) # extract this intermediate payload to the final target_dir try: inner_archives = list(fileutils.resource_iter(temp_target, with_dirs=False)) if not inner_archives: - warnings.append(location + ': No files found in archive.') + warnings.append(location + ": No files found in archive.") else: for extracted1_loc in inner_archives: if TRACE: - logger.debug('extract_twice: extractor2: %(extracted1_loc)r' % locals()) + logger.debug("extract_twice: extractor2: %(extracted1_loc)r" % locals()) warnings.extend(extractor2(extracted1_loc, abs_target_dir)) finally: # cleanup the temporary output from extractor1 @@ -411,18 +403,18 @@ def extract_with_fallback(location, target_dir, extractor1, extractor2): abs_location = os.path.abspath(os.path.expanduser(location)) abs_target_dir = str(os.path.abspath(os.path.expanduser(target_dir))) # attempt extract first to a temp dir - temp_target1 = str(fileutils.get_temp_dir(prefix='extractcode-extract1-')) + temp_target1 = str(fileutils.get_temp_dir(prefix="extractcode-extract1-")) try: warnings = extractor1(abs_location, temp_target1) if TRACE: - logger.debug('extract_with_fallback: temp_target1: %(temp_target1)r' % locals()) + logger.debug("extract_with_fallback: temp_target1: %(temp_target1)r" % locals()) fileutils.copytree(temp_target1, abs_target_dir) except: try: - temp_target2 = str(fileutils.get_temp_dir(prefix='extractcode-extract2-')) + temp_target2 = str(fileutils.get_temp_dir(prefix="extractcode-extract2-")) warnings = extractor2(abs_location, temp_target2) if TRACE: - logger.debug('extract_with_fallback: temp_target2: %(temp_target2)r' % locals()) + logger.debug("extract_with_fallback: temp_target2: %(temp_target2)r" % locals()) fileutils.copytree(temp_target2, abs_target_dir) finally: fileutils.delete(temp_target2) @@ -441,12 +433,12 @@ def try_to_extract(location, target_dir, extractor): """ abs_location = os.path.abspath(os.path.expanduser(location)) abs_target_dir = str(os.path.abspath(os.path.expanduser(target_dir))) - temp_target = str(fileutils.get_temp_dir(prefix='extractcode-extract1-')) + temp_target = str(fileutils.get_temp_dir(prefix="extractcode-extract1-")) warnings = [] try: warnings = extractor(abs_location, temp_target) if TRACE: - logger.debug('try_to_extract: temp_target: %(temp_target)r' % locals()) + logger.debug("try_to_extract: temp_target: %(temp_target)r" % locals()) fileutils.copytree(temp_target, abs_target_dir) except: return warnings @@ -454,6 +446,7 @@ def try_to_extract(location, target_dir, extractor): fileutils.delete(temp_target) return warnings + # High level aliases to lower level extraction functions ######################################################## @@ -511,626 +504,780 @@ def try_to_extract(location, target_dir, extractor): #################### TarHandler = Handler( - name='Tar', - filetypes=('.tar', 'tar archive',), - mimetypes=('application/x-tar',), - extensions=('.tar',), + name="Tar", + filetypes=( + ".tar", + "tar archive", + ), + mimetypes=("application/x-tar",), + 
extensions=(".tar",), kind=regular, extractors=[extract_tar], - strict=False + strict=False, ) RubyGemHandler = Handler( - name='Ruby Gem package', - filetypes=('.tar', 'tar archive',), - mimetypes=('application/x-tar',), - extensions=('.gem',), + name="Ruby Gem package", + filetypes=( + ".tar", + "tar archive", + ), + mimetypes=("application/x-tar",), + extensions=(".gem",), kind=package, extractors=[extract_tar], - strict=True + strict=True, ) ZipHandler = Handler( - name='Zip', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.zip', '.zipx',), + name="Zip", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=( + ".zip", + ".zipx", + ), kind=regular, extractors=[extract_zip], - strict=False + strict=False, ) OfficeDocHandler = Handler( - name='Office doc', + name="Office doc", filetypes=( - 'zip archive', - 'microsoft word 2007+', - 'microsoft excel 2007+', - 'microsoft powerpoint 2007+', + "zip archive", + "microsoft word 2007+", + "microsoft excel 2007+", + "microsoft powerpoint 2007+", + ), + mimetypes=( + "application/zip", + "application/vnd.openxmlformats", ), - mimetypes=('application/zip', 'application/vnd.openxmlformats',), # Extensions of office documents that are zip files too extensions=( # ms doc - '.docx', '.dotx', '.docm', + ".docx", + ".dotx", + ".docm", # ms xls - '.xlsx', '.xltx', '.xlsm', '.xltm', + ".xlsx", + ".xltx", + ".xlsm", + ".xltm", # ms ppt - '.pptx', '.ppsx', '.potx', '.pptm', '.potm', '.ppsm', + ".pptx", + ".ppsx", + ".potx", + ".pptm", + ".potm", + ".ppsm", # oo write - '.odt', '.odf', '.sxw', '.stw', + ".odt", + ".odf", + ".sxw", + ".stw", # oo calc - '.ods', '.ots', '.sxc', '.stc', + ".ods", + ".ots", + ".sxc", + ".stc", # oo pres and draw - '.odp', '.otp', '.odg', '.otg', '.sxi', '.sti', '.sxd', - '.sxg', '.std', + ".odp", + ".otp", + ".odg", + ".otg", + ".sxi", + ".sti", + ".sxd", + ".sxg", + ".std", # star office - '.sdc', '.sda', '.sdd', '.smf', '.sdw', '.sxm', '.stw', - '.oxt', '.sldx', - - '.epub', + ".sdc", + ".sda", + ".sdd", + ".smf", + ".sdw", + ".sxm", + ".stw", + ".oxt", + ".sldx", + ".epub", ), kind=docs, extractors=[extract_zip], - strict=True + strict=True, ) AndroidAppHandler = Handler( - name='Android app', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.apk',), + name="Android app", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=(".apk",), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) # see http://tools.android.com/tech-docs/new-build-system/aar-formats AndroidLibHandler = Handler( - name='Android library', - filetypes=('zip archive',), - mimetypes=('application/zip',), + name="Android library", + filetypes=("zip archive",), + mimetypes=("application/zip",), # note: Apache Axis also uses AAR extensions for plain Jars - extensions=('.aar',), + extensions=(".aar",), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) MozillaExtHandler = Handler( - name='Mozilla extension', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.xpi',), + name="Mozilla extension", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=(".xpi",), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) # see https://developer.chrome.com/extensions/crx # not supported for now ChromeExtHandler = Handler( - name='Chrome extension', - filetypes=('data',), - mimetypes=('application/octet-stream',), - extensions=('.crx',), + name="Chrome 
extension", + filetypes=("data",), + mimetypes=("application/octet-stream",), + extensions=(".crx",), kind=package, extractors=[extract_7z], - strict=True + strict=True, ) IosAppHandler = Handler( - name='iOS app', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.ipa',), + name="iOS app", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=(".ipa",), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) JavaJarHandler = Handler( - name='Java Jar package', - filetypes=('java archive',), - mimetypes=('application/java-archive',), - extensions=('.jar', '.zip',), + name="Java Jar package", + filetypes=("java archive",), + mimetypes=("application/java-archive",), + extensions=( + ".jar", + ".zip", + ), kind=package, extractors=[extract_zip], - strict=False + strict=False, ) JavaJarZipHandler = Handler( - name='Java Jar package', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.jar',), + name="Java Jar package", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=(".jar",), kind=package, extractors=[extract_zip], - strict=False + strict=False, ) # See https://projects.spring.io/spring-boot/ # this is a ZIP with a shell header (e.g. a self-executing zip of sorts) # internalyl the zip is really a war rather than a jar SpringBootShellJarHandler = Handler( - name='Springboot Java Jar package', - filetypes=('bourne-again shell script executable (binary data)',), - mimetypes=('text/x-shellscript',), - extensions=('.jar',), + name="Springboot Java Jar package", + filetypes=("bourne-again shell script executable (binary data)",), + mimetypes=("text/x-shellscript",), + extensions=(".jar",), kind=package, extractors=[extract_springboot], - strict=True + strict=True, ) JavaWebHandler = Handler( - name='Java archive', - filetypes=('zip archive',), - mimetypes=('application/zip', 'application/java-archive',), - extensions=('.war', '.sar', '.ear',), + name="Java archive", + filetypes=("zip archive",), + mimetypes=( + "application/zip", + "application/java-archive", + ), + extensions=( + ".war", + ".sar", + ".ear", + ), kind=regular, extractors=[extract_zip], - strict=True + strict=True, ) PythonHandler = Handler( - name='Python package', - filetypes=('zip archive',), - mimetypes=('application/zip',), - extensions=('.egg', '.whl', '.pyz', '.pex',), + name="Python package", + filetypes=("zip archive",), + mimetypes=("application/zip",), + extensions=( + ".egg", + ".whl", + ".pyz", + ".pex", + ), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) XzHandler = Handler( - name='xz', - filetypes=('xz compressed',), - mimetypes=('application/x-xz',) , - extensions=('.xz',), + name="xz", + filetypes=("xz compressed",), + mimetypes=("application/x-xz",), + extensions=(".xz",), kind=regular, extractors=[extract_xz], - strict=False + strict=False, ) LzmaHandler = Handler( - name='lzma', - filetypes=('lzma compressed',), - mimetypes=('application/x-xz',) , - extensions=('.lzma',), + name="lzma", + filetypes=("lzma compressed",), + mimetypes=("application/x-xz",), + extensions=(".lzma",), kind=regular, extractors=[extract_lzma], - strict=False + strict=False, ) TarXzHandler = Handler( - name='Tar xz', - filetypes=('xz compressed',), - mimetypes=('application/x-xz',) , - extensions=('.tar.xz', '.txz', '.tarxz',), + name="Tar xz", + filetypes=("xz compressed",), + mimetypes=("application/x-xz",), + extensions=( + ".tar.xz", + ".txz", + ".tarxz", + ), kind=regular_nested, 
extractors=[extract_xz, extract_tar], - strict=False + strict=False, ) TarLzmaHandler = Handler( - name='Tar lzma', - filetypes=('lzma compressed',), - mimetypes=('application/x-lzma',) , - extensions=('tar.lzma', '.tlz', '.tarlz', '.tarlzma',), + name="Tar lzma", + filetypes=("lzma compressed",), + mimetypes=("application/x-lzma",), + extensions=( + "tar.lzma", + ".tlz", + ".tarlz", + ".tarlzma", + ), kind=regular_nested, extractors=[extract_lzma, extract_tar], - strict=False + strict=False, ) TarGzipHandler = Handler( - name='Tar gzip', - filetypes=('gzip compressed',), - mimetypes=('application/gzip',), - extensions=('.tgz', '.tar.gz', '.tar.gzip', '.targz', '.targzip', '.tgzip',), + name="Tar gzip", + filetypes=("gzip compressed",), + mimetypes=("application/gzip",), + extensions=( + ".tgz", + ".tar.gz", + ".tar.gzip", + ".targz", + ".targzip", + ".tgzip", + ), kind=regular_nested, extractors=[extract_tar], - strict=False + strict=False, ) TarLzipHandler = Handler( - name='Tar lzip', - filetypes=('lzip compressed',), - mimetypes=('application/x-lzip',) , - extensions=('.tar.lz', '.tar.lzip',), + name="Tar lzip", + filetypes=("lzip compressed",), + mimetypes=("application/x-lzip",), + extensions=( + ".tar.lz", + ".tar.lzip", + ), kind=regular_nested, extractors=[extract_lzip, extract_tar], - strict=False + strict=False, ) TarZstdHandler = Handler( - name='Tar zstd', - filetypes=('zstandard compressed',), - mimetypes=('application/x-zstd',) , - extensions=('.tar.zst', '.tar.zstd',), + name="Tar zstd", + filetypes=("zstandard compressed",), + mimetypes=("application/x-zstd",), + extensions=( + ".tar.zst", + ".tar.zstd", + ), kind=regular_nested, extractors=[extract_zstd, extract_tar], - strict=True + strict=True, ) TarLz4Handler = Handler( - name='Tar lz4', - filetypes=('lz4 compressed',), - mimetypes=('application/x-lz4',) , - extensions=('.tar.lz4',), + name="Tar lz4", + filetypes=("lz4 compressed",), + mimetypes=("application/x-lz4",), + extensions=(".tar.lz4",), kind=regular_nested, extractors=[extract_lz4, extract_tar], - strict=True + strict=True, ) # https://wiki.openwrt.org/doc/techref/opkg: ipk # http://downloads.openwrt.org/snapshots/trunk/x86/64/packages/base/ OpkgHandler = Handler( - name='OPKG package', - filetypes=('gzip compressed',), - mimetypes=('application/gzip',), - extensions=('.ipk',), + name="OPKG package", + filetypes=("gzip compressed",), + mimetypes=("application/gzip",), + extensions=(".ipk",), kind=regular_nested, extractors=[extract_tar], - strict=False + strict=False, ) GzipHandler = Handler( - name='Gzip', - filetypes=('gzip compressed', 'gzip compressed data'), - mimetypes=('application/gzip',), - extensions=('.gz', '.gzip', '.wmz', '.arz',), + name="Gzip", + filetypes=("gzip compressed", "gzip compressed data"), + mimetypes=("application/gzip",), + extensions=( + ".gz", + ".gzip", + ".wmz", + ".arz", + ), kind=regular, extractors=[uncompress_gzip], - strict=False + strict=False, ) LzipHandler = Handler( - name='lzip', - filetypes=('lzip compressed',), - mimetypes=('application/x-lzip',) , - extensions=('.lzip',), + name="lzip", + filetypes=("lzip compressed",), + mimetypes=("application/x-lzip",), + extensions=(".lzip",), kind=regular, extractors=[extract_lzip], - strict=False + strict=False, ) ZstdHandler = Handler( - name='zstd', - filetypes=('zstandard compressed',), - mimetypes=('application/x-zstd',) , - extensions=('.zst', '.zstd',), + name="zstd", + filetypes=("zstandard compressed",), + mimetypes=("application/x-zstd",), + extensions=( + ".zst", + 
".zstd", + ), kind=regular_nested, extractors=[extract_zstd], - strict=False + strict=False, ) Lz4Handler = Handler( - name='lz4', - filetypes=('lz4 compressed',), - mimetypes=('application/x-lz4',) , - extensions=('.lz4',), + name="lz4", + filetypes=("lz4 compressed",), + mimetypes=("application/x-lz4",), + extensions=(".lz4",), kind=regular_nested, extractors=[extract_lz4], - strict=False + strict=False, ) DiaDocHandler = Handler( - name='Dia diagram doc', - filetypes=('gzip compressed',), - mimetypes=('application/gzip',), - extensions=('.dia',), + name="Dia diagram doc", + filetypes=("gzip compressed",), + mimetypes=("application/gzip",), + extensions=(".dia",), kind=docs, extractors=[uncompress_gzip], - strict=True + strict=True, ) GraffleDocHandler = Handler( - name='Graffle diagram doc', - filetypes=('gzip compressed',), - mimetypes=('application/gzip',), - extensions=('.graffle',), + name="Graffle diagram doc", + filetypes=("gzip compressed",), + mimetypes=("application/gzip",), + extensions=(".graffle",), kind=docs, extractors=[uncompress_gzip], - strict=True + strict=True, ) SvgGzDocHandler = Handler( - name='SVG Compressed doc', - filetypes=('gzip compressed',), - mimetypes=('application/gzip',), - extensions=('.svgz',), + name="SVG Compressed doc", + filetypes=("gzip compressed",), + mimetypes=("application/gzip",), + extensions=(".svgz",), kind=docs, extractors=[uncompress_gzip], - strict=True + strict=True, ) BzipHandler = Handler( - name='bzip2', - filetypes=('bzip2 compressed',), - mimetypes=('application/x-bzip2',), - extensions=('.bz', '.bz2', 'bzip2',), + name="bzip2", + filetypes=("bzip2 compressed",), + mimetypes=("application/x-bzip2",), + extensions=( + ".bz", + ".bz2", + "bzip2", + ), kind=regular, extractors=[uncompress_bzip2], - strict=False + strict=False, ) TarBzipHandler = Handler( - name='Tar bzip2', - filetypes=('bzip2 compressed',), - mimetypes=('application/x-bzip2',), + name="Tar bzip2", + filetypes=("bzip2 compressed",), + mimetypes=("application/x-bzip2",), extensions=( - '.tar.bz2', - '.tar.bz', - '.tar.bzip', - '.tar.bzip2', - '.tbz', - '.tbz2', - '.tb2', - '.tarbz2', + ".tar.bz2", + ".tar.bz", + ".tar.bzip", + ".tar.bzip2", + ".tbz", + ".tbz2", + ".tb2", + ".tarbz2", ), kind=regular_nested, extractors=[extract_tar], - strict=False + strict=False, ) RarHandler = Handler( - name='RAR', - filetypes=('rar archive',), - mimetypes=('application/x-rar',), - extensions=('.rar',), + name="RAR", + filetypes=("rar archive",), + mimetypes=("application/x-rar",), + extensions=(".rar",), kind=regular, extractors=[extract_rar], - strict=True + strict=True, ) CabHandler = Handler( - name='Microsoft cab', - filetypes=('microsoft cabinet',), - mimetypes=('application/vnd.ms-cab-compressed',), - extensions=('.cab',), + name="Microsoft cab", + filetypes=("microsoft cabinet",), + mimetypes=("application/vnd.ms-cab-compressed",), + extensions=(".cab",), kind=package, extractors=[extract_cab], - strict=True + strict=True, ) MsiInstallerHandler = Handler( - name='Microsoft MSI Installer', - filetypes=('msi installer',), - mimetypes=('application/x-msi',), - extensions=('.msi',), + name="Microsoft MSI Installer", + filetypes=("msi installer",), + mimetypes=("application/x-msi",), + extensions=(".msi",), kind=package, extractors=[extract_msi], - strict=True + strict=True, ) InstallShieldHandler = Handler( - name='InstallShield Installer', - filetypes=('installshield',), - mimetypes=('application/x-dosexec',), - extensions=('.exe',), + name="InstallShield Installer", + 
filetypes=("installshield",), + mimetypes=("application/x-dosexec",), + extensions=(".exe",), kind=special_package, extractors=[extract_ishield], - strict=True + strict=True, ) NugetHandler = Handler( - name='Nuget', + name="Nuget", # TODO: file a bug upstream # Weirdly enough the detection by libmagic is sometimes wrong # this is due to this issue: # being recognized by libmagic as an OOXML file # https://en.wikipedia.org/wiki/Open_Packaging_Conventions#File_formats_using_the_OPC - filetypes=('zip archive', 'microsoft ooxml',), - mimetypes=('application/zip', 'application/octet-stream',), - extensions=('.nupkg',), + filetypes=( + "zip archive", + "microsoft ooxml", + ), + mimetypes=( + "application/zip", + "application/octet-stream", + ), + extensions=(".nupkg",), kind=package, extractors=[extract_zip], - strict=True + strict=True, ) NSISInstallerHandler = Handler( - name='Nullsoft Installer', - filetypes=('nullsoft installer',), - mimetypes=('application/x-dosexec',), - extensions=('.exe',), + name="Nullsoft Installer", + filetypes=("nullsoft installer",), + mimetypes=("application/x-dosexec",), + extensions=(".exe",), kind=special_package, extractors=[extract_nsis], - strict=True + strict=True, ) ArHandler = Handler( - name='ar archive', - filetypes=('current ar archive',), - mimetypes=('application/x-archive',), - extensions=('.ar',), + name="ar archive", + filetypes=("current ar archive",), + mimetypes=("application/x-archive",), + extensions=(".ar",), kind=regular, extractors=[extract_ar], - strict=False + strict=False, ) StaticLibHandler = Handler( - name='Static Library', - filetypes=('current ar archive', 'current ar archive random library',), - mimetypes=('application/x-archive',), - extensions=('.a', '.lib', '.out', '.ka',), + name="Static Library", + filetypes=( + "current ar archive", + "current ar archive random library", + ), + mimetypes=("application/x-archive",), + extensions=( + ".a", + ".lib", + ".out", + ".ka", + ), kind=package, extractors=[extract_ar], - strict=True + strict=True, ) DebHandler = Handler( - name='Debian package', - filetypes=('debian binary package',), + name="Debian package", + filetypes=("debian binary package",), mimetypes=( - 'application/vnd.debian.binary-package', - 'application/x-archive', + "application/vnd.debian.binary-package", + "application/x-archive", + ), + extensions=( + ".deb", + ".udeb", ), - extensions=('.deb', '.udeb',), kind=package, extractors=[extract_deb], - strict=True + strict=True, ) RpmHandler = Handler( - name='RPM package', - filetypes=('rpm ',), - mimetypes=('application/x-rpm',), - extensions=('.rpm', '.srpm', '.mvl', '.vip',), + name="RPM package", + filetypes=("rpm ",), + mimetypes=("application/x-rpm",), + extensions=( + ".rpm", + ".srpm", + ".mvl", + ".vip", + ), kind=package, extractors=[extract_rpm, extract_cpio], - strict=False + strict=False, ) SevenZipHandler = Handler( - name='7zip', - filetypes=('7-zip archive',), - mimetypes=('application/x-7z-compressed',), - extensions=('.7z',), + name="7zip", + filetypes=("7-zip archive",), + mimetypes=("application/x-7z-compressed",), + extensions=(".7z",), kind=regular, extractors=[extract_7z], - strict=False + strict=False, ) TarSevenZipHandler = Handler( - name='Tar 7zip', - filetypes=('7-zip archive',), - mimetypes=('application/x-7z-compressed',), - extensions=('.tar.7z', '.tar.7zip', '.t7z',), + name="Tar 7zip", + filetypes=("7-zip archive",), + mimetypes=("application/x-7z-compressed",), + extensions=( + ".tar.7z", + ".tar.7zip", + ".t7z", + ), kind=regular_nested, 
extractors=[extract_7z, extract_tar], - strict=True + strict=True, ) SharHandler = Handler( - name='shar shell archive', - filetypes=('posix shell script',), - mimetypes=('text/x-shellscript',), - extensions=('.sha', '.shar', '.bin',), + name="shar shell archive", + filetypes=("posix shell script",), + mimetypes=("text/x-shellscript",), + extensions=( + ".sha", + ".shar", + ".bin", + ), kind=special_package, extractors=[], - strict=True + strict=True, ) CpioHandler = Handler( - name='cpio', - filetypes=('cpio archive',), - mimetypes=('application/x-cpio',), - extensions=('.cpio',), + name="cpio", + filetypes=("cpio archive",), + mimetypes=("application/x-cpio",), + extensions=(".cpio",), kind=regular, extractors=[extract_cpio], - strict=False + strict=False, ) ZHandler = Handler( - name='Z', + name="Z", filetypes=("compress'd data",), - mimetypes=('application/x-compress',), - extensions=('.z',), + mimetypes=("application/x-compress",), + extensions=(".z",), kind=regular, extractors=[extract_Z], - strict=False + strict=False, ) TarZHandler = Handler( - name='Tar Z', + name="Tar Z", filetypes=("compress'd data",), - mimetypes=('application/x-compress',), - extensions=('.tz', '.tar.z', '.tarz',), + mimetypes=("application/x-compress",), + extensions=( + ".tz", + ".tar.z", + ".tarz", + ), kind=regular_nested, extractors=[extract_Z, extract_tar], - strict=False + strict=False, ) AppleDmgHandler = Handler( - name='Apple dmg', - filetypes=('zlib compressed',), - mimetypes=('application/zlib',), - extensions=('.dmg', '.sparseimage',), + name="Apple dmg", + filetypes=("zlib compressed",), + mimetypes=("application/zlib",), + extensions=( + ".dmg", + ".sparseimage", + ), kind=package, extractors=[extract_iso], - strict=True + strict=True, ) ApplePkgHandler = Handler( - name='Apple pkg or mpkg package installer', - filetypes=('xar archive',), - mimetypes=('application/octet-stream',), - extensions=('.pkg', '.mpkg',), + name="Apple pkg or mpkg package installer", + filetypes=("xar archive",), + mimetypes=("application/octet-stream",), + extensions=( + ".pkg", + ".mpkg", + ), kind=package, extractors=[extract_xarpkg], - strict=True + strict=True, ) XarHandler = Handler( - name='Xar archive v1', - filetypes=('xar archive',), - mimetypes=('application/octet-stream', 'application/x-xar',), - extensions=('.xar',), + name="Xar archive v1", + filetypes=("xar archive",), + mimetypes=( + "application/octet-stream", + "application/x-xar", + ), + extensions=(".xar",), kind=package, extractors=[extract_xarpkg], - strict=True + strict=True, ) IsoImageHandler = Handler( - name='ISO CD image', - filetypes=('iso 9660 cd-rom', 'high sierra cd-rom',), - mimetypes=('application/x-iso9660-image',), - extensions=('.iso', '.udf', '.img',), + name="ISO CD image", + filetypes=( + "iso 9660 cd-rom", + "high sierra cd-rom", + ), + mimetypes=("application/x-iso9660-image",), + extensions=( + ".iso", + ".udf", + ".img", + ), kind=file_system, extractors=[extract_iso], - strict=True + strict=True, ) SquashfsHandler = Handler( - name='SquashFS disk image', - filetypes=('squashfs',), + name="SquashFS disk image", + filetypes=("squashfs",), mimetypes=(), extensions=(), kind=file_system, extractors=[extract_squashfs], - strict=False + strict=False, ) QCOWHandler = Handler( # note that there are v1, v2 and v3 formats. 
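     # the two filetypes below are meant to cover both the older "qemu qcow
     # image" and the newer "qemu qcow2 image" strings, presumably as returned
     # (lowercased) by the filetype detection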
- name='QEMU QCOW2 disk image', - filetypes=('qemu qcow2 image', 'qemu qcow image',), - mimetypes=('application/octet-stream',), - extensions=('.qcow2', '.qcow', '.qcow2c', '.img',), + name="QEMU QCOW2 disk image", + filetypes=( + "qemu qcow2 image", + "qemu qcow image", + ), + mimetypes=("application/octet-stream",), + extensions=( + ".qcow2", + ".qcow", + ".qcow2c", + ".img", + ), kind=file_system, extractors=[extract_vm_image], strict=True, ) VMDKHandler = Handler( - name='VMDK disk image', - filetypes=('vmware4 disk image',), - mimetypes=('application/octet-stream',), - extensions=('.vmdk',), + name="VMDK disk image", + filetypes=("vmware4 disk image",), + mimetypes=("application/octet-stream",), + extensions=(".vmdk",), kind=file_system, extractors=[extract_vm_image], strict=True, ) VirtualBoxHandler = Handler( - name='VirtualBox disk image', - filetypes=('virtualbox disk image',), - mimetypes=('application/octet-stream',), - extensions=('.vdi',), + name="VirtualBox disk image", + filetypes=("virtualbox disk image",), + mimetypes=("application/octet-stream",), + extensions=(".vdi",), kind=file_system, extractors=[extract_vm_image], strict=True, ) PatchHandler = Handler( - name='Patch', - filetypes=('diff', 'patch',), - mimetypes=('text/x-diff',), - extensions=('.diff', '.patch',), + name="Patch", + filetypes=( + "diff", + "patch", + ), + mimetypes=("text/x-diff",), + extensions=( + ".diff", + ".patch", + ), kind=patches, extractors=[extract_patch], - strict=True + strict=True, ) # Actual list of handlers @@ -1199,6 +1346,7 @@ def try_to_extract(location, target_dir, extractor): # only support extracting patches if patch is installed. This is not a default try: import patch as _pythonpatch + archive_handlers.append(PatchHandler) except: pass diff --git a/src/extractcode/cli.py b/src/extractcode/cli.py index 1afccaf..4bf3f39 100644 --- a/src/extractcode/cli.py +++ b/src/extractcode/cli.py @@ -11,6 +11,7 @@ import functools import click + click.disable_unicode_literals_warning = True from commoncode import cliutils @@ -20,7 +21,7 @@ from extractcode.api import extract_archives -__version__ = '2021.6.2' +__version__ = "2021.6.2" echo_stderr = functools.partial(click.secho, err=True) @@ -28,7 +29,7 @@ def print_version(ctx, param, value): if not value or ctx.resilient_parsing: return - echo_stderr('ExtractCode version ' + __version__) + echo_stderr("ExtractCode version " + __version__) ctx.exit() @@ -40,34 +41,34 @@ def print_archive_formats(ctx, param, value): if not value or ctx.resilient_parsing: return - kindkey = lambda x:x.kind + kindkey = lambda x: x.kind by_kind = groupby(sorted(archive_handlers, key=kindkey), key=kindkey) for kind, handlers in by_kind: - click.echo(f'Archive format kind: {kind_labels[kind]}') - click.echo('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~') + click.echo(f"Archive format kind: {kind_labels[kind]}") + click.echo("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") for handler in handlers: - exts = ', '.join(handler.extensions) - mimes = ', '.join(handler.mimetypes) - types = ', '.join(handler.filetypes) - click.echo(f' name: {handler.name}') - click.echo(f' - extensions: {exts}') - click.echo(f' - filetypes : {types}') - click.echo(f' - mimetypes : {mimes}') - click.echo('') + exts = ", ".join(handler.extensions) + mimes = ", ".join(handler.mimetypes) + types = ", ".join(handler.filetypes) + click.echo(f" name: {handler.name}") + click.echo(f" - extensions: {exts}") + click.echo(f" - filetypes : {types}") + click.echo(f" - 
mimetypes : {mimes}") + click.echo("") ctx.exit() -info_text = ''' +info_text = """ ExtractCode is a mostly universal archive and compressed files extractor, with a particular focus on code archives. Visit https://aboutcode.org and https://github.com/nexB/extractcode/ for support and download. -''' +""" -notice_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'NOTICE') +notice_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "NOTICE") notice_text = open(notice_path).read() @@ -81,7 +82,7 @@ def print_about(ctx, param, value): ctx.exit() -epilog_text = '''\b\bExamples: +epilog_text = """\b\bExamples: (Note for Windows: use '\\' backslash instead of '/' slash for paths.) @@ -101,80 +102,76 @@ def print_about(ctx, param, value): 'samples/arch/zlib.tar.gz-extract/': extractcode samples/arch/zlib.tar.gz -''' +""" class ExtractCommand(cliutils.BaseCommand): - short_usage_help = ''' -Try 'extractcode --help' for help on options and arguments.''' + short_usage_help = """ +Try 'extractcode --help' for help on options and arguments.""" -@click.command(name='extractcode', epilog=epilog_text, cls=ExtractCommand) +@click.command(name="extractcode", epilog=epilog_text, cls=ExtractCommand) @click.pass_context - @click.argument( - 'input', - metavar='', + "input", + metavar="", type=click.Path(exists=True, readable=True), ) - @click.option( - '--verbose', + "--verbose", is_flag=True, - help='Print verbose file-by-file progress messages.', + help="Print verbose file-by-file progress messages.", ) @click.option( - '--quiet', + "--quiet", is_flag=True, - help='Do not print any summary or progress message.', + help="Do not print any summary or progress message.", ) @click.option( - '--shallow', + "--shallow", is_flag=True, - help='Do not extract recursively nested archives in archives.', + help="Do not extract recursively nested archives in archives.", ) @click.option( - '--replace-originals', + "--replace-originals", is_flag=True, - help='Replace extracted archives by the extracted content.', + help="Replace extracted archives by the extracted content.", ) @click.option( - '--ignore', + "--ignore", default=[], multiple=True, - help='Ignore files/directories matching this glob pattern.', + help="Ignore files/directories matching this glob pattern.", ) - @click.option( - '--all-formats', + "--all-formats", is_flag=True, - help= - 'Extract archives from all known formats. ' - 'The default is to extract only the common format of these kinds: ' + help="Extract archives from all known formats. " + "The default is to extract only the common format of these kinds: " '"regular", "regular_nested" and "package". 
' - 'To show all supported formats use the option --list-formats .', + "To show all supported formats use the option --list-formats .", ) @click.option( - '--list-formats', + "--list-formats", is_flag=True, is_eager=True, callback=print_archive_formats, - help='Show the list of supported archive and compressed file formats and exit.', + help="Show the list of supported archive and compressed file formats and exit.", ) -@click.help_option('-h', '--help') +@click.help_option("-h", "--help") @click.option( - '--about', + "--about", is_flag=True, is_eager=True, callback=print_about, - help='Show information about ExtractCode and its licensing and exit.', + help="Show information about ExtractCode and its licensing and exit.", ) @click.option( - '--version', + "--version", is_flag=True, is_eager=True, callback=print_version, - help='Show the version and exit.', + help="Show the version and exit.", ) def extractcode( ctx, @@ -188,7 +185,8 @@ def extractcode( *args, **kwargs, ): - """extract archives and compressed files in the file or directory tree. + """ + Extract archives and compressed files in the file or directory tree. Archives found inside an extracted archive are extracted recursively. Use --shallow for a shallow extraction. @@ -196,41 +194,41 @@ def extractcode( '-extract' created side-by-side with an archive. """ - abs_location = fileutils.as_posixpath( - os.path.abspath( - os.path.expanduser(input) - ) - ) + abs_location = fileutils.as_posixpath(os.path.abspath(os.path.expanduser(input))) def extract_event(item): """ Display an extract event. """ if quiet: - return '' + return "" if not item: - return '' + return "" source = item.source if not isinstance(source, str): - source = toascii(source, translit=True).decode('utf-8', 'replace') + source = toascii(source, translit=True).decode("utf-8", "replace") if verbose: if item.done: - return '' - line = source and get_relative_path( - path=source, - len_base_path=len_base_path, - base_is_dir=base_is_dir, - ) or '' + return "" + line = ( + source + and get_relative_path( + path=source, + len_base_path=len_base_path, + base_is_dir=base_is_dir, + ) + or "" + ) else: - line = source and fileutils.file_name(source) or '' + line = source and fileutils.file_name(source) or "" if not isinstance(line, str): - line = toascii(line, translit=True).decode('utf-8', 'replace') + line = toascii(line, translit=True).decode("utf-8", "replace") - return 'Extracting: %(line)s' % locals() + return "Extracting: %(line)s" % locals() def display_extract_summary(): """ @@ -245,7 +243,7 @@ def display_extract_summary(): source = fileutils.as_posixpath(xev.source) if not isinstance(source, str): - source = toascii(source, translit=True).decode('utf-8', 'replace') + source = toascii(source, translit=True).decode("utf-8", "replace") source = get_relative_path( path=source, @@ -254,24 +252,18 @@ def display_extract_summary(): ) for e in xev.errors: - echo_stderr( - 'ERROR extracting: %(source)s: %(e)s' % locals(), - fg='red' - ) + echo_stderr("ERROR extracting: %(source)s: %(e)s" % locals(), fg="red") for warn in xev.warnings: - echo_stderr( - 'WARNING extracting: %(source)s: %(warn)s' % locals(), - fg='yellow' - ) + echo_stderr("WARNING extracting: %(source)s: %(warn)s" % locals(), fg="yellow") - summary_color = 'green' + summary_color = "green" if has_warnings: - summary_color = 'yellow' + summary_color = "yellow" if has_errors: - summary_color = 'red' + summary_color = "red" - echo_stderr('Extracting done.', fg=summary_color, reset=True) + echo_stderr("Extracting 
done.", fg=summary_color, reset=True) # use for relative paths computation len_base_path = len(abs_location) @@ -290,14 +282,11 @@ def display_extract_summary(): ) if not quiet: - echo_stderr('Extracting archives...', fg='green') + echo_stderr("Extracting archives...", fg="green") with cliutils.progressmanager( - extractibles, - item_show_func=extract_event, - verbose=verbose + extractibles, item_show_func=extract_event, verbose=verbose ) as extraction_events: - for xev in extraction_events: if xev.done and (xev.warnings or xev.errors): has_extract_errors = has_extract_errors or xev.errors @@ -328,5 +317,4 @@ def get_relative_path(path, len_base_path, base_is_dir): else: rel_path = fileutils.file_name(path) - return rel_path.lstrip('/') - + return rel_path.lstrip("/") diff --git a/src/extractcode/extract.py b/src/extractcode/extract.py index 8c547f2..9aeee9b 100644 --- a/src/extractcode/extract.py +++ b/src/extractcode/extract.py @@ -27,6 +27,7 @@ if TRACE: import sys + logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) @@ -77,7 +78,7 @@ - `warnings` is a mapping of extracted paths to a list of warning messages. - `errors` is a list of error messages. """ -ExtractEvent = namedtuple('ExtractEvent', 'source target done warnings errors') +ExtractEvent = namedtuple("ExtractEvent", "source target done warnings errors") def extract( @@ -137,10 +138,7 @@ def extract( source = xevent.source target = xevent.target if TRACE: - logger.debug( - f'extract:replace_originals: replacing ' - f'{source!r} by {target!r}' - ) + logger.debug(f"extract:replace_originals: replacing {source!r} by {target!r}") fileutils.delete(source) fileutils.copytree(target, source) fileutils.delete(target) @@ -167,13 +165,12 @@ def extract_files( """ ignored = partial(ignore.is_ignored, ignores=ignore.default_ignores, unignores={}) if TRACE: - logger.debug('extract:start: %(location)r recurse: %(recurse)r\n' % locals()) + logger.debug("extract:start: %(location)r recurse: %(recurse)r\n" % locals()) abs_location = abspath(expanduser(location)) for top, dirs, files in fileutils.walk(abs_location, ignored): if TRACE: - logger.debug( - 'extract:walk: top: %(top)r dirs: %(dirs)r files: r(files)r' % locals()) + logger.debug("extract:walk: top: %(top)r dirs: %(dirs)r files: r(files)r" % locals()) if not recurse: if TRACE: @@ -183,29 +180,27 @@ def extract_files( dirs.remove(d) if TRACE: rd = repr(drs.symmetric_difference(set(dirs))) - logger.debug(f'extract:walk: not recurse: removed dirs: {rd}') + logger.debug(f"extract:walk: not recurse: removed dirs: {rd}") for f in files: loc = join(top, f) if not recurse and extractcode.is_extraction_path(loc): if TRACE: - logger.debug( - 'extract:walk not recurse: skipped file: %(loc)r' % locals()) + logger.debug("extract:walk not recurse: skipped file: %(loc)r" % locals()) continue if not extractcode.archive.should_extract( - location=loc, - kinds=kinds, - ignore_pattern=ignore_pattern + location=loc, kinds=kinds, ignore_pattern=ignore_pattern ): if TRACE: logger.debug( - 'extract:walk: skipped file: not should_extract: %(loc)r' % locals()) + "extract:walk: skipped file: not should_extract: %(loc)r" % locals() + ) continue target = join(abspath(top), extractcode.get_extraction_path(loc)) if TRACE: - logger.debug('extract:target: %(target)r' % locals()) + logger.debug("extract:target: %(target)r" % locals()) # extract proper for xevent in extract_file( @@ -214,12 +209,12 @@ def extract_files( kinds=kinds, ): if TRACE: - logger.debug('extract:walk:extraction event: %(xevent)r' 
% locals()) + logger.debug("extract:walk:extraction event: %(xevent)r" % locals()) yield xevent if recurse: if TRACE: - logger.debug('extract:walk: recursing on target: %(target)r' % locals()) + logger.debug("extract:walk: recursing on target: %(target)r" % locals()) for xevent in extract( location=target, kinds=kinds, @@ -227,7 +222,7 @@ def extract_files( ignore_pattern=ignore_pattern, ): if TRACE: - logger.debug('extract:walk:recurse:extraction event: %(xevent)r' % locals()) + logger.debug("extract:walk:recurse:extraction event: %(xevent)r" % locals()) yield xevent @@ -252,11 +247,10 @@ def extract_file( ) if TRACE: - emodule = getattr(extractor, '__module__', '') - ename = getattr(extractor, '__name__', '') + emodule = getattr(extractor, "__module__", "") + ename = getattr(extractor, "__name__", "") logger.debug( - f'extract_file: extractor: for: {location} with kinds: ' - f'{kinds}: {emodule}.{ename}' + f"extract_file: extractor: for: {location} with kinds: {kinds}: {emodule}.{ename}" ) if extractor: @@ -271,7 +265,7 @@ def extract_file( try: # Extract first to a temp directory: if there is an error, the # extracted files will not be moved to the target. - tmp_tgt = fileutils.get_temp_dir(prefix='extractcode-extract-') + tmp_tgt = fileutils.get_temp_dir(prefix="extractcode-extract-") abs_location = abspath(expanduser(location)) warns = extractor(abs_location, tmp_tgt) or [] warnings.extend(warns) @@ -279,13 +273,12 @@ def extract_file( fileutils.delete(tmp_tgt) except Exception as e: - errors = [str(e).strip(' \'"')] + errors = [str(e).strip(" '\"")] if verbose: errors.append(traceback.format_exc()) if TRACE: tb = traceback.format_exc() - logger.debug( - f'extract_file: ERROR: {location}: {errors}\n{e}\n{tb}') + logger.debug(f"extract_file: ERROR: {location}: {errors}\n{e}\n{tb}") finally: yield ExtractEvent( diff --git a/src/extractcode/libarchive2.py b/src/extractcode/libarchive2.py index 988cd3e..024d116 100644 --- a/src/extractcode/libarchive2.py +++ b/src/extractcode/libarchive2.py @@ -41,6 +41,7 @@ if TRACE or TRACE_DEEP: import sys + logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) @@ -75,11 +76,11 @@ """ # keys for plugin-provided locations -EXTRACTCODE_LIBARCHIVE_DLL = 'extractcode.libarchive.dll' +EXTRACTCODE_LIBARCHIVE_DLL = "extractcode.libarchive.dll" -EXTRACTCODE_LIBARCHIVE_PATH_ENVVAR = 'EXTRACTCODE_LIBARCHIVE_PATH' +EXTRACTCODE_LIBARCHIVE_PATH_ENVVAR = "EXTRACTCODE_LIBARCHIVE_PATH" -_LIBRARY_NAME = 'libarchive' +_LIBRARY_NAME = "libarchive" def load_lib(): @@ -106,41 +107,41 @@ def load_lib(): if libarchive: warnings.warn( 'Using "libarchive" library found in a system location. ' - 'Install instead a extractcode-libarchive plugin for best support.' + "Install instead a extractcode-libarchive plugin for best support." ) return libarchive # try the PATH if not dll_loc: - dll = 'libarchive.dll' if on_windows else 'libarchive.so' + dll = "libarchive.dll" if on_windows else "libarchive.so" dll_loc = command.find_in_path(dll) if dll_loc: warnings.warn( 'Using "libarchive" library found in the PATH. ' - 'Install instead a extractcode-libarchive plugin for best support.' + "Install instead a extractcode-libarchive plugin for best support." ) if not dll_loc or not os.path.isfile(dll_loc): raise Exception( - 'CRITICAL: libarchive DLL is not installed. ' - 'Unable to continue: you need to install a valid extractcode-libarchive ' - 'plugin with a valid libarchive DLL available. ' - f'OR set the {EXTRACTCODE_LIBARCHIVE_PATH_ENVVAR} environment variable. 
' - 'OR install libarchive as a system package. ' - 'OR ensure libarchive is available in the system PATH.' - ) + "CRITICAL: libarchive DLL is not installed. " + "Unable to continue: you need to install a valid extractcode-libarchive " + "plugin with a valid libarchive DLL available. " + f"OR set the {EXTRACTCODE_LIBARCHIVE_PATH_ENVVAR} environment variable. " + "OR install libarchive as a system package. " + "OR ensure libarchive is available in the system PATH." + ) return command.load_shared_library(dll_loc) def set_env_with_tz(): # NOTE: this is important to avoid timezone differences - os.environ['TZ'] = 'UTC' + os.environ["TZ"] = "UTC" set_env_with_tz() # NOTE: this is important to avoid locale-specific errors on various OS -locale.setlocale(locale.LC_ALL, '') +locale.setlocale(locale.LC_ALL, "") # load and initialize the shared library libarchive = load_lib() @@ -162,48 +163,51 @@ def extract(location, target_dir, skip_symlinks=True): set_env_with_tz() for entry in list_entries(abs_location): - logger.debug('processing entry: {}'.format(entry)) + logger.debug("processing entry: {}".format(entry)) if not entry: continue if entry.is_empty(): if TRACE: - logger.debug('Skipping empty: {}'.format(entry)) + logger.debug("Skipping empty: {}".format(entry)) continue if entry.warnings: if not entry.is_empty(): entry_path = entry.path - msgs = ['%(entry_path)r: ' % locals()] + msgs = ["%(entry_path)r: " % locals()] else: - msgs = ['No path available: '] + msgs = ["No path available: "] messages = (w for w in entry.warnings if w and w.strip()) messages = map(text.as_unicode, messages) - messages = (w.strip('"\' ') for w in messages) + messages = (w.strip("\"' ") for w in messages) msgs.extend(w for w in messages if w) - msgs = '\n'.join(msgs) or 'No message provided' + msgs = "\n".join(msgs) or "No message provided" if msgs not in warnings: warnings.append(msgs) if TRACE: - logger.debug('\n'.join(msgs)) + logger.debug("\n".join(msgs)) if not (entry.isdir or entry.isfile): # skip special files and links if TRACE: - logger.debug('skipping: {}'.format(entry)) + logger.debug("skipping: {}".format(entry)) if entry.issym and not skip_symlinks: raise NotImplemented( - 'extraction of symlinks with libarchive is not yet implemented.') + "extraction of symlinks with libarchive is not yet implemented." + ) continue if TRACE: - logger.debug(' writing.....') + logger.debug(" writing.....") - _target_path = entry.write(abs_target_dir, transform_path=partial(paths.safe_path, preserve_spaces=True)) + _target_path = entry.write( + abs_target_dir, transform_path=partial(paths.safe_path, preserve_spaces=True) + ) return warnings @@ -246,7 +250,7 @@ def __init__(self, location, uncompress=True, extract=True, block_size=10240): If both are True, the archive will be uncompressed then extracted as needed. (e.g. a tar.xz will be unxzed then untarred at once). """ - msg = 'At least one of `uncompress` or `extract` flag is required.' + msg = "At least one of `uncompress` or `extract` flag is required." assert uncompress or extract, msg self.location = location self.uncompress = uncompress @@ -289,7 +293,7 @@ def iter(self): """ Yield Entry(ies) for this archive. """ - assert self.archive_struct, 'Archive must be used as a context manager.' + assert self.archive_struct, "Archive must be used as a context manager." entry_struct = new_entry() try: while True: @@ -335,6 +339,7 @@ class attributes. 
Some attributes are not handled on purpose because, by design, extracted
    files must be readable/writable and owned by the extracting user.
    """
+
    # TODO: re-check if users/groups may have some value for origin determination?
    # an archive object
@@ -425,7 +430,7 @@ def write(self, target_dir, transform_path=lambda x: x, skip_links=True):
         The default is a no-op lambda.
         """
         if TRACE:
-            logger.debug('writing entry: {}'.format(self))
+            logger.debug("writing entry: {}".format(self))
 
         if not self.archive.archive_struct:
             raise ArchiveErrorIllegalOperationOnClosedArchive()
@@ -439,8 +444,7 @@ def write(self, target_dir, transform_path=lambda x: x, skip_links=True):
         if skip_links and self.issym:
             return
         if not skip_links and self.issym:
-            raise NotImplemented(
-                'extraction of sym links with librarchive is not yet implemented.')
+            raise NotImplemented("extraction of sym links with libarchive is not yet implemented.")
 
         abs_target_dir = os.path.abspath(os.path.expanduser(target_dir))
         # TODO: return some warning when original path has been transformed
@@ -464,14 +468,13 @@ def write(self, target_dir, transform_path=lambda x: x, skip_links=True):
             unique_path = extractcode.new_name(target_path, is_dir=False)
             if TRACE:
                 logger.debug(
-                    f'path: \ntarget_path: {target_path}\n'
-                    f'unique_path: {unique_path}',
+                    f"path: \ntarget_path: {target_path}\nunique_path: {unique_path}",
                 )
 
-            with open(unique_path, 'wb') as target:
+            with open(unique_path, "wb") as target:
                 for content in self.get_content():
                     if TRACE_DEEP:
-                        logger.debug(' chunk: {}'.format(repr(content)))
+                        logger.debug(" chunk: {}".format(repr(content)))
                     target.write(content)
 
             os.utime(unique_path, (self.time, self.time))
@@ -492,7 +495,6 @@ def get_content(self):
 
 
 class ArchiveException(ExtractError):
-
     def __init__(
         self,
         rc=None,
@@ -506,23 +508,23 @@ def __init__(
             self.errno = root_ex.errno
             msg = root_ex.args or []
             msg = map(text.as_unicode, msg)
-            msg = u'\n'.join(msg)
+            msg = "\n".join(msg)
             self.msg = msg or None
             self.func = root_ex.func
         else:
             self.rc = rc
             self.errno = archive_struct and errno(archive_struct) or None
-            msg = archive_struct and err_msg(archive_struct) or ''
-            self.msg = msg and text.as_unicode(msg) or 'Unknown error'
+            msg = archive_struct and err_msg(archive_struct) or ""
+            self.msg = msg and text.as_unicode(msg) or "Unknown error"
             self.func = archive_func and archive_func.__name__ or None
 
     def __str__(self):
         if TRACE:
             msg = (
-                '%(msg)r: in function %(func)r with rc=%(rc)r, '
-                'errno=%(errno)r, root_ex=%(root_ex)r')
+                "%(msg)r: in function %(func)r with rc=%(rc)r, errno=%(errno)r, root_ex=%(root_ex)r"
+            )
             return msg % self.__dict__
-        return self.msg or ''
+        return self.msg or ""
 
 
 class ArchiveWarning(ArchiveException):
@@ -555,6 +557,7 @@ class ArchiveErrorPasswordProtected(
 class ArchiveErrorIllegalOperationOnClosedArchive(ArchiveException):
     pass
 
+
 #################################################
 # ctypes definition of the interface to libarchive
 #################################################
@@ -562,7 +565,7 @@ class ArchiveErrorIllegalOperationOnClosedArchive(ArchiveException):
 
 def errcheck(rc, archive_func, args, null=False):
     """
-    ctypes error check handler for functions returning int, or null if null is
+    Ctypes error check handler for functions returning int, or null if null is
""" if null: @@ -785,7 +788,12 @@ def errcheck(rc, archive_func, args, null=False): """ # int archive_read_data_block(struct archive *, const void **buff, size_t *len, off_t *offset); read_entry_data_block = libarchive.archive_read_data_block -read_entry_data_block.argtypes = [c_void_p, POINTER(c_void_p), POINTER(c_size_t), POINTER(c_longlong)] +read_entry_data_block.argtypes = [ + c_void_p, + POINTER(c_void_p), + POINTER(c_size_t), + POINTER(c_longlong), +] read_entry_data_block.restype = c_int read_entry_data_block.errcheck = errcheck diff --git a/src/extractcode/patch.py b/src/extractcode/patch.py index 3588695..81d4529 100644 --- a/src/extractcode/patch.py +++ b/src/extractcode/patch.py @@ -45,7 +45,7 @@ def extract(location, target_dir): for source, target, text in patch_info(location): # prefer the target path for writing the patch text to a subfile # unless target is /dev/null (a deletion) - if '/dev/null' in target: + if "/dev/null" in target: patch_subfile_path = source else: patch_subfile_path = target @@ -64,15 +64,15 @@ def extract(location, target_dir): counter = 0 fp = base_subfile_path while os.path.exists(fp + extractcode.EXTRACT_SUFFIX): - fp = base_subfile_path + '_%d' % counter + fp = base_subfile_path + "_%d" % counter counter += 1 base_subfile_path = fp # write the location proper, with a suffix extension to avoid # recursive extraction subfile_path = base_subfile_path + extractcode.EXTRACT_SUFFIX - with open(subfile_path, 'w') as subfile: - subfile.write('\n'.join(text)) + with open(subfile_path, "w") as subfile: + subfile.write("\n".join(text)) return [] @@ -84,11 +84,7 @@ def is_patch(location, include_extracted=False): """ T = typecode.contenttype.get_type(location) file_name = fileutils.file_name(location) - patch_like = ( - 'diff ' in T.filetype_file.lower() - or '.diff' in file_name - or '.patch' in file_name - ) + patch_like = "diff " in T.filetype_file.lower() or ".diff" in file_name or ".patch" in file_name if not patch_like: return False @@ -108,9 +104,9 @@ def patch_text(ptch): """ for head in ptch.header: yield head - yield '--- ' + fileutils.as_posixpath(ptch.source) - yield '+++ ' + fileutils.as_posixpath(ptch.target) - hk = '@@ -%(startsrc)d,%(linessrc)d +%(starttgt)d,%(linestgt)d @@ %(desc)s' + yield "--- " + fileutils.as_posixpath(ptch.source) + yield "+++ " + fileutils.as_posixpath(ptch.target) + hk = "@@ -%(startsrc)d,%(linessrc)d +%(starttgt)d,%(linestgt)d @@ %(desc)s" def hunk_data(hnk): return dict( @@ -135,9 +131,10 @@ def patch_info(location): Raise an exception if the file is not a patch file or cannot be parsed. 
""" import patch as pythonpatch + patchset = pythonpatch.fromfile(location) if not patchset: - msg = 'Unable to parse patch file: %(location)s' % locals() + msg = "Unable to parse patch file: %(location)s" % locals() raise ExtractErrorFailedToExtract(msg) for ptch in patchset.items: diff --git a/src/extractcode/sevenzip.py b/src/extractcode/sevenzip.py index 4ecc782..7b8bd2a 100644 --- a/src/extractcode/sevenzip.py +++ b/src/extractcode/sevenzip.py @@ -20,7 +20,7 @@ import attr -from commoncode import command +from commoncode import command from commoncode import fileutils from commoncode import paths from commoncode.system import is_case_sensitive_fs @@ -44,23 +44,24 @@ if TRACE or TRACE_DEEP or TRACE_ENTRIES: import sys + logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) # key of a plugin-provided location -EXTRACTCODE_7ZIP_EXE = 'extractcode.sevenzip.exe' +EXTRACTCODE_7ZIP_EXE = "extractcode.sevenzip.exe" -EXTRACTCODE_7ZIP_PATH_ENVVAR = 'EXTRACTCODE_7Z_PATH' +EXTRACTCODE_7ZIP_PATH_ENVVAR = "EXTRACTCODE_7Z_PATH" sevenzip_errors = [ - ('unsupported method', 'Unsupported archive or broken archive'), - ('wrong password', 'Password protected archive, unable to extract'), + ("unsupported method", "Unsupported archive or broken archive"), + ("wrong password", "Password protected archive, unable to extract"), # not being able to open an archive is not an error condition for now - ('can not open file as archive', None), - ('no files to process', 'Empty archive or incorrect arguments'), + ("can not open file as archive", None), + ("no files to process", "Empty archive or incorrect arguments"), ] -UNKNOWN_ERROR = 'Unknown extraction error' +UNKNOWN_ERROR = "Unknown extraction error" def get_command_location(_cache=[]): @@ -85,7 +86,7 @@ def get_command_location(_cache=[]): # try the PATH if not cmd_loc: - cmd = '7z.exe' if on_windows else '7z' + cmd = "7z.exe" if on_windows else "7z" cmd_loc = command.find_in_path(cmd) if not cmd_loc: @@ -94,18 +95,18 @@ def get_command_location(_cache=[]): if cmd_loc: warnings.warn( 'Using "7z" 7zip command found in the PATH. ' - 'Install instead a extractcode-7z plugin for best support.' + "Install instead a extractcode-7z plugin for best support." ) if not cmd_loc or not os.path.isfile(cmd_loc): raise Exception( - 'CRITICAL: 7zip executable is not installed. ' - 'Unable to continue: you need to install a valid extractcode-7z ' - 'plugin with a valid executable available. ' - f'OR set the {EXTRACTCODE_7ZIP_PATH_ENVVAR} environment variable. ' - 'OR install 7zip as a system package. ' - 'OR ensure 7zip is available in the system PATH.' - ) + "CRITICAL: 7zip executable is not installed. " + "Unable to continue: you need to install a valid extractcode-7z " + "plugin with a valid executable available. " + f"OR set the {EXTRACTCODE_7ZIP_PATH_ENVVAR} environment variable. " + "OR install 7zip as a system package. " + "OR ensure 7zip is available in the system PATH." + ) _cache.append(cmd_loc) return cmd_loc @@ -122,10 +123,7 @@ def get_7z_errors(stdout, stderr): # ERROR: Can not create symbolic link : A required privilege is not held by # the client. 
: .\2-SYMTYPE - find_7z_errors = re.compile( - '^Error:(.*)$', - re.MULTILINE | re.DOTALL | re.IGNORECASE - ).findall + find_7z_errors = re.compile("^Error:(.*)$", re.MULTILINE | re.DOTALL | re.IGNORECASE).findall stdlow = stderr.lower() for err, msg in sevenzip_errors: @@ -139,11 +137,11 @@ def get_7z_errors(stdout, stderr): file_errors = find_7z_errors(stderr) if file_errors: - return ' '.join(fe.strip('"\' ') for fe in file_errors).strip() + return " ".join(fe.strip("\"' ") for fe in file_errors).strip() file_errors = find_7z_errors(stdout) if file_errors: - return ' '.join(fe.strip('"\' ') for fe in file_errors).strip() + return " ".join(fe.strip("\"' ") for fe in file_errors).strip() def get_7z_warnings(stdout): @@ -152,7 +150,7 @@ def get_7z_warnings(stdout): a `stdout` text. """ # FIXME: we should use only one pass over stdout for errors and warnings - cannot_open = 'can not open output file' + cannot_open = "can not open output file" msg_len = len(cannot_open) + 1 warnings = defaultdict(list) @@ -169,7 +167,7 @@ def get_7z_warnings(stdout): def convert_warnings_to_list(warnings): warning_messages = [] for pathname, messages in warnings.items(): - msg = pathname + ': ' + '\n'.join(messages.strip('\' "')) + msg = pathname + ": " + "\n".join(messages.strip("' \"")) if msg not in warning_messages: warning_messages.append(msg) return warning_messages @@ -183,7 +181,7 @@ def list_extracted_7z_files(stdout): static const char *kExtractingString = "Extracting "; """ # FIXME: handle Unicode paths with 7zip command line flags - get_file_list = re.compile('Extracting ' + '(.*)$', re.MULTILINE).findall # NOQA + get_file_list = re.compile("Extracting " + "(.*)$", re.MULTILINE).findall # NOQA return get_file_list(stdout) @@ -194,14 +192,15 @@ def is_rar(location): if not os.path.exists(location): return from typecode import contenttype + T = contenttype.get_type(location) - return T.filetype_file.lower().startswith('rar archive') + return T.filetype_file.lower().startswith("rar archive") def extract( location, target_dir, - arch_type='*', + arch_type="*", file_by_file=on_mac, skip_symlinks=True, ): @@ -222,17 +221,18 @@ def extract( abs_location = os.path.abspath(os.path.expanduser(location)) if not os.path.exists(abs_location): raise ExtractErrorFailedToExtract( - f'The system cannot find the path specified: {abs_location}') + f"The system cannot find the path specified: {abs_location}" + ) if is_rar(location): - raise ExtractErrorFailedToExtract( - f'RAR extraction deactivated: {location}') + raise ExtractErrorFailedToExtract(f"RAR extraction deactivated: {location}") assert target_dir abs_target_dir = os.path.abspath(os.path.expanduser(target_dir)) if not os.path.exists(abs_target_dir): raise ExtractErrorFailedToExtract( - f'The system cannot find the target path specified: {target_dir}') + f"The system cannot find the target path specified: {target_dir}" + ) if file_by_file: extractor = extract_file_by_file @@ -250,7 +250,7 @@ def extract( def extract_all_files_at_once( location, target_dir, - arch_type='*', + arch_type="*", skip_symlinks=True, ): """ @@ -268,16 +268,16 @@ def extract_all_files_at_once( # note: there are some issues with the extraction of debian .deb ar files # see sevenzip bug http://sourceforge.net/p/sevenzip/bugs/1472/ ex_args = build_7z_extract_command( - location=location, target_dir=target_dir, arch_type=arch_type) + location=location, target_dir=target_dir, arch_type=arch_type + ) rc, stdout, stderr = command.execute(**ex_args) if rc != 0: if TRACE: logger.debug( - 
'extract: failure: {rc}\n'
-                'stderr: {stderr}\n'
-                'stdout: {stdout}\n'.format(**locals()))
+                "extract: failure: {rc}\nstderr: {stderr}\nstdout: {stdout}\n".format(**locals())
+            )
 
         error = get_7z_errors(stdout, stderr) or UNKNOWN_ERROR
         raise ExtractErrorFailedToExtract(error)
@@ -289,7 +289,7 @@
 def build_7z_extract_command(
     location,
     target_dir,
     single_entry=None,
-    arch_type='*',
+    arch_type="*",
 ):
     """
     Return a mapping of 7z command line arguments to extract the archive at
@@ -302,24 +302,24 @@
     # 7z arguments
     if single_entry:
         # do not use full path
-        extract = 'e'
+        extract = "e"
     else:
-        extract = 'x'
+        extract = "x"
 
-    yes_to_all = '-y'
+    yes_to_all = "-y"
 
     # NB: we use t* to ensure that all archive types are honored
     if not arch_type:
-        arch_type = ''
+        arch_type = ""
     else:
-        arch_type = '-t' + arch_type
+        arch_type = "-t" + arch_type
 
     # pass an empty password so that extraction with passwords WILL fail
-    password = '-p'
+    password = "-p"
 
     # renaming may not behave the same way on all OSes in particular Mac and
     # Windows
-    auto_rename_dupe_names = '-aou'
+    auto_rename_dupe_names = "-aou"
 
     # Ensure that we treat the FS as case insensitive if that's what it is
    # -ssc Set case-sensitive mode. It's default for Posix/Linux systems.
    # historically, this was not needed on macOS, but now APFS is case
    # insensitive as a default
     if on_windows or on_macos_14_or_higher or not is_case_sensitive_fs:
-        case_sensitive = '-ssc-'
+        case_sensitive = "-ssc-"
     else:
-        case_sensitive = '-ssc'
+        case_sensitive = "-ssc"
 
     # These do not work well with p7zip for now:
     # - force any console output to be UTF-8 encoded
@@ -341,7 +341,7 @@
     # TZ correctly when the archive does not contain TZ info. This does not work
     # on Windows, because 7z is not using the TZ env var there.
     timezone = dict(os.environ)
-    timezone.update({u'TZ': u'GMT'})
+    timezone.update({"TZ": "GMT"})
     timezone = command.get_env(timezone)
     # Note: 7z does extract in the current directory so we cwd to the target dir
     # first
@@ -352,7 +352,7 @@
         auto_rename_dupe_names,
         arch_type,
         password,
-        '--',
+        "--",
         location,
     ]
 
@@ -369,7 +369,7 @@
     )
 
     if TRACE:
-        logger.debug('extract: args:')
+        logger.debug("extract: args:")
         pprint.pprint(ex_args)
 
     return ex_args
@@ -378,7 +378,7 @@
 def extract_file_by_file(
     location,
     target_dir,
-    arch_type='*',
+    arch_type="*",
     skip_symlinks=True,
 ):
     """
@@ -411,18 +411,16 @@
     if not need_by_file:
         # use regular extract
         return extract_all_files_at_once(
-            location=location,
-            target_dir=target_dir,
-            arch_type=arch_type)
+            location=location, target_dir=target_dir, arch_type=arch_type
+        )
 
     # now we are extracting one file at a time. this is a tad painful because we
     # are dealing with a full command execution at each time.
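    # in outline: each entry is extracted alone with a single-entry 7z command
    # into a temporary directory, then copied out to a collision-safe unique
    # target path (via extractcode.new_name below); errors and warnings are
    # collected per entry path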
errors = {} warnings = {} - tmp_dir = fileutils.get_temp_dir(prefix='extractcode-extract-') + tmp_dir = fileutils.get_temp_dir(prefix="extractcode-extract-") for i, entry in enumerate(entries): - if not entry.is_file: continue @@ -442,17 +440,17 @@ def extract_file_by_file( error = error or UNKNOWN_ERROR if TRACE: logger.debug( - 'extract: failure: {rc}\n' - 'stderr: {stderr}\nstdout: {stdout}'.format(**locals())) + "extract: failure: {rc}\nstderr: {stderr}\nstdout: {stdout}".format(**locals()) + ) errors[entry.path] = error continue # these are all for a single file path warns = get_7z_warnings(stdout) or {} - wmsg = '\n'.join(warns.values()) + wmsg = "\n".join(warns.values()) if wmsg: if entry.path in warnings: - warnings[entry.path] += '\n' + wmsg + warnings[entry.path] += "\n" + wmsg else: warnings[entry.path] = wmsg @@ -462,9 +460,9 @@ def extract_file_by_file( source_file_loc = os.path.join(tmp_extract_dir, source_file_name) if not os.path.exists(source_file_loc): if entry.path in errors: - errors[entry.path] += '\nNo file name extracted.' + errors[entry.path] += "\nNo file name extracted." else: - errors[entry.path] = 'No file name extracted.' + errors[entry.path] = "No file name extracted." continue safe_path = paths.safe_path(entry.path, posix=True, preserve_spaces=True) @@ -475,8 +473,11 @@ def extract_file_by_file( unique_target_file_loc = extractcode.new_name(target_file_loc, is_dir=False) if TRACE: - logger.debug('extract: unique_target_file_loc: from {} to {}'.format( - target_file_loc, unique_target_file_loc)) + logger.debug( + "extract: unique_target_file_loc: from {} to {}".format( + target_file_loc, unique_target_file_loc + ) + ) if os.path.isfile(source_file_loc): fileutils.copyfile(source_file_loc, unique_target_file_loc) @@ -490,7 +491,7 @@ def extract_file_by_file( return convert_warnings_to_list(warnings) -def list_entries(location, arch_type='*'): +def list_entries(location, arch_type="*"): """ Return a tuple of (iterator of Entry, error_messages). The generator contains each entry found in a 7zip-supported archive file at `location`. @@ -504,27 +505,27 @@ def list_entries(location, arch_type='*'): return [] # 7z arguments - listing = 'l' + listing = "l" # NB: we use t* to ensure that all archive types are honored if not arch_type: - arch_type = '' + arch_type = "" else: - arch_type = '-t' + arch_type + arch_type = "-t" + arch_type # pass an empty password so that extraction with passwords WILL fail - password = '-p' - tech_info = '-slt' + password = "-p" + tech_info = "-slt" - output_as_utf = '' + output_as_utf = "" if on_windows: - output_as_utf = '-sccUTF-8' + output_as_utf = "-sccUTF-8" # NB: we force running in the GMT timezone, because 7z is unable to set the # TZ correctly when the archive does not contain TZ info. This does not work # on Windows, because 7z is not using the TZ env var there. 
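    # this is the same TZ=GMT override used in build_7z_extract_command above,
    # so that dates in listings are stable across machines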
timezone = dict(os.environ) - timezone.update({u'TZ': u'GMT'}) + timezone.update({"TZ": "GMT"}) timezone = command.get_env(timezone) args = [ @@ -533,23 +534,20 @@ def list_entries(location, arch_type='*'): arch_type, output_as_utf, password, - '--', + "--", abs_location, ] cmd_loc = get_command_location() - rc, stdout, stderr = command.execute( - cmd_loc=cmd_loc, - args=args, - env=timezone, - to_files=True) + rc, stdout, stderr = command.execute(cmd_loc=cmd_loc, args=args, env=timezone, to_files=True) if TRACE: logger.debug( - 'list_entries: rc: {rc}\n' - 'stderr: file://{stderr}\n' - 'stdout: file://{stdout}\n'.format(**locals())) + "list_entries: rc: {rc}\nstderr: file://{stderr}\nstdout: file://{stdout}\n".format( + **locals() + ) + ) error_messages = [] if rc != 0: @@ -601,19 +599,19 @@ def parse_7z_listing(location): """ # read to unicode - with io.open(location, 'r', encoding='utf-8') as listing: + with io.open(location, "r", encoding="utf-8") as listing: text = listing.read() # normalize line endings to POSIX - text = text.replace('\r\n', '\n') + text = text.replace("\r\n", "\n") if TRACE: - logger.debug('parse_7z_listing: initial text: type: ' + repr(type(text))) - print('--------------------------------------') + logger.debug("parse_7z_listing: initial text: type: " + repr(type(text))) + print("--------------------------------------") print(text) - print('--------------------------------------') + print("--------------------------------------") # for now we ignore the header, and only start dealing with text after that - end_of_header = '----------\n' + end_of_header = "----------\n" _header, _, paths = text.rpartition(end_of_header) if not paths: @@ -626,48 +624,37 @@ def parse_7z_listing(location): # (unless there is a \n in file name which is an error condition) # - ends with an empty line # then we have a global footer - two_empty_lines = '\n\n' - path_key = 'Path' - path_blocks = [ - pb for pb in paths.split(two_empty_lines) - if pb and path_key in pb - ] + two_empty_lines = "\n\n" + path_key = "Path" + path_blocks = [pb for pb in paths.split(two_empty_lines) if pb and path_key in pb] - key_value_sep = '=' + key_value_sep = "=" entries = [] for path_block in path_blocks: # we ignore empty lines as well as lines that do not contain a key - lines = [ - line.strip() for line in path_block.splitlines(False) - if line.strip() - ] + lines = [line.strip() for line in path_block.splitlines(False) if line.strip()] if not lines: continue # we have a weird case of path with line returns in the file name # we concatenate these in the first Path line - while ( - len(lines) > 1 - and lines[0].startswith(path_key) - and key_value_sep not in lines[1] - ): + while len(lines) > 1 and lines[0].startswith(path_key) and key_value_sep not in lines[1]: first_line = lines[0] second_line = lines.pop(1) - first_line = '\n'.join([first_line, second_line]) + first_line = "\n".join([first_line, second_line]) lines[0] = first_line - dangling_lines = [line for line in lines if key_value_sep not in line] + dangling_lines = [line for line in lines if key_value_sep not in line] entry_errors = [] if dangling_lines: - emsg = ( - 'Invalid 7z listing path block missing "=" as key/value ' - 'separator: {}'.format(repr(path_block)) + emsg = 'Invalid 7z listing path block missing "=" as key/value separator: {}'.format( + repr(path_block) ) entry_errors.append(emsg) entry_attributes = {} - key_lines = [line for line in lines if key_value_sep in line] + key_lines = [line for line in lines if key_value_sep in line] 
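        # each remaining line is a "Key = value" technical info line: partition
        # on the first "=" only so that any "=" embedded in a value (such as a
        # path) is kept intact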
for line in key_lines:
            k, _, v = line.partition(key_value_sep)
            k = k.strip()
@@ -678,9 +665,9 @@ def parse_7z_listing(location):
         entries.append(ntry)
 
     if TRACE_ENTRIES:
-        logger.debug('parse_7z_listing: entries# {}\n'.format(len(entries)))
+        logger.debug("parse_7z_listing: entries# {}\n".format(len(entries)))
         for entry in entries:
-            logger.debug('    ' + repr(entry.to_dict()))
+            logger.debug("    " + repr(entry.to_dict()))
 
     return entries
 
@@ -691,6 +678,7 @@ class Entry(object):
     Represent an Archive entry for a directory, file or link in an archive
     with its path and attributes.
     """
+
     # the actual posix path as-is as in the archive (relative, absolute, etc)
     path = attr.ib()
     # bytes
@@ -709,14 +697,14 @@ def to_dict(self, full=False):
         data = attr.asdict(self)
         # data.pop('errors', None)
         if not full:
-            data.pop('date', None)
+            data.pop("date", None)
         return data
 
     def has_illegal_path(self):
-        return '\n' in self.path
+        return "\n" in self.path
 
     def is_relative_path(self):
-        return '..' in self.path
+        return ".." in self.path
 
     def is_empty(self):
         return not self.size
@@ -731,41 +719,43 @@ def from_dict(cls, infos, errors=None):
         is_hardlink = False
         link_target = None
 
-        sl = infos.get('Symbolic Link')
+        sl = infos.get("Symbolic Link")
         if sl:
             is_symlink = True
             link_target = sl
 
-        hl = infos.get('Hard Link')
+        hl = infos.get("Hard Link")
         if hl:
             is_hardlink = True
             link_target = hl
 
         if sl and hl:
             from pprint import pformat
+
             raise ExtractWarningIncorrectEntry(
-                'A symlink cannot be also a hardlink: {}'.format(pformat(infos)))
+                "A symlink cannot also be a hardlink: {}".format(pformat(infos))
+            )
 
         # depending on the type of archive the file vs dir flags are in
         # different attributes :|
         is_dir = (
             # in some listings we have this: Mode = drwxrwxr-x
-            infos.get('Mode', '').lower().startswith('d')
+            infos.get("Mode", "").lower().startswith("d")
             or
             # in cpio and a few more we have a Folder attrib
-            infos.get('Folder', '').startswith('+')
+            infos.get("Folder", "").startswith("+")
             or
             # in 7z listing we have this: Attributes = D_ drwxrwxr-x
-            infos.get('Attributes', '').lower().startswith('d_')
+            infos.get("Attributes", "").lower().startswith("d_")
         ) or False
 
         is_file = not is_dir
 
         e = cls(
-            path=infos.get('Path'),
-            size=infos.get('Size', 0),
-            date=infos.get('Modified', None),
+            path=infos.get("Path"),
+            size=infos.get("Size", 0),
+            date=infos.get("Modified", None),
             is_dir=is_dir,
             is_file=is_file,
             is_symlink=is_symlink,
diff --git a/src/extractcode/uncompress.py b/src/extractcode/uncompress.py
index 143e336..d39f2e6 100644
--- a/src/extractcode/uncompress.py
+++ b/src/extractcode/uncompress.py
@@ -38,7 +38,7 @@ def uncompress(location, target_dir, decompressor, suffix=EXTRACT_SUFFIX):
     # extension such as gz, etc. or introspect the archive header to get the file
     # name when present.
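Aside: a minimal sketch of deriving the uncompressed output name by dropping the compression extension, per the comment above; the real code uses fileutils.file_base_name() for this step.

    import os

    def output_name_for(location):
        # "archive.tar.gz" -> "archive.tar"; "readme.gz" -> "readme"
        base_name, _ext = os.path.splitext(os.path.basename(location))
        return base_name

    assert output_name_for("/tmp/archive.tar.gz") == "archive.tar"
    assert output_name_for("readme.gz") == "readme"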
if DEBUG: - logger.debug('uncompress: ' + location) + logger.debug("uncompress: " + location) tmp_loc, warnings = uncompress_file(location, decompressor) @@ -63,11 +63,10 @@ def uncompress_file(location, decompressor): warnings = [] base_name = fileutils.file_base_name(location) - target_location = os.path.join(fileutils.get_temp_dir( - prefix='extractcode-extract-'), base_name) + target_location = os.path.join(fileutils.get_temp_dir(prefix="extractcode-extract-"), base_name) - with decompressor(location, 'rb') as compressed: - with open(target_location, 'wb') as uncompressed: + with decompressor(location, "rb") as compressed: + with open(target_location, "wb") as uncompressed: buffer_size = 32 * 1024 * 1024 while True: chunk = compressed.read(buffer_size) @@ -75,8 +74,8 @@ def uncompress_file(location, decompressor): break uncompressed.write(chunk) - if getattr(decompressor, 'has_trailing_garbage', False): - warnings.append(location + ': Trailing garbage found and ignored.') + if getattr(decompressor, "has_trailing_garbage", False): + warnings.append(location + ": Trailing garbage found and ignored.") return target_location, warnings @@ -105,10 +104,10 @@ def get_compressed_file_content(location, decompressor): `decompressor` object for decompression. """ warnings = [] - with decompressor(location, 'rb') as compressed: + with decompressor(location, "rb") as compressed: content = compressed.read() - if getattr(decompressor, 'has_trailing_garbage', False): - warnings.append(location + ': Trailing garbage found and ignored.') + if getattr(decompressor, "has_trailing_garbage", False): + warnings.append(location + ": Trailing garbage found and ignored.") return content, warnings diff --git a/src/extractcode/vmimage.py b/src/extractcode/vmimage.py index 2ce0ea7..23e9722 100644 --- a/src/extractcode/vmimage.py +++ b/src/extractcode/vmimage.py @@ -37,29 +37,29 @@ if TRACE: import sys + logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) GUESTFISH_NOT_FOUND = ( - 'WARNING: guestfish executable is not installed. ' - 'Unable to extract virtual machine image: you need to install the ' - 'guestfish tool from libguestfs and extra FS drivers as needed. ' - 'See the ExtractCode README.rst at ' - 'https://github.com/nexB/extractcode/blob/main/README.rst ' - 'and https://libguestfs.org/ for details.' + "WARNING: guestfish executable is not installed. " + "Unable to extract virtual machine image: you need to install the " + "guestfish tool from libguestfs and extra FS drivers as needed. " + "See the ExtractCode README.rst at " + "https://github.com/nexB/extractcode/blob/main/README.rst " + "and https://libguestfs.org/ for details." ) -GUESTFISH_KERNEL_NOT_READABLE = ( -'''libguestfs requires the kernel executable to be readable. +GUESTFISH_KERNEL_NOT_READABLE = """libguestfs requires the kernel executable to be readable. This is the case by default on most Linux distributions except on Ubuntu. Please follow the ExtractCode installation instructions in the README.rst at: https://github.com/nexB/extractcode/blob/main/README.rst ' -''') +""" -EXTRACTCODE_GUESTFISH_PATH_ENVVAR = 'EXTRACTCODE_GUESTFISH_PATH' +EXTRACTCODE_GUESTFISH_PATH_ENVVAR = "EXTRACTCODE_GUESTFISH_PATH" -def get_command(env_var=EXTRACTCODE_GUESTFISH_PATH_ENVVAR, command='guestfish'): +def get_command(env_var=EXTRACTCODE_GUESTFISH_PATH_ENVVAR, command="guestfish"): """ Return the location to the guestfish command or None. 
""" @@ -86,14 +86,13 @@ def check_linux_kernel_is_readable(): """ if on_linux: - kernels = list(pathlib.Path('/boot').glob('vmlinuz-*')) + kernels = list(pathlib.Path("/boot").glob("vmlinuz-*")) if not kernels: raise ExtractErrorFailedToExtract(GUESTFISH_KERNEL_NOT_READABLE) for kern in kernels: if not os.access(kern, os.R_OK): raise ExtractErrorFailedToExtract( - f'Unable to read kernel at: {kern}.\n' - f'{GUESTFISH_KERNEL_NOT_READABLE}' + f"Unable to read kernel at: {kern}.\n{GUESTFISH_KERNEL_NOT_READABLE}" ) @@ -110,8 +109,7 @@ def from_file(cls, location): Raise excptions on errors. """ if not on_linux: - raise ExtractErrorFailedToExtract( - 'VM Image extraction only supported on Linux.') + raise ExtractErrorFailedToExtract("VM Image extraction only supported on Linux.") check_linux_kernel_is_readable() @@ -120,23 +118,23 @@ def from_file(cls, location): if not os.path.exists(abs_location): raise ExtractErrorFailedToExtract( - f'The system cannot find the path specified: {abs_location}') + f"The system cannot find the path specified: {abs_location}" + ) supported_gfs_formats_by_extension = { - '.qcow2': 'qcow2', - '.qcow2c': 'qcow2', - '.qcow': 'qcow2', - '.img': 'qcow2', - '.vmdk': 'vmdk', - '.vdi': 'vdi', + ".qcow2": "qcow2", + ".qcow2c": "qcow2", + ".qcow": "qcow2", + ".img": "qcow2", + ".vmdk": "vmdk", + ".vdi": "vdi", } extension = fileutils.file_extension(location) image_format = supported_gfs_formats_by_extension.get(extension) if not image_format: - raise ExtractErrorFailedToExtract( - f'Unsupported VM image format: {location}') + raise ExtractErrorFailedToExtract(f"Unsupported VM image format: {location}") cmd_loc = get_command() if not cmd_loc: @@ -148,7 +146,7 @@ def from_file(cls, location): guestfish_command=cmd_loc, ) - def listfs(self, skip_partitions=('swap',)): + def listfs(self, skip_partitions=("swap",)): """ Return a list of (filesystem /partition/ device path, filesystem type) for each filesystem found in this image. @@ -160,11 +158,13 @@ def listfs(self, skip_partitions=('swap',)): /partition/sda1: ext4 """ args = [ - '--ro', - f'--format={self.image_format}', - '--add' , self.location, - 'run', - ':', 'list-filesystems', + "--ro", + f"--format={self.image_format}", + "--add", + self.location, + "run", + ":", + "list-filesystems", ] stdout = self.run_guestfish(args) @@ -174,8 +174,8 @@ def listfs(self, skip_partitions=('swap',)): entry = entry.strip() if not entry: continue - if ':' in entry: - partition, _, fstype = entry.partition(':') + if ":" in entry: + partition, _, fstype = entry.partition(":") fstype = fstype.strip() else: partition = entry @@ -184,7 +184,12 @@ def listfs(self, skip_partitions=('swap',)): if any(s in partition for s in skip_partitions): continue - filesystems.append((partition, fstype,)) + filesystems.append( + ( + partition, + fstype, + ) + ) return filesystems @@ -194,11 +199,15 @@ def extract_image(self, target_tarball): gzipped-compressed tarball (.tar.gz). Raise Exception on errors. 
""" args = [ - '--ro', - '--inspector', - f'--format={self.image_format}', - '--add', self.location, - 'tar-out', '/', target_tarball, 'compress:gzip', + "--ro", + "--inspector", + f"--format={self.image_format}", + "--add", + self.location, + "tar-out", + "/", + target_tarball, + "compress:gzip", ] self.run_guestfish(args) @@ -213,12 +222,20 @@ def extract_partition(self, partition, target_tarball): # guestfish --ro add foo.qcow2 : run : mount /dev/sda1 / : tar-out /etc foo.tgz compress:gzip args = [ - '--ro', - f'--format={self.image_format}', - '--add', self.location, - 'run', - ':', 'mount', partition, '/', - ':', 'tar-out', '/', target_tarball, 'compress:gzip', + "--ro", + f"--format={self.image_format}", + "--add", + self.location, + "run", + ":", + "mount", + partition, + "/", + ":", + "tar-out", + "/", + target_tarball, + "compress:gzip", ] self.run_guestfish(args) @@ -228,6 +245,7 @@ def run_guestfish(self, args, timeout=None): seconds. Return stdout as a unicode string. Raise Exception on error. """ import subprocess + full_args = [self.guestfish_command] + args try: stdout = subprocess.check_output( @@ -236,12 +254,9 @@ def run_guestfish(self, args, timeout=None): stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as cpe: - args = ' '.join([self.guestfish_command] + args) + args = " ".join([self.guestfish_command] + args) output = as_unicode(cpe.output) - error = ( - f'Failed to run guestfish to extract VM image: {args}\n' - f'output: {output}' - ) + error = f"Failed to run guestfish to extract VM image: {args}\noutput: {output}" raise ExtractErrorFailedToExtract(error) return as_unicode(stdout) @@ -266,8 +281,8 @@ def extract(location, target_dir, as_tarballs=False, skip_symlinks=True): abs_target_dir = os.path.abspath(os.path.expanduser(target_dir)) if not os.path.exists(abs_target_dir) or not os.path.isdir(abs_target_dir): raise ExtractErrorFailedToExtract( - f'The system cannot find the target directory path ' - f'specified: {target_dir}') + f"The system cannot find the target directory path specified: {target_dir}" + ) vmimage = VmImage.from_file(location) @@ -277,15 +292,13 @@ def extract(location, target_dir, as_tarballs=False, skip_symlinks=True): # try a plain extract first try: - if not as_tarballs: - intermediate_dir = fileutils.get_temp_dir( - prefix='extractcode-vmimage') + intermediate_dir = fileutils.get_temp_dir(prefix="extractcode-vmimage") tdir = intermediate_dir else: tdir = target_dir - target_tarball = os.path.join(tdir, f'{filename}.tar.gz') + target_tarball = os.path.join(tdir, f"{filename}.tar.gz") vmimage.extract_image(target_tarball=target_tarball) if not as_tarballs: @@ -298,10 +311,9 @@ def extract(location, target_dir, as_tarballs=False, skip_symlinks=True): warnings.extend(warns) except ExtractErrorFailedToExtract as e: - print('Cannot extract VM Image filesystems as a single file tree.') + print("Cannot extract VM Image filesystems as a single file tree.") - warnings.append( - f'Cannot extract VM Image filesystems as a single file tree:\n{e}') + warnings.append(f"Cannot extract VM Image filesystems as a single file tree:\n{e}") # fall back to file system extraction, one partition at a time partitions = vmimage.listfs() if not partitions: @@ -311,11 +323,11 @@ def extract(location, target_dir, as_tarballs=False, skip_symlinks=True): # we can safely extract this to a root / dir as we have only one partition partition, _parttype = partitions[0] if not as_tarballs: - tdir = fileutils.get_temp_dir(prefix='extractcode-vmimage') + tdir = 
fileutils.get_temp_dir(prefix="extractcode-vmimage")
         else:
             tdir = target_dir
 
-        target_tarball = os.path.join(tdir, f'{filename}.tar.gz')
+        target_tarball = os.path.join(tdir, f"{filename}.tar.gz")
         vmimage.extract_partition(
             partition=partition,
             target_tarball=target_tarball,
@@ -334,16 +346,16 @@ def extract(location, target_dir, as_tarballs=False, skip_symlinks=True):
         # base name based after the partition device name
 
         for partition, _parttype in partitions:
-            base_name = partition.replace('/', '-')
+            base_name = partition.replace("/", "-")
 
             if not as_tarballs:
-                tdir = fileutils.get_temp_dir(prefix='extractcode-vmimage')
+                tdir = fileutils.get_temp_dir(prefix="extractcode-vmimage")
             else:
                 tdir = target_dir
 
             partition_tarball = os.path.join(
                 tdir,
-                f'{filename}-{base_name}.tar.gz',
+                f"{filename}-{base_name}.tar.gz",
             )
             vmimage.extract_partition(
                 partition=partition,
@@ -372,6 +384,7 @@ def extract_image_tarball(tarball, target_dir, skip_symlinks=True):
     Return a list of warning messages. Raise Exception on errors.
     """
     from extractcode.libarchive2 import extract
+
     return extract(
         location=tarball,
         target_dir=target_dir,
diff --git a/tests/extractcode_assert_utils.py b/tests/extractcode_assert_utils.py
index 1b3ac34..cc4e95b 100644
--- a/tests/extractcode_assert_utils.py
+++ b/tests/extractcode_assert_utils.py
@@ -28,8 +28,8 @@ def check_size(expected_size, location):
 
 def check_results_with_expected_json(results, expected_loc, regen=False):
     if regen:
-        with open(expected_loc, 'w') as ex:
-            json.dump(results, ex, indent=2, separators=(',', ':'))
+        with open(expected_loc, "w") as ex:
+            json.dump(results, ex, indent=2, separators=(",", ":"))
     with open(expected_loc) as ex:
         expected = json.load(ex)
     try:
@@ -58,17 +58,17 @@ def check_files(test_dir, expected, regen=False):
             location = os.path.join(top, f)
             locs.append(location)
             path = fileutils.as_posixpath(location)
-            path = path.replace(test_dir_path, '').strip('/')
+            path = path.replace(test_dir_path, "").strip("/")
             result.append(path)
 
     expected_is_json_file = False
-    if not isinstance(expected, (list, tuple)) and expected.endswith('.json'):
+    if not isinstance(expected, (list, tuple)) and expected.endswith(".json"):
         expected_is_json_file = True
         # this is a path to a JSON file
         if regen:
-            with open(expected, 'w') as ex:
-                json.dump(result, ex, indent=2, separators=(',', ':'))
+            with open(expected, "w") as ex:
+                json.dump(result, ex, indent=2, separators=(",", ":"))
             expected_content = result
         else:
             with open(expected) as ex:
@@ -83,8 +83,8 @@ def check_files(test_dir, expected, regen=False):
         assert result == expected_content
     except AssertionError:
         files = [
-            'test_dir: file://{}'.format(test_dir),
-            'expected: file://{}'.format(expected if expected_is_json_file else ''),
+            "test_dir: file://{}".format(test_dir),
+            "expected: file://{}".format(expected if expected_is_json_file else ""),
         ]
         assert result == files + expected_content
 
@@ -104,15 +104,15 @@ def check_no_error(result):
 
 
 def is_posixpath(location):
-    """
+    r"""
     Return True if the `location` path is likely a POSIX-like path using POSIX
     path separators (slash or "/") or has no path separator.
 
     Return False if the `location` path is likely a Windows-like path using
     backslash as path separators (e.g. "\").
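    For example (illustrative additions, not in the original docstring):
    "a/b/c.txt" and "abc.txt" are POSIX-like, while "C:\a\b" and
    "a\b\c.txt" are Windows-like.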
""" - has_slashes = '/' in location - has_backslashes = '\\' in location + has_slashes = "/" in location + has_backslashes = "\\" in location # windows paths with drive if location: drive, _ = ntpath.splitdrive(location) @@ -128,7 +128,7 @@ def is_posixpath(location): def to_posix(path): - """ + r""" Return a path using the posix path separator given a path that may contain posix or windows separators, converting \\ to /. NB: this path will still be valid in the windows explorer (except as a UNC or share name). It will be a @@ -136,8 +136,8 @@ def to_posix(path): line operations. """ is_unicode = isinstance(path, str) - ntpath_sep = is_unicode and u'\\' or '\\' - posixpath_sep = is_unicode and u'/' or '/' + ntpath_sep = is_unicode and "\\" or "\\" + posixpath_sep = is_unicode and "/" or "/" if is_posixpath(path): if on_windows: return path.replace(ntpath_sep, posixpath_sep) @@ -147,7 +147,7 @@ def to_posix(path): class BaseArchiveTestCase(FileBasedTesting): - test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + test_data_dir = os.path.join(os.path.dirname(__file__), "data") def check_get_extractors(self, test_file, expected, kinds=()): from extractcode import archive @@ -159,16 +159,16 @@ def check_get_extractors(self, test_file, expected, kinds=()): extractors = archive.get_extractors(test_loc) fe = fileutils.file_extension(test_loc).lower() - em = ', '.join(e.__module__ + '.' + e.__name__ for e in extractors) + em = ", ".join(e.__module__ + "." + e.__name__ for e in extractors) - msg = ('%(expected)r == %(extractors)r for %(test_file)s\n' - 'with fe:%(fe)r, em:%(em)s' % locals()) + msg = ( + "%(expected)r == %(extractors)r for %(test_file)s\nwith fe:%(fe)r, em:%(em)s" % locals() + ) assert expected == extractors, msg def assertRaisesInstance(self, excInstance, callableObj, *args, **kwargs): """ - This assertion accepts an instance instead of a class for refined - exception testing. + Accept an instance instead of a class for refined exception testing. 
""" kwargs = kwargs or {} excClass = excInstance.__class__ @@ -177,11 +177,11 @@ def assertRaisesInstance(self, excInstance, callableObj, *args, **kwargs): except excClass as e: assert str(e).startswith(str(excInstance)) else: - if hasattr(excClass, '__name__'): + if hasattr(excClass, "__name__"): excName = excClass.__name__ else: excName = str(excClass) - raise self.failureException('%s not raised' % excName) + raise self.failureException("%s not raised" % excName) def check_extract( self, @@ -218,17 +218,17 @@ def check_extract( else: for exp_path, exp_size in expected.items(): exp_loc = os.path.join(test_dir, exp_path) - msg = '''When extracting: %(test_file)s + msg = """When extracting: %(test_file)s With function: %(test_function)r - Failed to find expected path: %(exp_loc)s''' + Failed to find expected path: %(exp_loc)s""" assert os.path.exists(exp_loc), msg % locals() if exp_size is not None: res_size = os.stat(exp_loc).st_size - msg = '''When extracting: %(test_file)s + msg = """When extracting: %(test_file)s With function: %(test_function)r Failed to assert the correct size %(exp_size)d Got instead: %(res_size)d - for expected path: %(exp_loc)s''' + for expected path: %(exp_loc)s""" assert exp_size == res_size, msg % locals() def collect_extracted_path(self, test_dir): @@ -237,10 +237,10 @@ def collect_extracted_path(self, test_dir): for t, dirs, files in os.walk(test_dir): t = fileutils.as_posixpath(t) for d in dirs: - nd = posixpath.join(t, d).replace(td, '') + '/' + nd = posixpath.join(t, d).replace(td, "") + "/" result.append(nd) for f in files: - nf = posixpath.join(t, f).replace(td, '') + nf = posixpath.join(t, f).replace(td, "") result.append(nf) result = sorted(result) return result @@ -251,10 +251,9 @@ def assertExceptionContains(self, text, callableObj, *args, **kwargs): except Exception as e: if text not in str(e): raise self.failureException( - 'Exception %(e)r raised, ' - 'it should contain the text %(text)r ' - 'and does not' % locals() + "Exception %(e)r raised, " + "it should contain the text %(text)r " + "and does not" % locals() ) else: - raise self.failureException( - 'Exception containing %(text)r not raised' % locals()) + raise self.failureException("Exception containing %(text)r not raised" % locals()) diff --git a/tests/test_archive.py b/tests/test_archive.py index 64f17ce..8a0bb0a 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -51,137 +51,138 @@ class TestGetExtractorTest(BaseArchiveTestCase): - def test_get_extractors_1(self): - test_file = 'archive/zip/basic.zip' + test_file = "archive/zip/basic.zip" expected = [archive.extract_zip] self.check_get_extractors(test_file, expected) def test_get_extractors_2(self): - test_file = 'archive/rar/basic.rar' + test_file = "archive/rar/basic.rar" expected = [archive.extract_rar] self.check_get_extractors(test_file, expected) def test_get_extractors_3(self): - test_file = 'archive/deb/adduser_3.112ubuntu1_all.deb' + test_file = "archive/deb/adduser_3.112ubuntu1_all.deb" expected = [archive.extract_deb] self.check_get_extractors(test_file, expected) def test_get_extractors_4(self): - test_file = 'archive/cpio/elfinfo-1.0-1.fc9.src.cpio' + test_file = "archive/cpio/elfinfo-1.0-1.fc9.src.cpio" expected = [archive.extract_cpio] self.check_get_extractors(test_file, expected) def test_get_extractors_5(self): - test_file = 'archive/rpm/elfinfo-1.0-1.fc9.src.rpm' + test_file = "archive/rpm/elfinfo-1.0-1.fc9.src.rpm" expected = [archive.extract_rpm, archive.extract_cpio] self.check_get_extractors(test_file, 
expected) def test_get_extractors_6(self): - test_file = 'archive/gzip/file_4.26-1.diff.gz' + test_file = "archive/gzip/file_4.26-1.diff.gz" expected = [archive.uncompress_gzip] self.check_get_extractors(test_file, expected) def test_get_extractors_7(self): - test_file = 'archive/ar/liby.a' + test_file = "archive/ar/liby.a" expected = [archive.extract_ar] self.check_get_extractors(test_file, expected) def test_get_extractors_8(self): - test_file = 'archive/bz2/single_file_not_tarred.bz2' + test_file = "archive/bz2/single_file_not_tarred.bz2" expected = [archive.uncompress_bzip2] self.check_get_extractors(test_file, expected) def test_get_extractors_9(self): - test_file = 'archive/tar/tarred.tar' + test_file = "archive/tar/tarred.tar" expected = [archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_10(self): - test_file = 'archive/tbz/tarred_bzipped.bz' + test_file = "archive/tbz/tarred_bzipped.bz" expected = [archive.uncompress_bzip2] self.check_get_extractors(test_file, expected) def test_get_extractors_11(self): - test_file = 'archive/tbz/tarred_bzipped.tar.bz2' + test_file = "archive/tbz/tarred_bzipped.tar.bz2" expected = [archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_12(self): - test_file = 'archive/tbz/tarred_bzipped.tbz' + test_file = "archive/tbz/tarred_bzipped.tbz" expected = [archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_13(self): - test_file = 'archive/tgz/tarred_gzipped.gz' + test_file = "archive/tgz/tarred_gzipped.gz" expected = [archive.uncompress_gzip] self.check_get_extractors(test_file, expected) def test_get_extractors_14(self): - test_file = 'archive/tgz/tarred_gzipped.tar.gz' + test_file = "archive/tgz/tarred_gzipped.tar.gz" expected = [archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_15(self): - test_file = 'archive/tgz/tarred_gzipped.tgz' + test_file = "archive/tgz/tarred_gzipped.tgz" expected = [archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_16(self): - test_file = 'archive/7z/z.7z' + test_file = "archive/7z/z.7z" expected = [archive.extract_7z] self.check_get_extractors(test_file, expected) def test_get_extractors_17(self): - test_file = 'archive/Z/tr2tex.Z' - expected = [archive.extract_Z, ] + test_file = "archive/Z/tr2tex.Z" + expected = [ + archive.extract_Z, + ] self.check_get_extractors(test_file, expected) def test_get_extractors_18(self): - test_file = 'archive/Z/tkWWW-0.11.tar.Z' + test_file = "archive/Z/tkWWW-0.11.tar.Z" expected = [archive.extract_Z, archive.extract_tar] self.check_get_extractors(test_file, expected) def test_get_extractors_19(self): - test_file = 'archive/xar/xar-1.4.xar' + test_file = "archive/xar/xar-1.4.xar" expected = [archive.extract_xarpkg] self.check_get_extractors(test_file, expected) def test_get_extractor_with_kinds_deb(self): - test_file = 'archive/deb/adduser_3.112ubuntu1_all.deb' + test_file = "archive/deb/adduser_3.112ubuntu1_all.deb" expected = [archive.extract_deb] self.check_get_extractors(test_file, expected, (archive.package,)) def test_get_extractor_with_kinds_rpm(self): - test_file = 'archive/rpm/elfinfo-1.0-1.fc9.src.rpm' + test_file = "archive/rpm/elfinfo-1.0-1.fc9.src.rpm" kinds = (archive.regular, archive.file_system, archive.docs) expected = [] self.check_get_extractors(test_file, expected, kinds) def test_get_extractor_with_kinds_rpm_2(self): - test_file = 'archive/rpm/elfinfo-1.0-1.fc9.src.rpm' + 
test_file = "archive/rpm/elfinfo-1.0-1.fc9.src.rpm" kinds = (archive.regular, archive.file_system, archive.docs, archive.package) expected = [sevenzip.extract, libarchive2.extract] self.check_get_extractors(test_file, expected, kinds) def test_get_extractor_with_kinds_deb2(self): - test_file = 'archive/deb/adduser_3.112ubuntu1_all.deb' + test_file = "archive/deb/adduser_3.112ubuntu1_all.deb" expected = [] self.check_get_extractors(test_file, expected, (archive.regular,)) def test_get_extractor_with_kinds_ar(self): - test_file = 'archive/ar/liby.a' + test_file = "archive/ar/liby.a" kinds = (archive.regular, archive.file_system, archive.docs) expected = [] self.check_get_extractors(test_file, expected, kinds) def test_get_extractor_with_kinds_bzip(self): - test_file = 'archive/tbz/tarred_bzipped.tar.bz2' + test_file = "archive/tbz/tarred_bzipped.tar.bz2" expected = [] self.check_get_extractors(test_file, expected, (archive.package,)) def test_get_extractor_with_kinds_plain_tar(self): - test_file = 'archive/tar/tarred.tar' + test_file = "archive/tar/tarred.tar" expected = [] self.check_get_extractors(test_file, expected, (archive.package,)) @@ -189,7 +190,7 @@ def test_get_extractor_with_kinds_plain_tar(self): self.check_get_extractors(test_file, expected, (archive.regular,)) def test_get_extractor_for_graffle_docs(self): - test_file = 'archive/graffle/example.graffle' + test_file = "archive/graffle/example.graffle" expected = [archive.uncompress_gzip] self.check_get_extractors(test_file, expected, (archive.docs,)) @@ -198,7 +199,7 @@ def test_get_extractor_for_graffle_docs(self): self.check_get_extractors(test_file, expected, kinds=extractcode.default_kinds) def test_get_extractor_for_compressed_svgz_docs(self): - test_file = 'archive/svgz/insert-emptyframe.svgz' + test_file = "archive/svgz/insert-emptyframe.svgz" expected = [archive.uncompress_gzip] self.check_get_extractors(test_file, expected, (archive.docs,)) @@ -207,8 +208,8 @@ def test_get_extractor_for_compressed_svgz_docs(self): self.check_get_extractors(test_file, expected, kinds=extractcode.default_kinds) def test_get_extractor_qcow2(self): - test_file = self.extract_test_tar('vmimage/foobar.qcow2.tar.gz') - test_file = str(Path(test_file) / 'foobar.qcow2') + test_file = self.extract_test_tar("vmimage/foobar.qcow2.tar.gz") + test_file = str(Path(test_file) / "foobar.qcow2") expected = [] self.check_get_extractors(test_file, expected, kinds=extractcode.default_kinds) @@ -219,7 +220,7 @@ def test_get_extractor_qcow2(self): self.check_get_extractors(test_file, expected, kinds=extractcode.all_kinds) def test_get_extractor_for_dia(self): - test_file = self.get_test_loc('archive/dia/dia.dia', copy=True) + test_file = self.get_test_loc("archive/dia/dia.dia", copy=True) expected = [archive.uncompress_gzip] self.check_get_extractors(test_file, expected, kinds=extractcode.all_kinds) @@ -229,14 +230,14 @@ def test_get_extractor_for_dia(self): def test_get_handlers(self): test_data = [ - ('archive/deb/adduser_3.112ubuntu1_all.deb', ['Tar', 'Debian package']), - ('archive/rpm/elfinfo-1.0-1.fc9.src.rpm', ['RPM package']), - ('archive/ar/liby.a', ['ar archive', 'Static Library']), - ('archive/tar/tarred.tar', ['Tar']), - ('archive/tbz/tarred_bzipped.tar.bz2', ['bzip2', 'Tar bzip2']), - ('archive/tbz/tarred_bzipped.bz', ['bzip2', 'Tar bzip2']), - ('archive/tgz/tarred_gzipped.gz', ['Tar gzip', 'Gzip']), - ('archive/gzip/mysql-arch.ARZ', ['Tar gzip', 'Gzip']), + ("archive/deb/adduser_3.112ubuntu1_all.deb", ["Tar", "Debian package"]), + 
("archive/rpm/elfinfo-1.0-1.fc9.src.rpm", ["RPM package"]), + ("archive/ar/liby.a", ["ar archive", "Static Library"]), + ("archive/tar/tarred.tar", ["Tar"]), + ("archive/tbz/tarred_bzipped.tar.bz2", ["bzip2", "Tar bzip2"]), + ("archive/tbz/tarred_bzipped.bz", ["bzip2", "Tar bzip2"]), + ("archive/tgz/tarred_gzipped.gz", ["Tar gzip", "Gzip"]), + ("archive/gzip/mysql-arch.ARZ", ["Tar gzip", "Gzip"]), ] for test_file, expected in test_data: @@ -246,15 +247,15 @@ def test_get_handlers(self): def test_score_handlers(self): test_data = [ - ('archive/deb/adduser_3.112ubuntu1_all.deb', [(31, 'Debian package'), (11, 'Tar')]), - ('archive/rpm/elfinfo-1.0-1.fc9.src.rpm', [(32, 'RPM package')]), - ('archive/ar/liby.a', [(31, 'Static Library'), (17, 'ar archive')]), - ('archive/tar/tarred.tar', [(29, 'Tar')]), - ('archive/tar/gem/panchira-0.1.1.gem', [(31, 'Ruby Gem package'), (17, 'Tar')]), - ('archive/tbz/tarred_bzipped.tar.bz2', [(30, 'Tar bzip2'), (29, 'bzip2')]), - ('archive/tbz/tarred_bzipped.bz', [(29, 'bzip2'), (18, 'Tar bzip2')]), - ('archive/tgz/tarred_gzipped.gz', [(29, 'Gzip'), (18, 'Tar gzip')]), - ('archive/gzip/mysql-arch.ARZ', [(29, 'Gzip'), (18, 'Tar gzip')]), + ("archive/deb/adduser_3.112ubuntu1_all.deb", [(31, "Debian package"), (11, "Tar")]), + ("archive/rpm/elfinfo-1.0-1.fc9.src.rpm", [(32, "RPM package")]), + ("archive/ar/liby.a", [(31, "Static Library"), (17, "ar archive")]), + ("archive/tar/tarred.tar", [(29, "Tar")]), + ("archive/tar/gem/panchira-0.1.1.gem", [(31, "Ruby Gem package"), (17, "Tar")]), + ("archive/tbz/tarred_bzipped.tar.bz2", [(30, "Tar bzip2"), (29, "bzip2")]), + ("archive/tbz/tarred_bzipped.bz", [(29, "bzip2"), (18, "Tar bzip2")]), + ("archive/tgz/tarred_gzipped.gz", [(29, "Gzip"), (18, "Tar gzip")]), + ("archive/gzip/mysql-arch.ARZ", [(29, "Gzip"), (18, "Tar gzip")]), ] for test_file, expected in test_data: @@ -266,7 +267,7 @@ def test_score_handlers(self): def test_no_handler_is_selected_for_a_non_archive(self): # failed because of libmagic bug: http://bugs.gw.com/view.php?id=467 # passing by introducing strict flag for handlers - test_loc = self.get_test_loc('archive/not_archive/hashfile') + test_loc = self.get_test_loc("archive/not_archive/hashfile") assert [] == list(archive.get_handlers(test_loc)) assert None == archive.get_extractor(test_loc) assert None == archive.get_extractor(test_loc, kinds=extractcode.all_kinds) @@ -274,14 +275,14 @@ def test_no_handler_is_selected_for_a_non_archive(self): def test_no_handler_is_selected_for_a_non_archive2(self): # FWIW there is a related libmagic bug: http://bugs.gw.com/view.php?id=473 - test_loc = self.get_test_loc('archive/not_archive/wildtest.txt') + test_loc = self.get_test_loc("archive/not_archive/wildtest.txt") assert [] == list(archive.get_handlers(test_loc)) assert None == archive.get_extractor(test_loc) assert None == archive.get_extractor(test_loc, kinds=extractcode.all_kinds) assert not archive.should_extract(test_loc, kinds=extractcode.default_kinds) def test_no_handler_is_selected_for_a_non_archive3(self): - test_loc = self.get_test_loc('archive/not_archive/savetransfer.c') + test_loc = self.get_test_loc("archive/not_archive/savetransfer.c") assert [] == list(archive.get_handlers(test_loc)) assert None == archive.get_extractor(test_loc) assert None == archive.get_extractor(test_loc, kinds=extractcode.all_kinds) @@ -296,66 +297,65 @@ def test_7zip_extract_can_extract_to_relative_paths(self): import shutil from extractcode.sevenzip import extract - test_file = 
self.get_test_loc('archive/relative_path/basic.zip', copy=True) + test_file = self.get_test_loc("archive/relative_path/basic.zip", copy=True) - project_tmp = join(project_root, 'tmp') + project_tmp = join(project_root, "tmp") fileutils.create_dir(project_tmp) project_root_abs = abspath(project_root) - test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, '').strip('\\/') - test_tgt_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, '').strip('\\/') + test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/") + test_tgt_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/") shutil.copy(test_file, test_src_dir) - test_src_file = join(test_src_dir, 'basic.zip') + test_src_file = join(test_src_dir, "basic.zip") result = list(extract(test_src_file, test_tgt_dir)) assert [] == result - expected = ['c/a/a.txt', 'c/b/a.txt', 'c/c/a.txt'] + expected = ["c/a/a.txt", "c/b/a.txt", "c/c/a.txt"] check_files(test_tgt_dir, expected) def test_windows_media_player_skins_are_zip(self): - test_file = self.get_test_loc('archive/wmz/Go.wmz') + test_file = self.get_test_loc("archive/wmz/Go.wmz") extractors = archive.get_extractors(test_file) assert [archive.extract_zip] == extractors def test_windows_ntfs_wmz_are_sometimes_gzip(self): - test_file = self.get_test_loc('archive/wmz/image003.wmz') + test_file = self.get_test_loc("archive/wmz/image003.wmz") extractors = archive.get_extractors(test_file) assert [archive.uncompress_gzip] == extractors class TestTarGzip(BaseArchiveTestCase): - def test_extract_targz_basic(self): - test_file = self.get_test_loc('archive/tgz/tarred_gzipped.tar.gz') + test_file = self.get_test_loc("archive/tgz/tarred_gzipped.tar.gz") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'e/a/b.txt') + result = os.path.join(test_dir, "e/a/b.txt") assert os.path.exists(result) def test_extract_targz_with_trailing_data(self): - test_file = self.get_test_loc('archive/tgz/trailing.tar.gz') + test_file = self.get_test_loc("archive/tgz/trailing.tar.gz") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'a.txt') + result = os.path.join(test_dir, "a.txt") assert os.path.exists(result) def test_extract_targz_broken(self): - test_file = self.get_test_loc('archive/tgz/broken.tar.gz') + test_file = self.get_test_loc("archive/tgz/broken.tar.gz") test_dir = self.get_temp_dir() - expected = Exception('Unrecognized archive format') + expected = Exception("Unrecognized archive format") self.assertRaisesInstance(expected, archive.extract_tar, test_file, test_dir) def test_extract_targz_with_absolute_path(self): - non_result = '/tmp/subdir' + non_result = "/tmp/subdir" assert not os.path.exists(non_result) test_dir = self.get_temp_dir() - test_file = self.get_test_loc('archive/tgz/absolute_path.tar.gz') + test_file = self.get_test_loc("archive/tgz/absolute_path.tar.gz") archive.extract_tar(test_file, test_dir) assert not os.path.exists(non_result) - result = os.path.join(test_dir, 'tmp/subdir/a.txt') + result = os.path.join(test_dir, "tmp/subdir/a.txt") assert os.path.exists(result) def test_extract_targz_with_relative_path(self): - test_file = self.get_test_loc('archive/tgz/relative.tar.gz') + test_file = self.get_test_loc("archive/tgz/relative.tar.gz") """ This test file was created with: import tarfile @@ -368,40 +368,40 @@ def test_extract_targz_with_relative_path(self): test_dir = self.get_temp_dir() 
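        # Aside (not part of the original test): the docstring above shows the
        # fixture recipe; equivalently, a tarball whose members carry "../"
        # names can be built with tarfile's arcname, and extraction must remap
        # those segments to "dotdot/" so no member escapes the target dir:
        #
        #     import tarfile
        #     with tarfile.open("relative.tar.gz", "w:gz") as tar:
        #         tar.add("a.txt", arcname="../a_parent_folder.txt")
        #         tar.add("b.txt", arcname="../folder/subfolder/b_subfolder.txt")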
archive.extract_tar(test_file, test_dir) - non_result = os.path.join(test_dir, '../a_parent_folder.txt') + non_result = os.path.join(test_dir, "../a_parent_folder.txt") assert not os.path.exists(non_result) expected = [ - 'dotdot/dotdot/another_folder/b_two_root.txt', - 'dotdot/a_parent_folder.txt', - 'dotdot/folder/subfolder/b_subfolder.txt' + "dotdot/dotdot/another_folder/b_two_root.txt", + "dotdot/a_parent_folder.txt", + "dotdot/folder/subfolder/b_subfolder.txt", ] check_files(test_dir, expected) def test_extract_targz_with_trailing_data2(self): test_dir1 = self.get_temp_dir() - test_file = self.get_test_loc('archive/tgz/trailing2.tar.gz') + test_file = self.get_test_loc("archive/tgz/trailing2.tar.gz") archive.extract_tar(test_file, test_dir1) test_dir2 = self.get_temp_dir() - test_file2 = self.get_test_loc('archive/tgz/no_trailing.tar.gz') + test_file2 = self.get_test_loc("archive/tgz/no_trailing.tar.gz") archive.extract_tar(test_file2, test_dir2) assert is_same(test_dir1, test_dir2) def test_extract_targz_with_mixed_case_and_symlink(self): - test_file = self.get_test_loc('archive/tgz/mixed_case_and_symlink.tgz') + test_file = self.get_test_loc("archive/tgz/mixed_case_and_symlink.tgz") test_dir = self.get_temp_dir() result = archive.extract_tar(test_file, test_dir) assert [] == result - exp_file = self.get_test_loc('archive/tgz/mixed_case_and_symlink.tgz.expected.json') + exp_file = self.get_test_loc("archive/tgz/mixed_case_and_symlink.tgz.expected.json") check_files(test_dir, exp_file, regen=False) def test_extract_targz_symlinks(self): - test_file = self.get_test_loc('archive/tgz/symlink.tar.gz') + test_file = self.get_test_loc("archive/tgz/symlink.tar.gz") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) expected = [ - 'z/x/a', + "z/x/a", # these are skipped # this is a link: a -> ../x/a # 'z/y/a', @@ -415,7 +415,7 @@ def test_extract_targz_symlinks(self): def test_extract_targz_from_apache_should_not_return_errors(self): # from http://archive.apache.org/dist/commons/logging/source/commons-logging-1.1.2-src.tar.gz # failed with ReadError('not a bzip2 file',) - test_file = self.get_test_loc('archive/tgz/commons-logging-1.1.2-src.tar.gz') + test_file = self.get_test_loc("archive/tgz/commons-logging-1.1.2-src.tar.gz") test_dir = self.get_temp_dir() extractor = archive.get_extractor(test_file) assert archive.extract_tar == extractor @@ -424,7 +424,7 @@ def test_extract_targz_from_apache_should_not_return_errors(self): assert os.listdir(test_dir) def test_extract_targz_with_unicode_path_should_extract_without_error(self): - test_file = self.get_test_loc('archive/tgz/tgz_unicode.tgz') + test_file = self.get_test_loc("archive/tgz/tgz_unicode.tgz") test_dir = self.get_temp_dir() extractor = archive.get_extractor(test_file) assert archive.extract_tar == extractor @@ -434,12 +434,11 @@ def test_extract_targz_with_unicode_path_should_extract_without_error(self): class TestUncompressGzip(BaseArchiveTestCase): - def test_uncompress_gzip_basic(self): - test_file = self.get_test_loc('archive/gzip/file_4.26-1.diff.gz') + test_file = self.get_test_loc("archive/gzip/file_4.26-1.diff.gz") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'file_4.26-1.diff.gz-extract') + result = os.path.join(test_dir, "file_4.26-1.diff.gz-extract") assert os.path.exists(result) def test_uncompress_concatenated_gzip(self): @@ -448,101 +447,100 @@ def test_uncompress_concatenated_gzip(self): # echo "f2content" > f2 # gzip -k f1 # gzip -k 
-c f2 >> twofiles.gz - test_file = self.get_test_loc('archive/gzip/twofiles.gz') + test_file = self.get_test_loc("archive/gzip/twofiles.gz") test_dir = self.get_temp_dir() warnings = archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'twofiles.gz-extract') + result = os.path.join(test_dir, "twofiles.gz-extract") assert os.path.exists(result) - assert b'f1content\nf2content\n' == open(result, 'rb').read() + assert b"f1content\nf2content\n" == open(result, "rb").read() assert [] == warnings - @pytest.mark.xfail(reason='Fails for now on Python 3') + @pytest.mark.xfail(reason="Fails for now on Python 3") def test_uncompress_gzip_with_trailing_data(self): - test_file = self.get_test_loc('archive/gzip/trailing_data.gz') + test_file = self.get_test_loc("archive/gzip/trailing_data.gz") test_dir = self.get_temp_dir() warnings = archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'trailing_data.gz-extract') + result = os.path.join(test_dir, "trailing_data.gz-extract") assert os.path.exists(result) assert [] == warnings def test_uncompress_gzip_with_leading_data(self): # even though we do not fail when there is invalid trailing data we # should still fail on invalid leading data - test_file = self.get_test_loc('archive/gzip/leading_data.gz') + test_file = self.get_test_loc("archive/gzip/leading_data.gz") test_dir = self.get_temp_dir() - expected = Exception('Not a gzipped file') + expected = Exception("Not a gzipped file") self.assertRaisesInstance(expected, archive.uncompress_gzip, test_file, test_dir) def test_uncompress_gzip_with_random_data(self): - test_file = self.get_test_loc('archive/gzip/random_binary.data') + test_file = self.get_test_loc("archive/gzip/random_binary.data") test_dir = self.get_temp_dir() - expected = Exception('Not a gzipped file') + expected = Exception("Not a gzipped file") self.assertRaisesInstance(expected, archive.uncompress_gzip, test_file, test_dir) def test_uncompress_gzip_with_backslash_in_path(self): # weirdly enough, gzip keeps the original path/name - test_file = self.get_test_loc('archive/gzip/backslash_path.gz') + test_file = self.get_test_loc("archive/gzip/backslash_path.gz") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'backslash_path.gz-extract') + result = os.path.join(test_dir, "backslash_path.gz-extract") assert os.path.exists(result) def test_uncompress_gzip_can_uncompress_windows_ntfs_wmz(self): - test_file = self.get_test_loc('archive/wmz/image003.wmz') + test_file = self.get_test_loc("archive/wmz/image003.wmz") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'image003.wmz-extract') + result = os.path.join(test_dir, "image003.wmz-extract") assert os.path.exists(result) def test_uncompress_gzip_can_uncompress_mysql_arz(self): - test_file = self.get_test_loc('archive/gzip/mysql-arch.ARZ') + test_file = self.get_test_loc("archive/gzip/mysql-arch.ARZ") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'mysql-arch.ARZ-extract') + result = os.path.join(test_dir, "mysql-arch.ARZ-extract") assert os.path.exists(result) class TestTarBz2(BaseArchiveTestCase): - def test_extract_tar_bz2_basic(self): - test_file = self.get_test_loc('archive/tbz/tarred_bzipped.tar.bz2') + test_file = self.get_test_loc("archive/tbz/tarred_bzipped.tar.bz2") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = 
os.path.join(test_dir, 'e/a/b.txt')
+        result = os.path.join(test_dir, "e/a/b.txt")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_basic_bz(self):
-        test_file = self.get_test_loc('archive/tbz/tarred_bzipped.bz')
+        test_file = self.get_test_loc("archive/tbz/tarred_bzipped.bz")
         test_dir = self.get_temp_dir()
         archive.extract_tar(test_file, test_dir)
-        result = os.path.join(test_dir, 'e/a/b.txt')
+        result = os.path.join(test_dir, "e/a/b.txt")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_with_trailing_data__and_wrong_extension(self):
-        test_file = self.get_test_loc('archive/tbz/single_file_trailing_data.tar.gz')
+        test_file = self.get_test_loc("archive/tbz/single_file_trailing_data.tar.gz")
         test_dir = self.get_temp_dir()
         archive.extract_tar(test_file, test_dir)
-        result = os.path.join(test_dir, 'a.txt')
+        result = os.path.join(test_dir, "a.txt")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_broken(self):
-        test_file = self.get_test_loc('archive/tbz/tarred_bzipped_broken.tar.bz2')
+        test_file = self.get_test_loc("archive/tbz/tarred_bzipped_broken.tar.bz2")
         test_dir = self.get_temp_dir()
-        expected = Exception('bzip decompression failed')
+        expected = Exception("bzip decompression failed")
         self.assertRaisesInstance(expected, archive.extract_tar, test_file, test_dir)
 
     def test_extract_tar_bz2_absolute_path(self):
-        assert not os.path.exists('/tmp/subdir')
+        assert not os.path.exists("/tmp/subdir")
         test_dir = self.get_temp_dir()
-        test_file = self.get_test_loc('archive/tbz/absolute_path.tar.bz2')
+        test_file = self.get_test_loc("archive/tbz/absolute_path.tar.bz2")
         archive.extract_tar(test_file, test_dir)
-        assert not os.path.exists('/tmp/subdir')
-        result = os.path.join(test_dir, 'tmp/subdir/a.txt')
+        assert not os.path.exists("/tmp/subdir")
+        result = os.path.join(test_dir, "tmp/subdir/a.txt")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_relative_path(self):
         """
         This test file was created with:
         import tarfile
         tar = tarfile.open("TarTest.tar.gz", "w:bz")
         tar.add('a.txt', '../a_parent_folder.txt')
@@ -550,142 +548,138 @@ def test_extract_tar_bz2_relative_path(self):
         tar.add('b.txt', '../folder/subfolder/b_subfolder.txt')
         tar.close()
         """
-        test_file = self.get_test_loc('archive/tbz/bz2withtar_relative.tar.bz2')
+        test_file = self.get_test_loc("archive/tbz/bz2withtar_relative.tar.bz2")
         test_dir = self.get_temp_dir()
         archive.extract_tar(test_file, test_dir)
-        non_result = os.path.join(test_dir, '../a_parent_folder.txt')
+        non_result = os.path.join(test_dir, "../a_parent_folder.txt")
         assert not os.path.exists(non_result)
 
-        result = os.path.join(test_dir, 'dotdot/folder/subfolder/b_subfolder.txt')
+        result = os.path.join(test_dir, "dotdot/folder/subfolder/b_subfolder.txt")
         assert os.path.exists(result)
-        result = os.path.join(test_dir, 'dotdot', 'a_parent_folder.txt')
+        result = os.path.join(test_dir, "dotdot", "a_parent_folder.txt")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_iproute(self):
-        test_file = self.get_test_loc('archive/tbz/iproute2.tar.bz2')
+        test_file = self.get_test_loc("archive/tbz/iproute2.tar.bz2")
         test_dir = self.get_temp_dir()
         archive.extract_tar(test_file, test_dir)
-        result = os.path.join(test_dir, 'iproute2/README')
+        result = os.path.join(test_dir, "iproute2/README")
         assert os.path.exists(result)
 
     def test_extract_tar_bz2_multistream(self):
-        test_file = self.get_test_loc('archive/tbz/bzip2_multistream/example-file.csv.tar.bz2')
+        test_file =
self.get_test_loc("archive/tbz/bzip2_multistream/example-file.csv.tar.bz2") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - expected = self.get_test_loc('archive/tbz/bzip2_multistream/example-file.csv') - result = os.path.join(test_dir, 'example-file.csv') - assert open(expected, 'rb').read() == open(result, 'rb').read() + expected = self.get_test_loc("archive/tbz/bzip2_multistream/example-file.csv") + result = os.path.join(test_dir, "example-file.csv") + assert open(expected, "rb").read() == open(result, "rb").read() class TestUncompressBz2(BaseArchiveTestCase): - def test_uncompress_bzip2_basic(self): - test_file = self.get_test_loc('archive/bz2/single_file_not_tarred.bz2') + test_file = self.get_test_loc("archive/bz2/single_file_not_tarred.bz2") test_dir = self.get_temp_dir() archive.uncompress_bzip2(test_file, test_dir) - result = os.path.join(test_dir, 'single_file_not_tarred.bz2-extract') + result = os.path.join(test_dir, "single_file_not_tarred.bz2-extract") assert os.path.exists(result) def test_uncompress_bzip2_with_trailing_data(self): - test_file = self.get_test_loc('archive/bz2/single_file_trailing_data.bz2') + test_file = self.get_test_loc("archive/bz2/single_file_trailing_data.bz2") test_dir = self.get_temp_dir() archive.uncompress_bzip2(test_file, test_dir) - result = os.path.join(test_dir, 'single_file_trailing_data.bz2-extract') + result = os.path.join(test_dir, "single_file_trailing_data.bz2-extract") assert os.path.exists(result) def test_uncompress_bzip2_broken(self): - test_file = self.get_test_loc('archive/bz2/bz2_not_tarred_broken.bz2') + test_file = self.get_test_loc("archive/bz2/bz2_not_tarred_broken.bz2") test_dir = self.get_temp_dir() - expected = Exception('Invalid data stream') + expected = Exception("Invalid data stream") self.assertRaisesInstance(expected, archive.uncompress_bzip2, test_file, test_dir) def test_uncompress_bzip2_with_invalid_path(self): - test_file = self.get_test_loc('archive/bz2/bz_invalidpath.bz2') + test_file = self.get_test_loc("archive/bz2/bz_invalidpath.bz2") test_dir = self.get_temp_dir() archive.uncompress_bzip2(test_file, test_dir) - result = os.path.join(test_dir, 'bz_invalidpath.bz2-extract') + result = os.path.join(test_dir, "bz_invalidpath.bz2-extract") assert os.path.exists(result) def test_uncompress_bzip2_multistream(self): - test_file = self.get_test_loc('archive/bz2/bzip2_multistream/example-file.csv.bz2') + test_file = self.get_test_loc("archive/bz2/bzip2_multistream/example-file.csv.bz2") test_dir = self.get_temp_dir() archive.uncompress_bzip2(test_file, test_dir) - expected = self.get_test_loc('archive/bz2/bzip2_multistream/expected.csv') - result = os.path.join(test_dir, 'example-file.csv.bz2-extract') - assert open(expected, 'rb').read() == open(result, 'rb').read() + expected = self.get_test_loc("archive/bz2/bzip2_multistream/expected.csv") + result = os.path.join(test_dir, "example-file.csv.bz2-extract") + assert open(expected, "rb").read() == open(result, "rb").read() class TestSevenzipBz2(BaseArchiveTestCase): - def test_sevenzip_extract_can_handle_bz2_multistream_differently(self): - test_file = self.get_test_loc('archive/bz2/bzip2_multistream/example-file.csv.bz2') + test_file = self.get_test_loc("archive/bz2/bzip2_multistream/example-file.csv.bz2") test_dir = self.get_temp_dir() sevenzip.extract(test_file, test_dir) - expected = self.get_test_loc('archive/bz2/bzip2_multistream/expected.csv') + expected = self.get_test_loc("archive/bz2/bzip2_multistream/expected.csv") # the extraction dir 
is not created with suffix by 7z - result = os.path.join(test_dir, 'example-file.csv') - expected_extracted = open(expected, 'rb').read() - expected_result = open(result, 'rb').read() - assert expected_extracted == expected_result + result = os.path.join(test_dir, "example-file.csv") + expected_extracted = open(expected, "rb").read() + expected_result = open(result, "rb").read() + assert expected_extracted == expected_result class TestShellArchives(BaseArchiveTestCase): - def test_extract_springboot(self): # a self executable springboot Jar is a zip with a shell script prefix - test_file = self.get_test_loc('archive/shar/demo-spring-boot.jar') + test_file = self.get_test_loc("archive/shar/demo-spring-boot.jar") test_dir = self.get_temp_dir() result = archive.extract_springboot(test_file, test_dir) assert [] == result - expected = ['META-INF/MANIFEST.MF', 'application.properties'] + expected = ["META-INF/MANIFEST.MF", "application.properties"] check_files(test_dir, expected) def test_springboot_is_not_recognized_without_jar_extension(self): - test_file = self.get_test_loc('archive/shar/demo-spring-boot.sh') + test_file = self.get_test_loc("archive/shar/demo-spring-boot.sh") handler = archive.get_best_handler(test_file) assert None == handler def test_springboot_is_recognized_with_jar_extension(self): - test_file = self.get_test_loc('archive/shar/demo-spring-boot.jar') + test_file = self.get_test_loc("archive/shar/demo-spring-boot.jar") handler = archive.get_best_handler(test_file) - assert handler.name == 'Springboot Java Jar package' + assert handler.name == "Springboot Java Jar package" class TestZip(BaseArchiveTestCase): - def test_extract_zip_basic(self): - test_file = self.get_test_loc('archive/zip/basic.zip') + test_file = self.get_test_loc("archive/zip/basic.zip") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result - expected = ['c/a/a.txt', 'c/b/a.txt', 'c/c/a.txt'] + expected = ["c/a/a.txt", "c/b/a.txt", "c/c/a.txt"] check_files(test_dir, expected) def test_extract_zip_broken(self): - test_file = self.get_test_loc('archive/zip/zip_broken.zip') + test_file = self.get_test_loc("archive/zip/zip_broken.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) # note: broken zip opens and extracts with 7z sometimes assert [] == os.listdir(test_dir) def test_extract_zip_with_invalid_path(self): - test_file = self.get_test_loc('archive/zip/zip_invalidpath.zip') + test_file = self.get_test_loc("archive/zip/zip_invalidpath.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) - result = os.path.join(test_dir, 'this/that') + result = os.path.join(test_dir, "this/that") assert os.path.exists(result) def test_extract_zip_with_trailing_data(self): - test_file = self.get_test_loc('archive/zip/zip_trailing_data.zip') + test_file = self.get_test_loc("archive/zip/zip_trailing_data.zip") test_dir = self.get_temp_dir() try: archive.extract_zip(test_file, test_dir) except libarchive2.ArchiveError as ae: - assert 'Invalid central directory signature' in str(ae) + assert "Invalid central directory signature" in str(ae) # fails because of https://github.com/libarchive/libarchive/issues/545 - result = os.path.join(test_dir, 'a.txt') + result = os.path.join(test_dir, "a.txt") assert os.path.exists(result) def test_extract_zip_with_trailing_data2(self): @@ -693,14 +687,14 @@ def test_extract_zip_with_trailing_data2(self): # $ echo "test content" > f1 # $ zip test f1 # $ echo "some junk" >> test.zip - test_file = 
self.get_test_loc('archive/zip/zip_trailing_data2.zip') + test_file = self.get_test_loc("archive/zip/zip_trailing_data2.zip") test_dir = self.get_temp_dir() try: archive.extract_zip(test_file, test_dir) except libarchive2.ArchiveError as ae: - assert 'Invalid central directory signature' in str(ae) + assert "Invalid central directory signature" in str(ae) # fails because of https://github.com/libarchive/libarchive/issues/545 - result = os.path.join(test_dir, 'f1') + result = os.path.join(test_dir, "f1") assert os.path.exists(result) def test_extract_zip_with_relative_path_using_default_function(self): @@ -724,105 +718,106 @@ def test_extract_zip_with_relative_path_using_default_function(self): # f.write('/tmp/a.txt', ('../' * 12) + ('sub/' * 12) + 'a_parent_folder_in_sub_3.txt') # f.close() - test_file = self.get_test_loc('archive/zip/relative_parent_folders.zip') + test_file = self.get_test_loc("archive/zip/relative_parent_folders.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) - abs_path = os.path.join(test_dir , '../a_parent_folder.txt') + abs_path = os.path.join(test_dir, "../a_parent_folder.txt") assert not os.path.exists(abs_path) result = self.collect_extracted_path(test_dir) expected = [ - '/dotdot/', - '/dotdot/a_parent_folder.txt', - '/dotdot/dotdot/', - '/dotdot/dotdot/another_folder/', - '/dotdot/dotdot/another_folder/b_two_root.txt', - '/dotdot/folder/', - '/dotdot/folder/subfolder/', - '/dotdot/folder/subfolder/b_subfolder.txt' + "/dotdot/", + "/dotdot/a_parent_folder.txt", + "/dotdot/dotdot/", + "/dotdot/dotdot/another_folder/", + "/dotdot/dotdot/another_folder/b_two_root.txt", + "/dotdot/folder/", + "/dotdot/folder/subfolder/", + "/dotdot/folder/subfolder/b_subfolder.txt", ] assert expected == result def test_extract_zip_with_relative_path_using_libarchive(self): - test_file = self.get_test_loc('archive/zip/relative_parent_folders.zip') + test_file = self.get_test_loc("archive/zip/relative_parent_folders.zip") test_dir = self.get_temp_dir() result = libarchive2.extract(test_file, test_dir) assert [] == result - abs_path = os.path.join(test_dir , '../a_parent_folder.txt') + abs_path = os.path.join(test_dir, "../a_parent_folder.txt") assert not os.path.exists(abs_path) - result = os.path.join(test_dir, 'dotdot/folder/subfolder/b_subfolder.txt') + result = os.path.join(test_dir, "dotdot/folder/subfolder/b_subfolder.txt") assert os.path.exists(result) - result = os.path.join(test_dir, 'dotdot/a_parent_folder.txt') + result = os.path.join(test_dir, "dotdot/a_parent_folder.txt") assert os.path.exists(result) - result = os.path.join(test_dir, 'dotdot/dotdot/another_folder/b_two_root.txt') + result = os.path.join(test_dir, "dotdot/dotdot/another_folder/b_two_root.txt") assert os.path.exists(result) expected_deeply_nested_relative_path = [ - '/dotdot/', - '/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/', - 
'/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/a_parent_folder.txt', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - '/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt' + "/dotdot/", + "/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/", + 
"/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/a_parent_folder.txt", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt", ] # somehow Windows fails randomly and only on certain windows machines at Appveyor # so we retest with a skinny expectation expected_deeply_nested_relative_path_alternative = [ - u'/a_parent_folder.txt', - u'/sub/', - u'/sub/sub/', - u'/sub/sub/sub/', - u'/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt', - u'/sub/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/', - 
u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt', - u'/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt'] + "/a_parent_folder.txt", + "/sub/", + "/sub/sub/", + "/sub/sub/sub/", + "/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt", + "/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt", + "/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt", + ] def test_extract_zip_with_relative_path_deeply_nested(self): - test_file = self.get_test_loc('archive/zip/relative_nested.zip') + test_file = self.get_test_loc("archive/zip/relative_nested.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) result = self.collect_extracted_path(test_dir) @@ -832,131 +827,142 @@ def test_extract_zip_with_relative_path_deeply_nested(self): except: assert self.expected_deeply_nested_relative_path_alternative == result - @pytest.mark.xfail(reason='Expectations are different on Windows and this may fail on Windows') + @pytest.mark.xfail(reason="Expectations are different on Windows and this may fail on Windows") def test_extract_zip_with_relative_path_deeply_nested_with_7zip_posix_py3(self): - test_file = self.get_test_loc('archive/zip/relative_nested.zip') + test_file = self.get_test_loc("archive/zip/relative_nested.zip") test_dir = self.get_temp_dir() try: sevenzip.extract(test_file, test_dir) - self.fail('Should raise an exception') + self.fail("Should raise an exception") except ExtractErrorFailedToExtract as e: - assert 'Unknown extraction error' == str(e) + assert "Unknown extraction error" == str(e) - @pytest.mark.skipif(not on_windows, reason='Expectations are different on Windows') + @pytest.mark.skipif(not on_windows, reason="Expectations are different on Windows") def test_extract_zip_with_relative_path_deeply_nested_with_7zip_windows(self): - test_file = self.get_test_loc('archive/zip/relative_nested.zip') + test_file = self.get_test_loc("archive/zip/relative_nested.zip") test_dir = self.get_temp_dir() sevenzip.extract(test_file, test_dir) result = self.collect_extracted_path(test_dir) assert self.expected_deeply_nested_relative_path_alternative == result def test_list_zip_with_relative_path_deeply_nested_with_7zip(self): - test_file = self.get_test_loc('archive/zip/relative_nested.zip') + test_file = self.get_test_loc("archive/zip/relative_nested.zip") result = [] entries, errors = sevenzip.list_entries(test_file) assert not errors for entry in entries: if on_windows: - entry.path = entry.path.replace('\\', '/') + entry.path = entry.path.replace("\\", "/") result.append(entry.to_dict(full=False)) expected = [ - {'errors': [], - u'is_broken_link': False, - u'is_dir': False, - u'is_file': True, - u'is_hardlink': False, - u'is_special': False, - u'is_symlink': False, - u'link_target': None, - u'path': '../../../../../../../../../../../../a_parent_folder.txt', - u'size': '9'}, - {'errors': [], - u'is_broken_link': False, - u'is_dir': False, - u'is_file': True, - u'is_hardlink': False, - u'is_special': False, - u'is_symlink': False, - u'link_target': None, - u'path': 
'../../../../../../../../../../../../sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt', - u'size': '9'}, - {'errors': [], - u'is_broken_link': False, - u'is_dir': False, - u'is_file': True, - u'is_hardlink': False, - u'is_special': False, - u'is_symlink': False, - u'link_target': None, - u'path': '../../../../../../sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt', - u'size': '9'}, - {'errors': [], - u'is_broken_link': False, - u'is_dir': False, - u'is_file': True, - u'is_hardlink': False, - u'is_special': False, - u'is_symlink': False, - u'link_target': None, - u'path': '../../../../../../../../../../../../sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt', - u'size': '9'}] + { + "errors": [], + "is_broken_link": False, + "is_dir": False, + "is_file": True, + "is_hardlink": False, + "is_special": False, + "is_symlink": False, + "link_target": None, + "path": "../../../../../../../../../../../../a_parent_folder.txt", + "size": "9", + }, + { + "errors": [], + "is_broken_link": False, + "is_dir": False, + "is_file": True, + "is_hardlink": False, + "is_special": False, + "is_symlink": False, + "link_target": None, + "path": "../../../../../../../../../../../../sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_1.txt", + "size": "9", + }, + { + "errors": [], + "is_broken_link": False, + "is_dir": False, + "is_file": True, + "is_hardlink": False, + "is_special": False, + "is_symlink": False, + "link_target": None, + "path": "../../../../../../sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_2.txt", + "size": "9", + }, + { + "errors": [], + "is_broken_link": False, + "is_dir": False, + "is_file": True, + "is_hardlink": False, + "is_special": False, + "is_symlink": False, + "link_target": None, + "path": "../../../../../../../../../../../../sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/sub/a_parent_folder_in_sub_3.txt", + "size": "9", + }, + ] assert expected == result def test_extract_zip_with_relative_path_deeply_nested_with_libarchive(self): - test_file = self.get_test_loc('archive/zip/relative_nested.zip') + test_file = self.get_test_loc("archive/zip/relative_nested.zip") test_dir = self.get_temp_dir() libarchive2.extract(test_file, test_dir) result = self.collect_extracted_path(test_dir) assert self.expected_deeply_nested_relative_path == result def test_extract_zip_with_password(self): - test_file = self.get_test_loc('archive/zip/zip_password_nexb.zip') + test_file = self.get_test_loc("archive/zip/zip_password_nexb.zip") test_dir = self.get_temp_dir() try: archive.extract_zip(test_file, test_dir) except Exception as e: assert isinstance(e, ExtractErrorFailedToExtract) - assert 'Password protected archive, unable to extract' in str(e) + assert "Password protected archive, unable to extract" in str(e) def test_extract_zip_java_jar(self): - test_file = self.get_test_loc('archive/zip/jar/simple.jar') + test_file = self.get_test_loc("archive/zip/jar/simple.jar") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) expected = [ - '/META-INF/', - '/META-INF/MANIFEST.MF', - '/org/', - '/org/jvnet/', - '/org/jvnet/glassfish/', - '/org/jvnet/glassfish/comms/', - '/org/jvnet/glassfish/comms/sipagent/', - '/org/jvnet/glassfish/comms/sipagent/actions/', - '/org/jvnet/glassfish/comms/sipagent/actions/Bundle.properties', - '/org/jvnet/glassfish/comms/sipagent/actions/SipAgentCookieAction.class', - '/org/jvnet/glassfish/comms/sipagent/actions/bd.png', - 
'/org/jvnet/glassfish/comms/sipagent/actions/bd24.png',
-            '/org/jvnet/glassfish/comms/sipagent/org-jvnet-glassfish-comms-sipagent-actions-SipAgentCookieAction.instance',
-            '/org/jvnet/glassfish/comms/sipagent/org-jvnet-glassfish-comms-sipagent-actions-SipAgentCookieAction_1.instance'
+            "/META-INF/",
+            "/META-INF/MANIFEST.MF",
+            "/org/",
+            "/org/jvnet/",
+            "/org/jvnet/glassfish/",
+            "/org/jvnet/glassfish/comms/",
+            "/org/jvnet/glassfish/comms/sipagent/",
+            "/org/jvnet/glassfish/comms/sipagent/actions/",
+            "/org/jvnet/glassfish/comms/sipagent/actions/Bundle.properties",
+            "/org/jvnet/glassfish/comms/sipagent/actions/SipAgentCookieAction.class",
+            "/org/jvnet/glassfish/comms/sipagent/actions/bd.png",
+            "/org/jvnet/glassfish/comms/sipagent/actions/bd24.png",
+            "/org/jvnet/glassfish/comms/sipagent/org-jvnet-glassfish-comms-sipagent-actions-SipAgentCookieAction.instance",
+            "/org/jvnet/glassfish/comms/sipagent/org-jvnet-glassfish-comms-sipagent-actions-SipAgentCookieAction_1.instance",
         ]
         assert sorted(expected) == sorted(extracted)

     def test_extract_zip_with_duplicated_lowercase_paths(self):
-        test_file = self.get_test_loc('archive/zip/dup_names.zip')
-        expected = {'META-INF/license/': None,  # a directory
-                    'META-INF/license/LICENSE.base64.txt': 1618,
-                    'META-INF/LICENSE_1': 11366}
+        test_file = self.get_test_loc("archive/zip/dup_names.zip")
+        expected = {
+            "META-INF/license/": None,  # a directory
+            "META-INF/license/LICENSE.base64.txt": 1618,
+            "META-INF/LICENSE_1": 11366,
+        }
         self.check_extract(archive.extract_zip, test_file, expected)

     def test_extract_zip_with_timezone(self):
-        test_file = self.get_test_loc('archive/zip/timezone/c.zip')
+        test_file = self.get_test_loc("archive/zip/timezone/c.zip")
         test_dir = self.get_temp_dir()
         archive.extract_zip(test_file, test_dir)
         expected = [
-            (os.path.join(test_dir, 'c/a/a.txt'), '2008-07-29'),
-            (os.path.join(test_dir, 'c/b/a.txt'), '2008-07-29'),
-            (os.path.join(test_dir, 'c/c/a.txt'), '2008-07-29'),
+            (os.path.join(test_dir, "c/a/a.txt"), "2008-07-29"),
+            (os.path.join(test_dir, "c/b/a.txt"), "2008-07-29"),
+            (os.path.join(test_dir, "c/c/a.txt"), "2008-07-29"),
         ]
         # DST sends a monkey wrench.... so we only test the date, not the time
         for loc, expected_date in expected:
@@ -964,37 +970,49 @@ def test_extract_zip_with_timezone(self):
         assert result.startswith(expected_date)

     def test_extract_zip_with_timezone_2(self):
-        test_file = self.get_test_loc('archive/zip/timezone/projecttest.zip')
+        test_file = self.get_test_loc("archive/zip/timezone/projecttest.zip")
         test_dir = self.get_temp_dir()
         archive.extract_zip(test_file, test_dir)
         # DST sends a monkey wrench.... so we only test the date, not the time
         # and we accept some variation in the date ...
expected = [ - (os.path.join(test_dir, 'primes.txt'), ('2009-12-05', '2009-12-06',)), - (os.path.join(test_dir, 'primes2.txt'), ('2009-12-05', '2009-12-06',)) + ( + os.path.join(test_dir, "primes.txt"), + ( + "2009-12-05", + "2009-12-06", + ), + ), + ( + os.path.join(test_dir, "primes2.txt"), + ( + "2009-12-05", + "2009-12-06", + ), + ), ] for loc, expected_date in expected: result = commoncode_date.get_file_mtime(loc) assert result.startswith(expected_date) def test_extract_zip_with_backslash_in_path_1(self): - test_file = self.get_test_loc('archive/zip/backslash/backslash1.zip') + test_file = self.get_test_loc("archive/zip/backslash/backslash1.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) # Info-ZIP 'zip' displays: # warning: booxw-1202-bin.distribution.zip appears to use # backslashes as path separators (which is the right thing to do) - expected = ['scripts/AutomaticClose.int'] + expected = ["scripts/AutomaticClose.int"] check_files(test_dir, expected) - result = os.path.join(test_dir, 'scripts/AutomaticClose.int') + result = os.path.join(test_dir, "scripts/AutomaticClose.int") assert os.path.exists(result) def test_extract_zip_with_backslash_in_path_2(self): - test_file = self.get_test_loc('archive/zip/backslash/AspectJTest.zip') + test_file = self.get_test_loc("archive/zip/backslash/AspectJTest.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) - expected = ''' + expected = """ AspectJTest/.classpath AspectJTest/.project AspectJTest/src/META-INF/aop.xml @@ -1017,96 +1035,94 @@ def test_extract_zip_with_backslash_in_path_2(self): AspectJTest/bin/p2/MyLoggingAspect.class AspectJTest/bin/p1/Main1.class AspectJTest/bin/p1/MyService.class - '''.split() + """.split() check_files(test_dir, expected) def test_extract_zip_with_backslash_in_path_3(self): - test_file = self.get_test_loc('archive/zip/backslash/boo-0.3-src.zip') + test_file = self.get_test_loc("archive/zip/backslash/boo-0.3-src.zip") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) - result = os.path.join(test_dir, 'src/Boo.Lang.Compiler/TypeSystem/InternalCallableType.cs') + result = os.path.join(test_dir, "src/Boo.Lang.Compiler/TypeSystem/InternalCallableType.cs") assert os.path.exists(result) def test_get_best_handler_nuget_is_selected_over_zip(self): - test_file = self.get_test_loc('archive/zip/moq.4.2.1507.118.nupkg') + test_file = self.get_test_loc("archive/zip/moq.4.2.1507.118.nupkg") handler = archive.get_best_handler(test_file) assert archive.NugetHandler == handler def test_get_best_handler_nuget_is_selected_over_zip2(self): - test_file = self.get_test_loc('archive/zip/exceptionhero.javascript.1.0.5.nupkg') + test_file = self.get_test_loc("archive/zip/exceptionhero.javascript.1.0.5.nupkg") handler = archive.get_best_handler(test_file) assert archive.NugetHandler == handler def test_get_best_handler_nuget_is_selected_over_zip3(self): - test_file = self.get_test_loc('archive/zip/javascript-fastclass.1.1.729.121805.nupkg') + test_file = self.get_test_loc("archive/zip/javascript-fastclass.1.1.729.121805.nupkg") handler = archive.get_best_handler(test_file) assert archive.NugetHandler == handler def test_extract_zip_can_extract_windows_media_player_skins(self): - test_file = self.get_test_loc('archive/wmz/Go.wmz') + test_file = self.get_test_loc("archive/wmz/Go.wmz") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result - expected = ['32px.png', 'go.js', 'go.wms'] + expected = ["32px.png", "go.js", 
"go.wms"] check_files(test_dir, expected) def test_extract_zip_with_unicode_path_should_extract_without_error(self): - test_file = self.get_test_loc('archive/zip/zip_unicode.zip') + test_file = self.get_test_loc("archive/zip/zip_unicode.zip") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result assert os.listdir(test_dir) def test_extract_zip_can_extract_zip_with_directory_not_marked_with_trailing_slash(self): - test_file = self.get_test_loc('archive/zip/directory-with-no-trailing-slash.zip') + test_file = self.get_test_loc("archive/zip/directory-with-no-trailing-slash.zip") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result - expected = ['online_upgrade_img/machine_type'] + expected = ["online_upgrade_img/machine_type"] check_files(test_dir, expected) class TestTar(BaseArchiveTestCase): - def test_extract_tar_basic(self): - test_file = self.get_test_loc('archive/tar/tarred.tar') + test_file = self.get_test_loc("archive/tar/tarred.tar") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'e/a/b.txt') + result = os.path.join(test_dir, "e/a/b.txt") assert os.path.exists(result) def test_extract_tar_broken(self): - test_file = self.get_test_loc('archive/tar/tarred_broken.tar') + test_file = self.get_test_loc("archive/tar/tarred_broken.tar") test_dir = self.get_temp_dir() expected = Exception("Unrecognized archive format") - self.assertRaisesInstance( - expected, archive.extract_tar, test_file, test_dir) + self.assertRaisesInstance(expected, archive.extract_tar, test_file, test_dir) def test_extract_tar_absolute_path(self): - non_result = '/home/li/Desktop/absolute_folder' + non_result = "/home/li/Desktop/absolute_folder" assert not os.path.exists(non_result) test_dir = self.get_temp_dir() - test_file = self.get_test_loc('archive/tar/tar_absolute.tar') + test_file = self.get_test_loc("archive/tar/tar_absolute.tar") archive.extract_tar(test_file, test_dir) assert not os.path.exists(non_result) - result = os.path.join(test_dir, 'home/li/Desktop/absolute_folder/absolute_file') + result = os.path.join(test_dir, "home/li/Desktop/absolute_folder/absolute_file") assert os.path.exists(result) def test_extract_tar_with_absolute_path2(self): - assert not os.path.exists('/tmp/subdir') + assert not os.path.exists("/tmp/subdir") - test_file = self.get_test_loc('archive/tar/absolute_path.tar') + test_file = self.get_test_loc("archive/tar/absolute_path.tar") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - assert not os.path.exists('/tmp/subdir') - result = os.path.join(test_dir, 'tmp/subdir/a.txt') + assert not os.path.exists("/tmp/subdir") + result = os.path.join(test_dir, "tmp/subdir/a.txt") assert os.path.exists(result) def test_extract_tar_with_relative_path(self): - test_file = self.get_test_loc('archive/tar/tar_relative.tar') + test_file = self.get_test_loc("archive/tar/tar_relative.tar") """ This test file was created with: import tarfile @@ -1118,33 +1134,33 @@ def test_extract_tar_with_relative_path(self): """ test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - non_result = os.path.abspath(test_file + '/../a_parent_folder.txt') + non_result = os.path.abspath(test_file + "/../a_parent_folder.txt") assert not os.path.exists(non_result) extracted = self.collect_extracted_path(test_dir) expected = [ - '/dotdot/', - '/dotdot/dotdot/', - '/dotdot/a_parent_folder.txt', - '/dotdot/dotdot/another_folder/', - 
'/dotdot/dotdot/another_folder/b_two_root.txt', - '/dotdot/folder/', - '/dotdot/folder/subfolder/', - '/dotdot/folder/subfolder/b_subfolder.txt' + "/dotdot/", + "/dotdot/dotdot/", + "/dotdot/a_parent_folder.txt", + "/dotdot/dotdot/another_folder/", + "/dotdot/dotdot/another_folder/b_two_root.txt", + "/dotdot/folder/", + "/dotdot/folder/subfolder/", + "/dotdot/folder/subfolder/b_subfolder.txt", ] assert sorted(expected) == sorted(extracted) def test_extract_tar_archive_with_special_files(self): - test_file = self.get_test_loc('archive/tar/special.tar') + test_file = self.get_test_loc("archive/tar/special.tar") test_dir = self.get_temp_dir() result = archive.extract_tar(test_file, test_dir) expected = [ - '0-REGTYPE', - '0-REGTYPE-TEXT', - '0-REGTYPE-VEEEERY_LONG_NAME_____________________________________________________________________________________________________________________155', + "0-REGTYPE", + "0-REGTYPE-TEXT", + "0-REGTYPE-VEEEERY_LONG_NAME_____________________________________________________________________________________________________________________155", # we skip links but not hardlinks - '1-LNKTYPE', - 'S-SPARSE', - 'S-SPARSE-WITH-NULLS', + "1-LNKTYPE", + "S-SPARSE", + "S-SPARSE-WITH-NULLS", ] check_files(test_dir, expected) # special files are skipped too @@ -1153,118 +1169,120 @@ def test_extract_tar_archive_with_special_files(self): # '6-FIFOTYPE: Skipping special file.' assert [] == result - @pytest.mark.skipif(on_windows, reason='Unicode and/or Long paths are not handled well yet on windows') + @pytest.mark.skipif( + on_windows, reason="Unicode and/or Long paths are not handled well yet on windows" + ) def test_extract_python_testtar_tar_archive_with_special_files(self): - test_file = self.get_test_loc('archive/tar/testtar.tar') + test_file = self.get_test_loc("archive/tar/testtar.tar") # this is from: # https://hg.python.org/cpython/raw-file/bff88c866886/Lib/test/testtar.tar test_dir = self.get_temp_dir() result = archive.extract_tar(test_file, test_dir) - expected_warnings = [u"'pax/bad-pax-äöü': \nPathname can't be converted from UTF-8 to current locale."] + expected_warnings = [ + "'pax/bad-pax-äöü': \nPathname can't be converted from UTF-8 to current locale." 
+ ] assert sorted(expected_warnings) == sorted(result) expected = [ - 'gnu/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longlink', - 'gnu/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longname', - 'gnu/regtype-gnu-uid', - 'gnu/sparse', - 'gnu/sparse-0.0', - 'gnu/sparse-0.1', - 'gnu/sparse-1.0', - 'misc/eof', - 'misc/regtype-hpux-signed-chksum-AOUaouss', - 'misc/regtype-old-v7', - 'misc/regtype-old-v7-signed-chksum-AOUaouss', - 'misc/regtype-suntar', - 'misc/regtype-xstar', - 'pax/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longlink', - 'pax/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longname', - 'pax/bad-pax-aou', - 'pax/hdrcharset-aou', - 'pax/regtype1', - 'pax/regtype2', - 'pax/regtype3', - 'pax/regtype4', - 'pax/umlauts-AOUaouss', - 'ustar/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/1234567/longname', - 'ustar/conttype', - 'ustar/linktest1/regtype', - 'ustar/linktest2/lnktype', - 'ustar/lnktype', - 'ustar/regtype', - 'ustar/sparse', - 'ustar/umlauts-AOUaouss' + "gnu/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longlink", + 
"gnu/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longname", + "gnu/regtype-gnu-uid", + "gnu/sparse", + "gnu/sparse-0.0", + "gnu/sparse-0.1", + "gnu/sparse-1.0", + "misc/eof", + "misc/regtype-hpux-signed-chksum-AOUaouss", + "misc/regtype-old-v7", + "misc/regtype-old-v7-signed-chksum-AOUaouss", + "misc/regtype-suntar", + "misc/regtype-xstar", + "pax/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longlink", + "pax/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/123/longname", + "pax/bad-pax-aou", + "pax/hdrcharset-aou", + "pax/regtype1", + "pax/regtype2", + "pax/regtype3", + "pax/regtype4", + "pax/umlauts-AOUaouss", + "ustar/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/12345/1234567/longname", + "ustar/conttype", + "ustar/linktest1/regtype", + "ustar/linktest2/lnktype", + "ustar/lnktype", + "ustar/regtype", + "ustar/sparse", + "ustar/umlauts-AOUaouss", ] check_files(test_dir, expected) def test_extract_rubygem(self): - test_file = self.get_test_loc('archive/tar/gem/panchira-0.1.1.gem') + test_file = self.get_test_loc("archive/tar/gem/panchira-0.1.1.gem") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - expected = ['checksums.yaml.gz', 'data.tar.gz', 'metadata.gz'] + expected = ["checksums.yaml.gz", "data.tar.gz", "metadata.gz"] check_files(test_dir, expected) class TestDebian(BaseArchiveTestCase): - def test_extract_deb_package_1(self): - test_file = self.get_test_loc('archive/deb/adduser_3.112ubuntu1_all.deb') + test_file = self.get_test_loc("archive/deb/adduser_3.112ubuntu1_all.deb") test_dir = self.get_temp_dir() archive.extract_ar(test_file, test_dir) - check_size(110198, os.path.join(test_dir, 'data.tar.gz')) + check_size(110198, os.path.join(test_dir, "data.tar.gz")) def test_extract_deb_package_2(self): - test_file = self.get_test_loc('archive/deb/adduser_3.113+nmu3ubuntu3_all.deb') + test_file = self.get_test_loc("archive/deb/adduser_3.113+nmu3ubuntu3_all.deb") test_dir = self.get_temp_dir() archive.extract_ar(test_file, test_dir) - 
check_size(158441, os.path.join(test_dir, 'data.tar.gz')) + check_size(158441, os.path.join(test_dir, "data.tar.gz")) def test_get_best_handler_deb_package_is_an_archive(self): - test_file = self.get_test_loc('archive/deb/libjama-dev_1.2.4-2_all.deb') + test_file = self.get_test_loc("archive/deb/libjama-dev_1.2.4-2_all.deb") handler = archive.get_best_handler(test_file) assert archive.DebHandler == handler def test_extract_deb_package_3(self): - test_file = self.get_test_loc('archive/deb/wget-el_0.5.0-8_all.deb') + test_file = self.get_test_loc("archive/deb/wget-el_0.5.0-8_all.deb") test_dir = self.get_temp_dir() archive.extract_ar(test_file, test_dir) - check_size(36376, os.path.join(test_dir, 'data.tar.gz')) + check_size(36376, os.path.join(test_dir, "data.tar.gz")) class TestAr(BaseArchiveTestCase): - def test_extract_ar_basic_7z(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) - expected = ['1.txt', 'main.o', 'yyerror.o'] + expected = ["1.txt", "main.o", "yyerror.o"] check_files(test_dir, expected) assert [] == result def test_extract_ar_basic(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() result = archive.extract_ar(test_file, test_dir) - expected = ['__.SYMDEF', 'main.o', 'yyerror.o'] + expected = ["__.SYMDEF", "main.o", "yyerror.o"] check_files(test_dir, expected) assert [] == result def test_extract_ar_libarchive(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() result = libarchive2.extract(test_file, test_dir) - expected = ['__.SYMDEF', 'main.o', 'yyerror.o'] + expected = ["__.SYMDEF", "main.o", "yyerror.o"] check_files(test_dir, expected) assert [] == result def test_extract_ar_verify_dates(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() archive.extract_ar(test_file, test_dir) expected = [ - (os.path.join(test_dir, 'main.o'), '2007-06-12'), - (os.path.join(test_dir, 'yyerror.o'), '2007-06-12'), + (os.path.join(test_dir, "main.o"), "2007-06-12"), + (os.path.join(test_dir, "yyerror.o"), "2007-06-12"), ] # DST sends a monkey wrench.... 
so we only test the date, not the time
         for loc, expected_date in expected:
@@ -1272,376 +1290,372 @@ def test_extract_ar_verify_dates(self):
         assert result.startswith(expected_date)

     def test_extract_ar_broken_7z(self):
-        test_file = self.get_test_loc('archive/ar/liby-corrupted.a')
+        test_file = self.get_test_loc("archive/ar/liby-corrupted.a")
         test_dir = self.get_temp_dir()
         result = sevenzip.extract(test_file, test_dir)
-        expected = ['__.SYMDEF', 'main.o']
+        expected = ["__.SYMDEF", "main.o"]
         check_files(test_dir, expected)
         assert [] == result

     def test_extract_ar_broken_libarchive(self):
-        test_file = self.get_test_loc('archive/ar/liby-corrupted.a')
+        test_file = self.get_test_loc("archive/ar/liby-corrupted.a")
         test_dir = self.get_temp_dir()
         self.assertRaisesInstance(
-            Exception('Incorrect file header signature'),
-            libarchive2.extract, test_file, test_dir)
-        expected = ['__.SYMDEF', 'main.o']
+            Exception("Incorrect file header signature"), libarchive2.extract, test_file, test_dir
+        )
+        expected = ["__.SYMDEF", "main.o"]
         check_files(test_dir, expected)

     def test_extract_ar_broken(self):
-        test_file = self.get_test_loc('archive/ar/liby-corrupted.a')
+        test_file = self.get_test_loc("archive/ar/liby-corrupted.a")
         test_dir = self.get_temp_dir()
         result = archive.extract_ar(test_file, test_dir)
-        expected = ['__.SYMDEF', 'main.o']
+        expected = ["__.SYMDEF", "main.o"]
         check_files(test_dir, expected)
         assert [] == result

     def test_extract_ar_with_invalid_path(self):
-        test_file = self.get_test_loc('archive/ar/ar_invalidpath.ar')
+        test_file = self.get_test_loc("archive/ar/ar_invalidpath.ar")
         test_dir = self.get_temp_dir()
         result = archive.extract_ar(test_file, test_dir)
-        expected = ['this/that']
+        expected = ["this/that"]
         check_files(test_dir, expected)
         assert [] == result

     def test_extract_ar_with_relative_path_sevenzip(self):
-        test_file = self.get_test_loc('archive/ar/winlib/htmlhelp.lib')
+        test_file = self.get_test_loc("archive/ar/winlib/htmlhelp.lib")
         test_dir = self.get_temp_dir()
         result = sevenzip.extract(test_file, test_dir)
-        expected = [
-            '1.txt',
-            '2.txt',
-            'release/init.obj'
-        ]
+        expected = ["1.txt", "2.txt", "release/init.obj"]
         check_files(test_dir, expected)
         assert [] == result

     def test_extract_ar_with_relative_path_libarch(self):
-        test_file = self.get_test_loc('archive/ar/winlib/htmlhelp.lib')
+        test_file = self.get_test_loc("archive/ar/winlib/htmlhelp.lib")
         test_dir = self.get_temp_dir()
         self.assertRaisesInstance(
-            Exception('Invalid string table'),
-            archive.libarchive2.extract, test_file, test_dir)
+            Exception("Invalid string table"), archive.libarchive2.extract, test_file, test_dir
+        )
         # incorrect for now: need this: ['__.SYMDEF', 'release/init.obj']
-        expected = ['dot', 'dot_1']
+        expected = ["dot", "dot_1"]
         check_files(test_dir, expected)

     def test_extract_ar_with_relative_path_and_backslashes_in_names_libarch(self):
-        test_file = self.get_test_loc('archive/ar/winlib/freetype.lib')
+        test_file = self.get_test_loc("archive/ar/winlib/freetype.lib")
         test_dir = self.get_temp_dir()
         self.assertRaisesInstance(
-            Exception('Invalid string table'),
-            archive.libarchive2.extract, test_file, test_dir)
+            Exception("Invalid string table"), archive.libarchive2.extract, test_file, test_dir
+        )
         # 7zip is better, but has a security bug for now
         # GNU ar works fine otherwise, but there are portability issues
-        expected = ['dot', 'dot_1']
+        expected = ["dot", "dot_1"]
         check_files(test_dir, expected)

     def test_extract_ar_with_relative_path_and_backslashes_in_names_7z(self):
-        test_file = 
self.get_test_loc('archive/ar/winlib/freetype.lib') + test_file = self.get_test_loc("archive/ar/winlib/freetype.lib") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) assert [] == result expected = [ - '1.txt', - '2.txt', - 'objs/debug_mt/autofit.obj', - 'objs/debug_mt/bdf.obj', - 'objs/debug_mt/cff.obj', - 'objs/debug_mt/ftbase.obj', - 'objs/debug_mt/ftbbox.obj', - 'objs/debug_mt/ftbitmap.obj', - 'objs/debug_mt/ftcache.obj', - 'objs/debug_mt/ftdebug.obj', - 'objs/debug_mt/ftgasp.obj', - 'objs/debug_mt/ftglyph.obj', - 'objs/debug_mt/ftgzip.obj', - 'objs/debug_mt/ftinit.obj', - 'objs/debug_mt/ftlzw.obj', - 'objs/debug_mt/ftmm.obj', - 'objs/debug_mt/ftpfr.obj', - 'objs/debug_mt/ftstroke.obj', - 'objs/debug_mt/ftsynth.obj', - 'objs/debug_mt/ftsystem.obj', - 'objs/debug_mt/fttype1.obj', - 'objs/debug_mt/ftwinfnt.obj', - 'objs/debug_mt/pcf.obj', - 'objs/debug_mt/pfr.obj', - 'objs/debug_mt/psaux.obj', - 'objs/debug_mt/pshinter.obj', - 'objs/debug_mt/psmodule.obj', - 'objs/debug_mt/raster.obj', - 'objs/debug_mt/sfnt.obj', - 'objs/debug_mt/smooth.obj', - 'objs/debug_mt/truetype.obj', - 'objs/debug_mt/type1.obj', - 'objs/debug_mt/type1cid.obj', - 'objs/debug_mt/type42.obj', - 'objs/debug_mt/winfnt.obj' + "1.txt", + "2.txt", + "objs/debug_mt/autofit.obj", + "objs/debug_mt/bdf.obj", + "objs/debug_mt/cff.obj", + "objs/debug_mt/ftbase.obj", + "objs/debug_mt/ftbbox.obj", + "objs/debug_mt/ftbitmap.obj", + "objs/debug_mt/ftcache.obj", + "objs/debug_mt/ftdebug.obj", + "objs/debug_mt/ftgasp.obj", + "objs/debug_mt/ftglyph.obj", + "objs/debug_mt/ftgzip.obj", + "objs/debug_mt/ftinit.obj", + "objs/debug_mt/ftlzw.obj", + "objs/debug_mt/ftmm.obj", + "objs/debug_mt/ftpfr.obj", + "objs/debug_mt/ftstroke.obj", + "objs/debug_mt/ftsynth.obj", + "objs/debug_mt/ftsystem.obj", + "objs/debug_mt/fttype1.obj", + "objs/debug_mt/ftwinfnt.obj", + "objs/debug_mt/pcf.obj", + "objs/debug_mt/pfr.obj", + "objs/debug_mt/psaux.obj", + "objs/debug_mt/pshinter.obj", + "objs/debug_mt/psmodule.obj", + "objs/debug_mt/raster.obj", + "objs/debug_mt/sfnt.obj", + "objs/debug_mt/smooth.obj", + "objs/debug_mt/truetype.obj", + "objs/debug_mt/type1.obj", + "objs/debug_mt/type1cid.obj", + "objs/debug_mt/type42.obj", + "objs/debug_mt/winfnt.obj", ] check_files(test_dir, expected) def test_extract_ar_static_library_does_not_delete_symdefs_7z(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) # the symdef file is 1.txt with 7z - expected = ['1.txt', 'main.o', 'yyerror.o'] + expected = ["1.txt", "main.o", "yyerror.o"] check_files(test_dir, expected) assert [] == result def test_extract_ar_static_library_does_not_delete_symdefs(self): - test_file = self.get_test_loc('archive/ar/liby.a') + test_file = self.get_test_loc("archive/ar/liby.a") test_dir = self.get_temp_dir() result = archive.extract_ar(test_file, test_dir) # we use libarchive first - expected = ['__.SYMDEF', 'main.o', 'yyerror.o'] + expected = ["__.SYMDEF", "main.o", "yyerror.o"] check_files(test_dir, expected) assert [] == result def test_extract_ar_with_trailing_data(self): - test_file = self.get_test_loc('archive/ar/ar_trailing.a') + test_file = self.get_test_loc("archive/ar/ar_trailing.a") test_dir = self.get_temp_dir() archive.extract_ar(test_file, test_dir) - result = os.path.join(test_dir, 'main.o') + result = os.path.join(test_dir, "main.o") assert os.path.exists(result) - result = os.path.join(test_dir, 'yyerror.o') 
+ result = os.path.join(test_dir, "yyerror.o") assert os.path.exists(result) def test_extract_ar_with_permissions_7z(self): - test_file = self.get_test_loc('archive/ar/winlib/zlib.lib') + test_file = self.get_test_loc("archive/ar/winlib/zlib.lib") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) - expected = ['1.txt', '1.zlib.pyd', '2.txt', '2.zlib.pyd', '3.zlib.pyd', '4.zlib.pyd'] + expected = ["1.txt", "1.zlib.pyd", "2.txt", "2.zlib.pyd", "3.zlib.pyd", "4.zlib.pyd"] check_files(test_dir, expected) assert [] == result def test_extract_ar_with_permissions(self): - test_file = self.get_test_loc('archive/ar/winlib/zlib.lib') + test_file = self.get_test_loc("archive/ar/winlib/zlib.lib") test_dir = self.get_temp_dir() result = archive.extract_ar(test_file, test_dir) # with 7zip # expected = ['1.txt', '1.zlib.pyd', '2.txt', '2.zlib.pyd', '3.zlib.pyd', '4.zlib.pyd'] # with libarchive - expected = ['dot', 'dot_1', 'zlib.pyd', 'zlib_1.pyd', 'zlib_2.pyd', 'zlib_3.pyd'] + expected = ["dot", "dot_1", "zlib.pyd", "zlib_1.pyd", "zlib_2.pyd", "zlib_3.pyd"] check_files(test_dir, expected) assert [] == result def test_extract_ar_with_permissions_libarchive(self): - test_file = self.get_test_loc('archive/ar/winlib/zlib.lib') + test_file = self.get_test_loc("archive/ar/winlib/zlib.lib") test_dir = self.get_temp_dir() result = libarchive2.extract(test_file, test_dir) assert [] == result - expected = ['dot', 'dot_1', 'zlib.pyd', 'zlib_1.pyd', 'zlib_2.pyd', 'zlib_3.pyd'] + expected = ["dot", "dot_1", "zlib.pyd", "zlib_1.pyd", "zlib_2.pyd", "zlib_3.pyd"] check_files(test_dir, expected) class TestCpio(BaseArchiveTestCase): - def test_extract_cpio_basic(self): - test_file = self.get_test_loc('archive/cpio/elfinfo-1.0-1.fc9.src.cpio') + test_file = self.get_test_loc("archive/cpio/elfinfo-1.0-1.fc9.src.cpio") test_dir = self.get_temp_dir() archive.extract_cpio(test_file, test_dir) - result = os.path.join(test_dir, 'elfinfo-1.0.tar.gz') + result = os.path.join(test_dir, "elfinfo-1.0.tar.gz") assert os.path.exists(result) def test_extract_cpio_with_trailing_data(self): - test_file = self.get_test_loc('archive/cpio/cpio_trailing.cpio') + test_file = self.get_test_loc("archive/cpio/cpio_trailing.cpio") test_dir = self.get_temp_dir() archive.extract_cpio(test_file, test_dir) - result = os.path.join(test_dir, 'elfinfo-1.0.tar.gz') + result = os.path.join(test_dir, "elfinfo-1.0.tar.gz") assert os.path.exists(result) def test_extract_cpio_broken_7z(self): - test_file = self.get_test_loc('archive/cpio/cpio_broken.cpio') + test_file = self.get_test_loc("archive/cpio/cpio_broken.cpio") test_dir = self.get_temp_dir() - self.assertRaisesInstance(Exception('CRC Failed : elfinfo-1.0.tar'), sevenzip.extract, test_file, test_dir) + self.assertRaisesInstance( + Exception("CRC Failed : elfinfo-1.0.tar"), sevenzip.extract, test_file, test_dir + ) def test_extract_cpio_broken2(self): - test_file = self.get_test_loc('archive/cpio/cpio_broken.cpio') + test_file = self.get_test_loc("archive/cpio/cpio_broken.cpio") test_dir = self.get_temp_dir() result = archive.extract_cpio(test_file, test_dir) - expected = sorted(['elfinfo-1.0.tar.gz', 'elfinfo.spec']) + expected = sorted(["elfinfo-1.0.tar.gz", "elfinfo.spec"]) assert expected == sorted(os.listdir(test_dir)) assert ["'elfinfo.spec': \nSkipped 72 bytes before finding valid header"] == result def test_extract_cpio_with_absolute_path(self): - assert not os.path.exists('/tmp/subdir') + assert not os.path.exists("/tmp/subdir") test_dir = self.get_temp_dir() - 
test_file = self.get_test_loc('archive/cpio/cpio_absolute.cpio') + test_file = self.get_test_loc("archive/cpio/cpio_absolute.cpio") archive.extract_cpio(test_file, test_dir) - assert not os.path.exists('/tmp/subdir') - result = os.path.join(test_dir, 'home/li/Desktop/absolute_folder/absolute_file') + assert not os.path.exists("/tmp/subdir") + result = os.path.join(test_dir, "home/li/Desktop/absolute_folder/absolute_file") assert os.path.exists(result) def test_extract_cpio_with_relative_path(self): # test file is created by cmd: find ../.. - |cpio -ov >relative.cpio # We should somehow add a "parent" folder to extract relative paths - test_file = self.get_test_loc('archive/cpio/cpio_relative.cpio') + test_file = self.get_test_loc("archive/cpio/cpio_relative.cpio") test_dir = self.get_temp_dir() result = archive.extract_cpio(test_file, test_dir) assert [] == result extracted = self.collect_extracted_path(test_dir) expected = [ - '/dotdot/', - '/dotdot/dotdot/', - '/dotdot/dotdot/2folder/', - '/dotdot/dotdot/2folder/3folder/', - '/dotdot/dotdot/2folder/3folder/cpio_relative.cpio', - '/dotdot/dotdot/2folder/3folder/relative_file', - '/dotdot/dotdot/2folder/3folder/relative_file~', - '/dotdot/dotdot/2folder/relative_file', - '/dotdot/dotdot/relative_file' + "/dotdot/", + "/dotdot/dotdot/", + "/dotdot/dotdot/2folder/", + "/dotdot/dotdot/2folder/3folder/", + "/dotdot/dotdot/2folder/3folder/cpio_relative.cpio", + "/dotdot/dotdot/2folder/3folder/relative_file", + "/dotdot/dotdot/2folder/3folder/relative_file~", + "/dotdot/dotdot/2folder/relative_file", + "/dotdot/dotdot/relative_file", ] assert expected == extracted def test_extract_cpio_with_invalidpath(self): - test_file = self.get_test_loc('archive/cpio/cpio-invalidpath.cpio') + test_file = self.get_test_loc("archive/cpio/cpio-invalidpath.cpio") test_dir = self.get_temp_dir() archive.extract_cpio(test_file, test_dir) - result = os.path.join(test_dir, 'backup') + result = os.path.join(test_dir, "backup") assert os.path.exists(result) - result = os.path.join(test_dir, 'this/that') + result = os.path.join(test_dir, "this/that") assert os.path.exists(result) def test_extract_cpio_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cpio/t.cpio.foo') + test_file = self.get_test_loc("archive/cpio/t.cpio.foo") test_dir = self.get_temp_dir() result = archive.extract_cpio(test_file, test_dir) assert [] == result extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestRpm(BaseArchiveTestCase): - def test_extract_rpm_basic_1(self): - test_file = self.get_test_loc('archive/rpm/elfinfo-1.0-1.fc9.src.rpm') + test_file = self.get_test_loc("archive/rpm/elfinfo-1.0-1.fc9.src.rpm") test_dir = self.get_temp_dir() archive.extract_rpm(test_file, test_dir) - result = os.path.join(test_dir, 'elfinfo-1.0-1.fc9.src.cpio.gz') + result = os.path.join(test_dir, "elfinfo-1.0-1.fc9.src.cpio.gz") assert os.path.exists(result) def test_extract_rpm_basic_2(self): - test_file = self.get_test_loc('archive/rpm/python-glc-0.7.1-1.src.rpm') + test_file = self.get_test_loc("archive/rpm/python-glc-0.7.1-1.src.rpm") test_dir = self.get_temp_dir() archive.extract_rpm(test_file, test_dir) - result = os.path.join(test_dir, 'python-glc-0.7.1-1.src.cpio.gz') + result = os.path.join(test_dir, "python-glc-0.7.1-1.src.cpio.gz") assert os.path.exists(result) def test_extract_rpm_nested_correctly(self): - test_file = 
self.get_test_loc('archive/rpm/extract_once/libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.rpm') + test_file = self.get_test_loc( + "archive/rpm/extract_once/libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.rpm" + ) test_dir = self.get_temp_dir() archive.extract_rpm(test_file, test_dir) - result = os.path.join(test_dir, 'libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.cpio.lzma') + result = os.path.join(test_dir, "libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.cpio.lzma") assert os.path.exists(result) def test_extract_rpm_with_trailing_data(self): - test_file = self.get_test_loc('archive/rpm/rpm_trailing.rpm') + test_file = self.get_test_loc("archive/rpm/rpm_trailing.rpm") test_dir = self.get_temp_dir() result = archive.extract_rpm(test_file, test_dir) - expected = ['elfinfo-1.0-1.fc9.src.cpio.gz'] + expected = ["elfinfo-1.0-1.fc9.src.cpio.gz"] check_files(test_dir, expected) assert [] == result def test_extract_rpm_with_renamed_content(self): # When the RPM is renamed, we should still be able to find the cpio - test_file = self.get_test_loc('archive/rpm/renamed.rpm') + test_file = self.get_test_loc("archive/rpm/renamed.rpm") test_dir = self.get_temp_dir() result = archive.extract_rpm(test_file, test_dir) - expected = ['python-glc-0.7.1-1.src.cpio.gz'] + expected = ["python-glc-0.7.1-1.src.cpio.gz"] check_files(test_dir, expected) assert [] == result def test_extract_rpm_broken(self): - test_file = self.get_test_loc('archive/rpm/broken.rpm') + test_file = self.get_test_loc("archive/rpm/broken.rpm") test_dir = self.get_temp_dir() - expected = Exception('CRC Failed : broken') - self.assertRaisesInstance(expected, archive.extract_rpm, - test_file, test_dir) + expected = Exception("CRC Failed : broken") + self.assertRaisesInstance(expected, archive.extract_rpm, test_file, test_dir) class TestExtractTwice(BaseArchiveTestCase): - def test_extract_twice_with_rpm_with_xz_compressed_cpio(self): - test_file = self.get_test_loc('archive/rpm/xz-compressed-cpio.rpm') + test_file = self.get_test_loc("archive/rpm/xz-compressed-cpio.rpm") test_dir = self.get_temp_dir() # this will return an extractor that extracts twice extractor = archive.get_extractor(test_file) result = list(extractor(test_file, test_dir)) assert [] == result expected = [ - 'etc/abrt/abrt-action-save-package-data.conf', - 'etc/abrt/abrt.conf', - 'etc/abrt/gpg_keys', - 'etc/dbus-1/system.d/dbus-abrt.conf', - 'etc/libreport/events.d/abrt_event.conf', - 'etc/libreport/events.d/smart_event.conf', - 'etc/rc.d/init.d/abrtd', - 'usr/bin/abrt-action-save-package-data', - 'usr/bin/abrt-handle-upload', - 'usr/libexec/abrt-handle-event', - 'usr/libexec/abrt1-to-abrt2', - 'usr/sbin/abrt-dbus', - 'usr/sbin/abrt-server', - 'usr/sbin/abrtd', - 'usr/share/dbus-1/system-services/com.redhat.abrt.service', - 'usr/share/doc/abrt-2.0.8/COPYING', - 'usr/share/doc/abrt-2.0.8/README', - 'usr/share/locale/ar/LC_MESSAGES/abrt.mo', - 'usr/share/locale/as/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ast/LC_MESSAGES/abrt.mo', - 'usr/share/locale/bg/LC_MESSAGES/abrt.mo', - 'usr/share/locale/bn_IN/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ca/LC_MESSAGES/abrt.mo', - 'usr/share/locale/cs/LC_MESSAGES/abrt.mo', - 'usr/share/locale/da/LC_MESSAGES/abrt.mo', - 'usr/share/locale/de/LC_MESSAGES/abrt.mo', - 'usr/share/locale/el/LC_MESSAGES/abrt.mo', - 'usr/share/locale/en_GB/LC_MESSAGES/abrt.mo', - 'usr/share/locale/es/LC_MESSAGES/abrt.mo', - 'usr/share/locale/fa/LC_MESSAGES/abrt.mo', - 'usr/share/locale/fi/LC_MESSAGES/abrt.mo', - 'usr/share/locale/fr/LC_MESSAGES/abrt.mo', - 'usr/share/locale/gu/LC_MESSAGES/abrt.mo', 
- 'usr/share/locale/he/LC_MESSAGES/abrt.mo', - 'usr/share/locale/hi/LC_MESSAGES/abrt.mo', - 'usr/share/locale/hu/LC_MESSAGES/abrt.mo', - 'usr/share/locale/id/LC_MESSAGES/abrt.mo', - 'usr/share/locale/it/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ja/LC_MESSAGES/abrt.mo', - 'usr/share/locale/kn/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ko/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ml/LC_MESSAGES/abrt.mo', - 'usr/share/locale/mr/LC_MESSAGES/abrt.mo', - 'usr/share/locale/nb/LC_MESSAGES/abrt.mo', - 'usr/share/locale/nl/LC_MESSAGES/abrt.mo', - 'usr/share/locale/or/LC_MESSAGES/abrt.mo', - 'usr/share/locale/pa/LC_MESSAGES/abrt.mo', - 'usr/share/locale/pl/LC_MESSAGES/abrt.mo', - 'usr/share/locale/pt/LC_MESSAGES/abrt.mo', - 'usr/share/locale/pt_BR/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ru/LC_MESSAGES/abrt.mo', - 'usr/share/locale/sk/LC_MESSAGES/abrt.mo', - 'usr/share/locale/sr/LC_MESSAGES/abrt.mo', - 'usr/share/locale/sr@latin/LC_MESSAGES/abrt.mo', - 'usr/share/locale/sv/LC_MESSAGES/abrt.mo', - 'usr/share/locale/ta/LC_MESSAGES/abrt.mo', - 'usr/share/locale/te/LC_MESSAGES/abrt.mo', - 'usr/share/locale/uk/LC_MESSAGES/abrt.mo', - 'usr/share/locale/zh_CN/LC_MESSAGES/abrt.mo', - 'usr/share/locale/zh_TW/LC_MESSAGES/abrt.mo', - 'usr/share/man/man1/abrt-action-save-package-data.1.gz', - 'usr/share/man/man1/abrt-handle-upload.1.gz', - 'usr/share/man/man1/abrt-server.1.gz', - 'usr/share/man/man5/abrt-action-save-package-data.conf.5.gz', - 'usr/share/man/man5/abrt.conf.5.gz', - 'usr/share/man/man8/abrt-dbus.8.gz', - 'usr/share/man/man8/abrtd.8.gz' + "etc/abrt/abrt-action-save-package-data.conf", + "etc/abrt/abrt.conf", + "etc/abrt/gpg_keys", + "etc/dbus-1/system.d/dbus-abrt.conf", + "etc/libreport/events.d/abrt_event.conf", + "etc/libreport/events.d/smart_event.conf", + "etc/rc.d/init.d/abrtd", + "usr/bin/abrt-action-save-package-data", + "usr/bin/abrt-handle-upload", + "usr/libexec/abrt-handle-event", + "usr/libexec/abrt1-to-abrt2", + "usr/sbin/abrt-dbus", + "usr/sbin/abrt-server", + "usr/sbin/abrtd", + "usr/share/dbus-1/system-services/com.redhat.abrt.service", + "usr/share/doc/abrt-2.0.8/COPYING", + "usr/share/doc/abrt-2.0.8/README", + "usr/share/locale/ar/LC_MESSAGES/abrt.mo", + "usr/share/locale/as/LC_MESSAGES/abrt.mo", + "usr/share/locale/ast/LC_MESSAGES/abrt.mo", + "usr/share/locale/bg/LC_MESSAGES/abrt.mo", + "usr/share/locale/bn_IN/LC_MESSAGES/abrt.mo", + "usr/share/locale/ca/LC_MESSAGES/abrt.mo", + "usr/share/locale/cs/LC_MESSAGES/abrt.mo", + "usr/share/locale/da/LC_MESSAGES/abrt.mo", + "usr/share/locale/de/LC_MESSAGES/abrt.mo", + "usr/share/locale/el/LC_MESSAGES/abrt.mo", + "usr/share/locale/en_GB/LC_MESSAGES/abrt.mo", + "usr/share/locale/es/LC_MESSAGES/abrt.mo", + "usr/share/locale/fa/LC_MESSAGES/abrt.mo", + "usr/share/locale/fi/LC_MESSAGES/abrt.mo", + "usr/share/locale/fr/LC_MESSAGES/abrt.mo", + "usr/share/locale/gu/LC_MESSAGES/abrt.mo", + "usr/share/locale/he/LC_MESSAGES/abrt.mo", + "usr/share/locale/hi/LC_MESSAGES/abrt.mo", + "usr/share/locale/hu/LC_MESSAGES/abrt.mo", + "usr/share/locale/id/LC_MESSAGES/abrt.mo", + "usr/share/locale/it/LC_MESSAGES/abrt.mo", + "usr/share/locale/ja/LC_MESSAGES/abrt.mo", + "usr/share/locale/kn/LC_MESSAGES/abrt.mo", + "usr/share/locale/ko/LC_MESSAGES/abrt.mo", + "usr/share/locale/ml/LC_MESSAGES/abrt.mo", + "usr/share/locale/mr/LC_MESSAGES/abrt.mo", + "usr/share/locale/nb/LC_MESSAGES/abrt.mo", + "usr/share/locale/nl/LC_MESSAGES/abrt.mo", + "usr/share/locale/or/LC_MESSAGES/abrt.mo", + "usr/share/locale/pa/LC_MESSAGES/abrt.mo", + 
"usr/share/locale/pl/LC_MESSAGES/abrt.mo", + "usr/share/locale/pt/LC_MESSAGES/abrt.mo", + "usr/share/locale/pt_BR/LC_MESSAGES/abrt.mo", + "usr/share/locale/ru/LC_MESSAGES/abrt.mo", + "usr/share/locale/sk/LC_MESSAGES/abrt.mo", + "usr/share/locale/sr/LC_MESSAGES/abrt.mo", + "usr/share/locale/sr@latin/LC_MESSAGES/abrt.mo", + "usr/share/locale/sv/LC_MESSAGES/abrt.mo", + "usr/share/locale/ta/LC_MESSAGES/abrt.mo", + "usr/share/locale/te/LC_MESSAGES/abrt.mo", + "usr/share/locale/uk/LC_MESSAGES/abrt.mo", + "usr/share/locale/zh_CN/LC_MESSAGES/abrt.mo", + "usr/share/locale/zh_TW/LC_MESSAGES/abrt.mo", + "usr/share/man/man1/abrt-action-save-package-data.1.gz", + "usr/share/man/man1/abrt-handle-upload.1.gz", + "usr/share/man/man1/abrt-server.1.gz", + "usr/share/man/man5/abrt-action-save-package-data.conf.5.gz", + "usr/share/man/man5/abrt.conf.5.gz", + "usr/share/man/man8/abrt-dbus.8.gz", + "usr/share/man/man8/abrtd.8.gz", ] check_files(test_dir, expected) @@ -1653,547 +1667,536 @@ def test_extract_twice_can_extract_to_relative_paths(self): import shutil import tempfile - test_file = self.get_test_loc('archive/rpm/xz-compressed-cpio.rpm') + test_file = self.get_test_loc("archive/rpm/xz-compressed-cpio.rpm") # this will return an extractor that extracts twice extractor = archive.get_extractor(test_file) - project_tmp = join(project_root, 'tmp') + project_tmp = join(project_root, "tmp") fileutils.create_dir(project_tmp) project_root_abs = abspath(project_root) - test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, '').strip('\\/') - test_tgt_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, '').strip('\\/') + test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/") + test_tgt_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/") shutil.copy(test_file, test_src_dir) - test_src_file = join(test_src_dir, 'xz-compressed-cpio.rpm') + test_src_file = join(test_src_dir, "xz-compressed-cpio.rpm") result = list(extractor(test_src_file, test_tgt_dir)) assert [] == result - assert exists(join(test_tgt_dir, 'usr/sbin/abrt-dbus')) + assert exists(join(test_tgt_dir, "usr/sbin/abrt-dbus")) class TestRar(BaseArchiveTestCase): - def test_extract_rar_basic(self): - test_file = self.get_test_loc('archive/rar/basic.rar') + test_file = self.get_test_loc("archive/rar/basic.rar") test_dir = self.get_temp_dir() archive.extract_rar(test_file, test_dir) - result = os.path.join(test_dir, 'd', 'b', 'a.txt') + result = os.path.join(test_dir, "d", "b", "a.txt") assert os.path.exists(result) def test_extract_rar_with_invalid_path(self): - test_file = self.get_test_loc('archive/rar/rar_invalidpath.rar') + test_file = self.get_test_loc("archive/rar/rar_invalidpath.rar") test_dir = self.get_temp_dir() archive.extract_rar(test_file, test_dir) - result = os.path.join(test_dir, 'this/that') + result = os.path.join(test_dir, "this/that") assert os.path.exists(result) def test_extract_rar_with_trailing_data(self): - test_file = self.get_test_loc('archive/rar/rar_trailing.rar') + test_file = self.get_test_loc("archive/rar/rar_trailing.rar") test_dir = self.get_temp_dir() - expected = Exception('Unknown error') + expected = Exception("Unknown error") self.assertRaisesInstance(expected, archive.extract_rar, test_file, test_dir) - result = os.path.join(test_dir, 'd', 'b', 'a.txt') + result = os.path.join(test_dir, "d", "b", "a.txt") assert os.path.exists(result) def test_extract_rar_broken(self): - test_file = 
self.get_test_loc('archive/rar/broken.rar') + test_file = self.get_test_loc("archive/rar/broken.rar") test_dir = self.get_temp_dir() - expected = Exception('Header CRC error') + expected = Exception("Header CRC error") self.assertRaisesInstance(expected, archive.extract_rar, test_file, test_dir) def test_extract_rar_with_relative_path(self): # FIXME: this file may not have a real relative path - test_file = self.get_test_loc('archive/rar/rar_relative.rar', copy=True) + test_file = self.get_test_loc("archive/rar/rar_relative.rar", copy=True) test_dir = self.get_temp_dir() archive.extract_rar(test_file, test_dir) - result = os.path.abspath(test_file + '/../a_parent_folder.txt') + result = os.path.abspath(test_file + "/../a_parent_folder.txt") assert not os.path.exists(result) - result = os.path.join(test_dir, '2folder/relative_file') + result = os.path.join(test_dir, "2folder/relative_file") assert os.path.exists(result) - result = os.path.join(test_dir, '2folder/3folder/relative_file') + result = os.path.join(test_dir, "2folder/3folder/relative_file") assert os.path.exists(result) def test_extract_rar_with_absolute_path(self): # FIXME: this file may not have a real absolute path - assert not os.path.exists('/home/li/Desktop/zip_folder') - test_file = self.get_test_loc('archive/rar/rar_absolute.rar', copy=True) + assert not os.path.exists("/home/li/Desktop/zip_folder") + test_file = self.get_test_loc("archive/rar/rar_absolute.rar", copy=True) test_dir = self.get_temp_dir() archive.extract_rar(test_file, test_dir) - assert not os.path.exists('/home/li/Desktop/absolute_folder') - result = os.path.join(test_dir, 'home/li/Desktop', - 'absolute_folder/absolute_file') + assert not os.path.exists("/home/li/Desktop/absolute_folder") + result = os.path.join(test_dir, "home/li/Desktop", "absolute_folder/absolute_file") assert os.path.exists(result) def test_extract_rar_with_password(self): - test_file = self.get_test_loc('archive/rar/rar_password.rar') + test_file = self.get_test_loc("archive/rar/rar_password.rar") test_dir = self.get_temp_dir() - expected = Exception('Prefix found') + expected = Exception("Prefix found") self.assertRaisesInstance(expected, archive.extract_rar, test_file, test_dir) def test_extract_rar_with_non_ascii_path(self): - test_file = self.get_test_loc('archive/rar/non_ascii_corrupted.rar') + test_file = self.get_test_loc("archive/rar/non_ascii_corrupted.rar") # The bug only occurs if the path was given as Unicode test_file = str(test_file) test_dir = self.get_temp_dir() # raise an exception but still extracts some - expected = Exception('Prefix found') + expected = Exception("Prefix found") self.assertRaisesInstance(expected, archive.extract_rar, test_file, test_dir) - result = os.path.join(test_dir, 'EdoProject_java/WebContent/WEB-INF/lib/cos.jar') + result = os.path.join(test_dir, "EdoProject_java/WebContent/WEB-INF/lib/cos.jar") assert os.path.exists(result) class TestSevenZip(BaseArchiveTestCase): - def test_extract_7z_basic(self): - test_file = self.get_test_loc('archive/7z/z.7z') + test_file = self.get_test_loc("archive/7z/z.7z") test_dir = self.get_temp_dir() result = archive.extract_7z(test_file, test_dir) assert [] == result - expected = ['z/a/a.txt', 'z/b/a.txt', 'z/c/a.txt'] + expected = ["z/a/a.txt", "z/b/a.txt", "z/c/a.txt"] check_files(test_dir, expected) def test_extract_7z_with_trailing_data(self): - test_file = self.get_test_loc('archive/7z/7zip_trailing.7z') + test_file = self.get_test_loc("archive/7z/7zip_trailing.7z") test_dir = self.get_temp_dir() result 
= archive.extract_7z(test_file, test_dir) assert [] == result - expected = ['z/a/a.txt', 'z/b/a.txt', 'z/c/a.txt'] + expected = ["z/a/a.txt", "z/b/a.txt", "z/c/a.txt"] check_files(test_dir, expected) def test_extract_7z_with_broken_archive_with7z(self): - test_file = self.get_test_loc('archive/7z/corrupted7z.7z') + test_file = self.get_test_loc("archive/7z/corrupted7z.7z") test_dir = self.get_temp_dir() - msg = 'There are data after the end of archive' + msg = "There are data after the end of archive" self.assertExceptionContains(msg, sevenzip.extract, test_file, test_dir) def test_extract_7z_with_broken_archive_does_not_fail_when_using_fallback(self): - test_file = self.get_test_loc('archive/7z/corrupted7z.7z') + test_file = self.get_test_loc("archive/7z/corrupted7z.7z") test_dir = self.get_temp_dir() - msg = 'There are data after the end of archive' + msg = "There are data after the end of archive" self.assertExceptionContains(msg, archive.extract_7z, test_file, test_dir) def test_extract_7z_with_non_existing_archive(self): - test_file = 'archive/7z/I_DO_NOT_EXIST.zip' + test_file = "archive/7z/I_DO_NOT_EXIST.zip" test_dir = self.get_temp_dir() - msg = 'The system cannot find the path specified' + msg = "The system cannot find the path specified" self.assertExceptionContains(msg, sevenzip.extract, test_file, test_dir) def test_extract_7z_with_invalid_path_using_7z(self): - test_file = self.get_test_loc('archive/7z/7zip_invalidpath.7z') + test_file = self.get_test_loc("archive/7z/7zip_invalidpath.7z") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) assert [] == result extracted = self.collect_extracted_path(test_dir) - expected = ['/this/', '/this/that'] + expected = ["/this/", "/this/that"] assert expected == extracted def test_extract_7z_with_invalid_path(self): - test_file = self.get_test_loc('archive/7z/7zip_invalidpath.7z') + test_file = self.get_test_loc("archive/7z/7zip_invalidpath.7z") test_dir = self.get_temp_dir() result = archive.extract_7z(test_file, test_dir) assert [] == result extracted = self.collect_extracted_path(test_dir) - expected = ['/this/', '/this/that'] + expected = ["/this/", "/this/that"] assert expected == extracted def test_extract_7z_with_relative_path(self): - test_file = self.get_test_loc('archive/7z/7zip_relative.7z') + test_file = self.get_test_loc("archive/7z/7zip_relative.7z") test_dir = self.get_temp_dir() result = archive.extract_7z(test_file, test_dir) - non_result = os.path.join(test_dir, '../a_parent_folder.txt') + non_result = os.path.join(test_dir, "../a_parent_folder.txt") assert not os.path.exists(non_result) assert [] == result extracted = self.collect_extracted_path(test_dir) expected = [ - '/dotdot/', - '/dotdot/2folder/', - '/dotdot/2folder/3folder/', - '/dotdot/2folder/3folder/relative_file', - '/dotdot/2folder/3folder/relative_file~', - '/dotdot/2folder/relative_file', - '/dotdot/relative_file' + "/dotdot/", + "/dotdot/2folder/", + "/dotdot/2folder/3folder/", + "/dotdot/2folder/3folder/relative_file", + "/dotdot/2folder/3folder/relative_file~", + "/dotdot/2folder/relative_file", + "/dotdot/relative_file", ] assert expected == extracted def test_extract_7z_with_password_with_7z(self): - test_file = self.get_test_loc('archive/7z/7zip_password.7z') + test_file = self.get_test_loc("archive/7z/7zip_password.7z") test_dir = self.get_temp_dir() - expected = Exception('Password protected archive, unable to extract') + expected = Exception("Password protected archive, unable to extract") 
self.assertRaisesInstance(expected, sevenzip.extract, test_file, test_dir) def test_extract_7z_with_password(self): - test_file = self.get_test_loc('archive/7z/7zip_password.7z') + test_file = self.get_test_loc("archive/7z/7zip_password.7z") test_dir = self.get_temp_dir() - expected = Exception('Password protected archive, unable to extract') + expected = Exception("Password protected archive, unable to extract") self.assertRaisesInstance(expected, archive.extract_7z, test_file, test_dir) def test_extract_7zip_native_with_unicode_path_should_extract_without_error(self): - test_file = self.get_test_loc('archive/7z/7zip_unicode.7z') + test_file = self.get_test_loc("archive/7z/7zip_unicode.7z") test_dir = self.get_temp_dir() result = sevenzip.extract(test_file, test_dir) assert [] == result - assert 2 == len(os.listdir(os.path.join(test_dir, 'zip'))) + assert 2 == len(os.listdir(os.path.join(test_dir, "zip"))) def test_extract_7zip_with_fallback_with_unicode_path_should_extract_without_error(self): - test_file = self.get_test_loc('archive/7z/7zip_unicode.7z') + test_file = self.get_test_loc("archive/7z/7zip_unicode.7z") test_dir = self.get_temp_dir() result = archive.extract_7z(test_file, test_dir) assert [] == result - assert 2 == len(os.listdir(os.path.join(test_dir, 'zip'))) + assert 2 == len(os.listdir(os.path.join(test_dir, "zip"))) def test_extract_7zip_extract_with_unicode_path_extracts_with_errors(self): - test_file = self.get_test_loc('archive/7z/7zip_unicode.7z') + test_file = self.get_test_loc("archive/7z/7zip_unicode.7z") test_dir = self.get_temp_dir() try: archive.extract_7z(test_file, test_dir) except libarchive2.ArchiveError as e: - assert 'Damaged 7-Zip archive' in e.msg + assert "Damaged 7-Zip archive" in e.msg def test_extract_7z_basic_with_space_in_file_name(self): - test_file = self.get_test_loc('archive/7z/t .7z') + test_file = self.get_test_loc("archive/7z/t .7z") test_dir = self.get_temp_dir() result = archive.extract_7z(test_file, test_dir) assert [] == result - expected = ['t/t.txt'] + expected = ["t/t.txt"] check_files(test_dir, expected) class TestIso(BaseArchiveTestCase): - def test_extract_iso_basic(self): - test_file = self.get_test_loc('archive/iso/small.iso') + test_file = self.get_test_loc("archive/iso/small.iso") test_dir = self.get_temp_dir() archive.extract_iso(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = [ - '/ChangeLog', - '/ChangeLog (copy)', - '/freebase.ABOUT', - '/this/', - '/this/that' - ] + expected = ["/ChangeLog", "/ChangeLog (copy)", "/freebase.ABOUT", "/this/", "/this/that"] assert sorted(expected) == sorted(extracted) def test_get_extractor_not_iso_text_is_not_mistaken_for_an_iso_image(self): - test_file = self.get_test_loc('archive/iso/ChangeLog') + test_file = self.get_test_loc("archive/iso/ChangeLog") extractor = archive.get_extractor(test_file) assert not extractor def test_extract_iso_basic_with_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/iso/t.iso.foo') + test_file = self.get_test_loc("archive/iso/t.iso.foo") test_dir = self.get_temp_dir() archive.extract_iso(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestXzLzma(BaseArchiveTestCase): - def check_lzma_extract(self, extract_fun, test_file, expected): test_file = self.get_test_loc(test_file) extract_dir = self.get_temp_dir() expected_file = os.path.join(extract_dir, expected) 
extract_fun(test_file, extract_dir) - assert os.path.exists(expected_file), ( - '%(expected_file)s file was not extracted ' - 'correctly from archive %(test_file)s' - % locals()) + assert os.path.exists(expected_file), ( + "%(expected_file)s file was not extracted " + "correctly from archive %(test_file)s" % locals() + ) def test_extract_archive_tar_xz_1(self): - test_file = 'archive/lzma_xz/texlive-core-patches-20.tar.xz' - self.check_lzma_extract(extract_fun=archive.extract_lzma, - test_file=test_file, - expected='texlive-core-patches-20.tar') + test_file = "archive/lzma_xz/texlive-core-patches-20.tar.xz" + self.check_lzma_extract( + extract_fun=archive.extract_lzma, + test_file=test_file, + expected="texlive-core-patches-20.tar", + ) def test_extract_archive_tar_xz_2(self): - test_file = 'archive/lzma_xz/texlive-core-patches-20.tar.xz' - self.check_lzma_extract(extract_fun=archive.extract_lzma, - test_file=test_file, - expected='texlive-core-patches-20.tar') + test_file = "archive/lzma_xz/texlive-core-patches-20.tar.xz" + self.check_lzma_extract( + extract_fun=archive.extract_lzma, + test_file=test_file, + expected="texlive-core-patches-20.tar", + ) def test_extract_archive_tar_lzma_1(self): - test_file = 'archive/lzma_xz/coreutils-8.5-patches-1.tar.lzma' - self.check_lzma_extract(extract_fun=archive.extract_lzma, - test_file=test_file, - expected='coreutils-8.5-patches-1.tar') + test_file = "archive/lzma_xz/coreutils-8.5-patches-1.tar.lzma" + self.check_lzma_extract( + extract_fun=archive.extract_lzma, + test_file=test_file, + expected="coreutils-8.5-patches-1.tar", + ) def test_extract_archive_tar_lzma_2(self): - test_file = 'archive/lzma_xz/orionsocket-1.0.9.tar.lzma' - self.check_lzma_extract(extract_fun=archive.extract_lzma, - test_file=test_file, - expected='orionsocket-1.0.9.tar') + test_file = "archive/lzma_xz/orionsocket-1.0.9.tar.lzma" + self.check_lzma_extract( + extract_fun=archive.extract_lzma, test_file=test_file, expected="orionsocket-1.0.9.tar" + ) def test_extract_archive_tar_lzma_3(self): - test_file = 'archive/lzma_xz/MinGW-5.1.6.exe-src.tar.lzma' - expected = 'MinGW-5.1.6.exe-src.tar' - self.check_lzma_extract(extract_fun=archive.extract_lzma, - test_file=test_file, - expected=expected) + test_file = "archive/lzma_xz/MinGW-5.1.6.exe-src.tar.lzma" + expected = "MinGW-5.1.6.exe-src.tar" + self.check_lzma_extract( + extract_fun=archive.extract_lzma, test_file=test_file, expected=expected + ) class TestDia(BaseArchiveTestCase): - def test_extract_dia_basic(self): - test_file = self.get_test_loc('archive/dia/dia.dia') + test_file = self.get_test_loc("archive/dia/dia.dia") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'dia.dia-extract') + result = os.path.join(test_dir, "dia.dia-extract") assert os.path.exists(result) - @pytest.mark.xfail(reason='Fails for now on Python 3') + @pytest.mark.xfail(reason="Fails for now on Python 3") def test_extract_dia_with_trailing_data(self): - test_file = self.get_test_loc('archive/dia/dia_trailing.dia') + test_file = self.get_test_loc("archive/dia/dia_trailing.dia") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'dia_trailing.dia-extract') + result = os.path.join(test_dir, "dia_trailing.dia-extract") assert os.path.exists(result) - @pytest.mark.xfail(reason='Fails for now on Python 3') + @pytest.mark.xfail(reason="Fails for now on Python 3") def test_extract_dia_with_trailing_data_py3(self): - test_file = 
self.get_test_loc('archive/dia/dia_trailing.dia') + test_file = self.get_test_loc("archive/dia/dia_trailing.dia") test_dir = self.get_temp_dir() archive.uncompress_gzip(test_file, test_dir) - result = os.path.join(test_dir, 'dia_trailing.dia-extract') + result = os.path.join(test_dir, "dia_trailing.dia-extract") assert os.path.exists(result) def test_extract_dia_broken_1(self): - test_file = self.get_test_loc('archive/dia/dia_broken.dia') + test_file = self.get_test_loc("archive/dia/dia_broken.dia") test_dir = self.get_temp_dir() - self.assertExceptionContains('CRC check failed', - archive.uncompress_gzip, test_file, test_dir) + self.assertExceptionContains( + "CRC check failed", archive.uncompress_gzip, test_file, test_dir + ) def test_extract_dia_broken_2(self): - test_file = self.get_test_loc('archive/dia/broken/PublisherUML.dia') + test_file = self.get_test_loc("archive/dia/broken/PublisherUML.dia") test_dir = self.get_temp_dir() - self.assertExceptionContains('invalid distance too far back', - archive.uncompress_gzip, test_file, test_dir) + self.assertExceptionContains( + "invalid distance too far back", archive.uncompress_gzip, test_file, test_dir + ) def test_extract_dia_broken_3(self): - test_file = self.get_test_loc('archive/dia/broken/schedulerClassDiagram.dia') + test_file = self.get_test_loc("archive/dia/broken/schedulerClassDiagram.dia") test_dir = self.get_temp_dir() - self.assertExceptionContains('invalid distance too far back', - archive.uncompress_gzip, test_file, test_dir) + self.assertExceptionContains( + "invalid distance too far back", archive.uncompress_gzip, test_file, test_dir + ) def test_extract_dia_broken_4(self): - test_file = self.get_test_loc('archive/dia/broken/ServletProxyGenerator.dia') + test_file = self.get_test_loc("archive/dia/broken/ServletProxyGenerator.dia") test_dir = self.get_temp_dir() - self.assertExceptionContains('invalid distance too far back', - archive.uncompress_gzip, test_file, test_dir) + self.assertExceptionContains( + "invalid distance too far back", archive.uncompress_gzip, test_file, test_dir + ) def test_extract_can_get_extractor_and_uncompress_dia_files(self): - test_file = self.get_test_loc('archive/dia/guess/infoset-doc.dia') + test_file = self.get_test_loc("archive/dia/guess/infoset-doc.dia") test_dir = self.get_temp_dir() ext = archive.get_extractor(test_file) ext(test_file, test_dir) - result = os.path.join(test_dir, 'infoset-doc.dia-extract') + result = os.path.join(test_dir, "infoset-doc.dia-extract") assert os.path.exists(result) class TestTarZ(BaseArchiveTestCase): - def test_extract_tarz_compress_basic(self): - test_file = self.get_test_loc('archive/Z/tkWWW-0.11.tar.Z') + test_file = self.get_test_loc("archive/Z/tkWWW-0.11.tar.Z") test_dir = self.get_temp_dir() archive.extract_Z(test_file, test_dir) - result = os.path.join(test_dir, 'tkWWW-0.11.tar') + result = os.path.join(test_dir, "tkWWW-0.11.tar") assert os.path.exists(result) def test_extract_z_compress_basic(self): - test_file = self.get_test_loc('archive/Z/tr2tex.Z') + test_file = self.get_test_loc("archive/Z/tr2tex.Z") test_dir = self.get_temp_dir() archive.extract_Z(test_file, test_dir) - result = os.path.join(test_dir, 'tr2tex') + result = os.path.join(test_dir, "tr2tex") assert os.path.exists(result) class TestXar(BaseArchiveTestCase): - def test_extract_xar_basic(self): - test_file = self.get_test_loc('archive/xar/xar-1.4.xar') + test_file = self.get_test_loc("archive/xar/xar-1.4.xar") test_dir = self.get_temp_dir() archive.extract_Z(test_file, test_dir) - 
result = os.path.join(test_dir, '[TOC].xml') + result = os.path.join(test_dir, "[TOC].xml") assert os.path.exists(result) - result = os.path.join(test_dir, 'xar-1.4', 'Makefile.in') + result = os.path.join(test_dir, "xar-1.4", "Makefile.in") assert os.path.exists(result) class TestCb7(BaseArchiveTestCase): - def test_get_extractor_cb7(self): - test_file = self.get_test_loc('archive/cb7/t .cb7') + test_file = self.get_test_loc("archive/cb7/t .cb7") result = archive.get_extractor(test_file) expected = archive.extract_7z assert expected == result def test_extract_cb7_basic_with_space_in_file_name(self): - test_file = self.get_test_loc('archive/cb7/t .cb7') + test_file = self.get_test_loc("archive/cb7/t .cb7") test_dir = self.get_temp_dir() archive.extract_7z(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted def test_extract_cb7_basic_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cb7/t.cb7.foo') + test_file = self.get_test_loc("archive/cb7/t.cb7.foo") test_dir = self.get_temp_dir() archive.extract_7z(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestCab(BaseArchiveTestCase): - def test_get_extractor_cab(self): - test_file = self.get_test_loc('archive/cab/basic.cab') + test_file = self.get_test_loc("archive/cab/basic.cab") result = archive.get_extractor(test_file) expected = archive.extract_cab assert expected == result def test_extract_cab_basic(self): - test_file = self.get_test_loc('archive/cab/basic.cab') + test_file = self.get_test_loc("archive/cab/basic.cab") test_dir = self.get_temp_dir() archive.extract_cab(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/TREEHELP.TXT'] + expected = ["/TREEHELP.TXT"] assert expected == extracted def test_extract_cab_basic_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cab/t.cab.foo') + test_file = self.get_test_loc("archive/cab/t.cab.foo") test_dir = self.get_temp_dir() archive.extract_cab(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestCbr(BaseArchiveTestCase): - def test_get_extractor_cbr(self): - test_file = self.get_test_loc('archive/cbr/t.cbr') + test_file = self.get_test_loc("archive/cbr/t.cbr") result = archive.get_extractor(test_file) # FIXME: we do not handle these rare extensions (this is a RAR) expected = None # archive.extract_rar assert expected == result def test_extract_cbr_basic(self): - test_file = self.get_test_loc('archive/cbr/t.cbr') + test_file = self.get_test_loc("archive/cbr/t.cbr") test_dir = self.get_temp_dir() libarchive2.extract(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted def test_extract_cbr_basic_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cbr/t.cbr.foo') + test_file = self.get_test_loc("archive/cbr/t.cbr.foo") test_dir = self.get_temp_dir() libarchive2.extract(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestCbt(BaseArchiveTestCase): - def test_get_extractor_cbt(self): - test_file = 
self.get_test_loc('archive/cbt/t.cbt') + test_file = self.get_test_loc("archive/cbt/t.cbt") result = archive.get_extractor(test_file) expected = archive.extract_tar assert expected == result def test_extract_cbt_basic(self): - test_file = self.get_test_loc('archive/cbt/t.cbt') + test_file = self.get_test_loc("archive/cbt/t.cbt") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted def test_extract_cbt_basic_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cbt/t.cbt.foo') + test_file = self.get_test_loc("archive/cbt/t.cbt.foo") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestCbz(BaseArchiveTestCase): - def test_get_extractor_cbz(self): - test_file = self.get_test_loc('archive/cbz/t.cbz', copy=True) + test_file = self.get_test_loc("archive/cbz/t.cbz", copy=True) result = archive.get_extractor(test_file) expected = archive.extract_zip assert expected == result def test_extract_cbz_basic(self): - test_file = self.get_test_loc('archive/cbz/t.cbz') + test_file = self.get_test_loc("archive/cbz/t.cbz") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted def test_extract_cbz_basic_with_weird_filename_extension(self): - test_file = self.get_test_loc('archive/cbz/t.cbz.foo') + test_file = self.get_test_loc("archive/cbz/t.cbz.foo") test_dir = self.get_temp_dir() archive.extract_zip(test_file, test_dir) extracted = self.collect_extracted_path(test_dir) - expected = ['/t/', '/t/t.txt'] + expected = ["/t/", "/t/t.txt"] assert expected == extracted class TestLzip(BaseArchiveTestCase): - - pytestmark = pytest.mark.skipif(on_windows, reason='FIXME: lzip does not work on Windows') + pytestmark = pytest.mark.skipif(on_windows, reason="FIXME: lzip does not work on Windows") def test_extract_tarlzip_basic(self): - test_file = self.get_test_loc('archive/lzip/sample.tar.lz') + test_file = self.get_test_loc("archive/lzip/sample.tar.lz") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'tst') - assert ['empty', 'some'] == sorted(os.listdir(result)) + result = os.path.join(test_dir, "tst") + assert ["empty", "some"] == sorted(os.listdir(result)) def test_uncompress_lzip_basic(self): - test_file = self.get_test_loc('archive/lzip/some.lz') + test_file = self.get_test_loc("archive/lzip/some.lz") test_dir = self.get_temp_dir() archive.extract_lzip(test_file, test_dir) - assert ['dfsdfsdfsdfsdfsdfsd_'] == os.listdir(test_dir) + assert ["dfsdfsdfsdfsdfsdfsd_"] == os.listdir(test_dir) class TestLz4(BaseArchiveTestCase): - def test_extract_tarlz4_basic(self): - test_file = self.get_test_loc('archive/lz4/sample.tar.lz4') + test_file = self.get_test_loc("archive/lz4/sample.tar.lz4") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'tst') - assert ['empty', 'some'] == sorted(os.listdir(result)) + result = os.path.join(test_dir, "tst") + assert ["empty", "some"] == sorted(os.listdir(result)) def test_uncompress_lz4_basic(self): - test_file = self.get_test_loc('archive/lz4/some.lz4') + 
test_file = self.get_test_loc("archive/lz4/some.lz4") test_dir = self.get_temp_dir() archive.extract_lzip(test_file, test_dir) - assert ['dfsdfsdfsdfsdfsdfsd_'] == os.listdir(test_dir) + assert ["dfsdfsdfsdfsdfsdfsd_"] == os.listdir(test_dir) class TestZstd(BaseArchiveTestCase): - def test_extract_tarzstd_basic(self): - test_file = self.get_test_loc('archive/zstd/sample.tar.zst') + test_file = self.get_test_loc("archive/zstd/sample.tar.zst") test_dir = self.get_temp_dir() archive.extract_tar(test_file, test_dir) - result = os.path.join(test_dir, 'tst') - assert ['empty', 'some'] == sorted(os.listdir(result)) + result = os.path.join(test_dir, "tst") + assert ["empty", "some"] == sorted(os.listdir(result)) def test_uncompress_lzip_basic(self): - test_file = self.get_test_loc('archive/zstd/some.zst') + test_file = self.get_test_loc("archive/zstd/some.zst") test_dir = self.get_temp_dir() archive.extract_lzip(test_file, test_dir) - assert ['dfsdfsdfsdfsdfsdfsd_'] == os.listdir(test_dir) + assert ["dfsdfsdfsdfsdfsdfsd_"] == os.listdir(test_dir) + ################################################################################ # Note: The following series of test is not easy to grasp but unicode archives @@ -2207,7 +2210,6 @@ def test_uncompress_lzip_basic(self): class ExtractArchiveWithIllegalFilenamesTestCase(BaseArchiveTestCase): - def check_extract_weird_names( self, test_function, @@ -2244,21 +2246,24 @@ def check_extract_weird_names( return len_test_dir = len(test_dir) - extracted = sorted(path[len_test_dir:] for path in fileutils.resource_iter(test_dir, with_dirs=False)) + extracted = sorted( + path[len_test_dir:] for path in fileutils.resource_iter(test_dir, with_dirs=False) + ) extracted = [str(p) for p in extracted] extracted = [to_posix(p) for p in extracted] if on_linux: - os_suffix = 'linux' + os_suffix = "linux" elif on_mac: - os_suffix = 'mac' + os_suffix = "mac" elif on_windows: - os_suffix = 'win' + os_suffix = "win" - expected_file = test_file + '_' + expected_suffix + '_' + os_suffix + '.expected' + expected_file = test_file + "_" + expected_suffix + "_" + os_suffix + ".expected" import json + if regen: - with open(expected_file, 'w') as ef: + with open(expected_file, "w") as ef: ef.write(json.dumps(extracted, indent=2)) expected = json.loads(open(expected_file).read()) @@ -2270,458 +2275,572 @@ def check_extract_weird_names( # temporary CI debug if on_mac: print() - print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!') + print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") print(json.dumps(extracted, indent=2)) - print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!') + print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") print() raise -@pytest.mark.skipif(on_windows, reason='Run only on POSIX because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithLibarchiveOnPosix(ExtractArchiveWithIllegalFilenamesTestCase): - +@pytest.mark.skipif(on_windows, reason="Run only on POSIX because of specific test expectations.") +class TestExtractArchiveWithIllegalFilenamesWithLibarchiveOnPosix( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_libarchive_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + test_file = self.get_test_loc("archive/weird_names/weird_names.7z") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def 
test_extract_ar_with_weird_filenames_with_libarchive_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") test_dir = self.get_temp_dir() - expected = Exception('Incorrect file header signature') + expected = Exception("Incorrect file header signature") self.assertRaisesInstance(expected, libarchive2.extract, test_file, test_dir) def test_extract_cpio_with_weird_filenames_with_libarchive_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def test_extract_tar_with_weird_filenames_with_libarchive_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def test_extract_zip_with_weird_filenames_with_libarchive_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') - + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) -@pytest.mark.skipif(not on_windows, reason='Run only on Windows because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithLibarchiveOnWindows(ExtractArchiveWithIllegalFilenamesTestCase): +@pytest.mark.skipif( + not on_windows, reason="Run only on Windows because of specific test expectations." 
+) +class TestExtractArchiveWithIllegalFilenamesWithLibarchiveOnWindows( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_libarchive_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + test_file = self.get_test_loc("archive/weird_names/weird_names.7z") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def test_extract_ar_with_weird_filenames_with_libarchive_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") try: self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') - self.fail('Exception not raised.') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) + self.fail("Exception not raised.") except libarchive2.ArchiveError as ae: - assert str(ae).startswith('Incorrect file header signature') + assert str(ae).startswith("Incorrect file header signature") def test_extract_cpio_with_weird_filenames_with_libarchive_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def test_extract_tar_with_weird_filenames_with_libarchive_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) def test_extract_zip_with_weird_filenames_with_libarchive_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") self.check_extract_weird_names( - libarchive2.extract, test_file, expected_warnings=[], expected_suffix='libarch') + libarchive2.extract, test_file, expected_warnings=[], expected_suffix="libarch" + ) -@pytest.mark.skipif(on_windows or on_mac, reason='Do not run windows and mac because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnPosix(ExtractArchiveWithIllegalFilenamesTestCase): - +@pytest.mark.skipif( + on_windows or on_mac, reason="Do not run windows and mac because of specific test expectations." 
+) +class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnPosix( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + test_file = self.get_test_loc("archive/weird_names/weird_names.7z") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) def test_extract_ar_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) def test_extract_cpio_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) def test_extract_iso_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.iso') + test_file = self.get_test_loc("archive/weird_names/weird_names.iso") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) @pytest.mark.xfail # not a problem: we now use libarchive for these def test_extract_rar_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.rar') + test_file = self.get_test_loc("archive/weird_names/weird_names.rar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) def test_extract_tar_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) def test_extract_zip_with_weird_filenames_with_sevenzip_posix(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip') - + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip" + ) -@pytest.mark.skipif(not on_mac, reason='Run only on macOS because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnMac(ExtractArchiveWithIllegalFilenamesTestCase): +@pytest.mark.skipif(not on_mac, reason="Run only on macOS because of specific test expectations.") +class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnMac( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + 
test_file = self.get_test_loc("archive/weird_names/weird_names.7z") expected_exception = { - 'weird_names/man\\1/..1.gz': u'Empty archive or incorrect arguments', - 'weird_names/man\\1/:\\.1': u'Empty archive or incorrect arguments', - 'weird_names/man\\1/[.1.gz': u'Empty archive or incorrect arguments', - 'weird_names/man\\1/[\\:*.1': u'Empty archive or incorrect arguments', - 'weird_names/man\\1/\\:.1.gz': u'Empty archive or incorrect arguments', - "weird_names/some 'file": u'Empty archive or incorrect arguments', - 'weird_names/some \\file': u'Empty archive or incorrect arguments', - 'weird_names/some file': u'Empty archive or incorrect arguments', - 'weird_names/some"file': u'Empty archive or incorrect arguments', - 'weird_names/some\\"file': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\t.t': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\n.t': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab".t"': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab*.t*': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab<.t<': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab>.t>': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab?.t?': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\\.t\\': u'Empty archive or incorrect arguments', - 'weird_names/winchr/ab|.t|': u'Empty archive or incorrect arguments', + "weird_names/man\\1/..1.gz": "Empty archive or incorrect arguments", + "weird_names/man\\1/:\\.1": "Empty archive or incorrect arguments", + "weird_names/man\\1/[.1.gz": "Empty archive or incorrect arguments", + "weird_names/man\\1/[\\:*.1": "Empty archive or incorrect arguments", + "weird_names/man\\1/\\:.1.gz": "Empty archive or incorrect arguments", + "weird_names/some 'file": "Empty archive or incorrect arguments", + "weird_names/some \\file": "Empty archive or incorrect arguments", + "weird_names/some file": "Empty archive or incorrect arguments", + 'weird_names/some"file': "Empty archive or incorrect arguments", + 'weird_names/some\\"file': "Empty archive or incorrect arguments", + "weird_names/winchr/ab\t.t": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\n.t": "Empty archive or incorrect arguments", + 'weird_names/winchr/ab".t"': "Empty archive or incorrect arguments", + "weird_names/winchr/ab*.t*": "Empty archive or incorrect arguments", + "weird_names/winchr/ab<.t<": "Empty archive or incorrect arguments", + "weird_names/winchr/ab>.t>": "Empty archive or incorrect arguments", + "weird_names/winchr/ab?.t?": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\\.t\\": "Empty archive or incorrect arguments", + "weird_names/winchr/ab|.t|": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) def test_extract_ar_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") expected_exception = { - '\\:.1.gz': 'Empty archive or incorrect arguments', - 'ab*.t*': 'Empty archive or incorrect arguments', - 'some file': 'Empty archive or incorrect arguments', - '[\\:*.1': 'Empty archive or incorrect arguments', - 'some"file': 'Empty archive or incorrect arguments', - 'ab\\.t\\': 
'Empty archive or incorrect arguments', - 'some\\"file': 'Empty archive or incorrect arguments', - 'ab\t.t': 'Empty archive or incorrect arguments', - 'ab\n.t': 'Empty archive or incorrect arguments', - "some 'file": 'Empty archive or incorrect arguments', - 'ab<.t<': 'Empty archive or incorrect arguments', - 'ab>.t>': 'Empty archive or incorrect arguments', - 'ab?.t?': 'Empty archive or incorrect arguments' + "\\:.1.gz": "Empty archive or incorrect arguments", + "ab*.t*": "Empty archive or incorrect arguments", + "some file": "Empty archive or incorrect arguments", + "[\\:*.1": "Empty archive or incorrect arguments", + 'some"file': "Empty archive or incorrect arguments", + "ab\\.t\\": "Empty archive or incorrect arguments", + 'some\\"file': "Empty archive or incorrect arguments", + "ab\t.t": "Empty archive or incorrect arguments", + "ab\n.t": "Empty archive or incorrect arguments", + "some 'file": "Empty archive or incorrect arguments", + "ab<.t<": "Empty archive or incorrect arguments", + "ab>.t>": "Empty archive or incorrect arguments", + "ab?.t?": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) def test_extract_cpio_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") expected_exception = { - 'weird_names/man\\1/\\:.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab*.t*': 'Empty archive or incorrect arguments', - 'weird_names/some file': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[\\:*.1': 'Empty archive or incorrect arguments', - 'weird_names/some"file': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\\.t\\': 'Empty archive or incorrect arguments', - 'weird_names/some\\"file': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\t.t': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\n.t': 'Empty archive or incorrect arguments', - "weird_names/some 'file": 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab<.t<': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab>.t>': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab?.t?': 'Empty archive or incorrect arguments', - 'weird_names/some \\file': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/..1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab".t"': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab|.t|': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/:\\.1': 'Empty archive or incorrect arguments' + "weird_names/man\\1/\\:.1.gz": "Empty archive or incorrect arguments", + "weird_names/winchr/ab*.t*": "Empty archive or incorrect arguments", + "weird_names/some file": "Empty archive or incorrect arguments", + "weird_names/man\\1/[\\:*.1": "Empty archive or incorrect arguments", + 'weird_names/some"file': "Empty archive or incorrect arguments", + "weird_names/winchr/ab\\.t\\": "Empty archive or incorrect arguments", + 'weird_names/some\\"file': "Empty archive or incorrect arguments", + "weird_names/winchr/ab\t.t": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\n.t": "Empty archive or incorrect 
arguments", + "weird_names/some 'file": "Empty archive or incorrect arguments", + "weird_names/winchr/ab<.t<": "Empty archive or incorrect arguments", + "weird_names/winchr/ab>.t>": "Empty archive or incorrect arguments", + "weird_names/winchr/ab?.t?": "Empty archive or incorrect arguments", + "weird_names/some \\file": "Empty archive or incorrect arguments", + "weird_names/man\\1/..1.gz": "Empty archive or incorrect arguments", + 'weird_names/winchr/ab".t"': "Empty archive or incorrect arguments", + "weird_names/man\\1/[.1.gz": "Empty archive or incorrect arguments", + "weird_names/winchr/ab|.t|": "Empty archive or incorrect arguments", + "weird_names/man\\1/:\\.1": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) @pytest.mark.xfail def test_extract_rar_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.rar') + test_file = self.get_test_loc("archive/weird_names/weird_names.rar") self.check_extract_weird_names( - sevenzip.extract, - test_file, - expected_warnings=[], - expected_suffix='7zip', - regen=False) + sevenzip.extract, test_file, expected_warnings=[], expected_suffix="7zip", regen=False + ) def test_extract_iso_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.iso') + test_file = self.get_test_loc("archive/weird_names/weird_names.iso") expected_exception = { - 'weird_names/man_1/[.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/man_1/[___.1': 'Empty archive or incorrect arguments', - "weird_names/some 'file": 'Empty archive or incorrect arguments', - 'weird_names/some _file': 'Empty archive or incorrect arguments', - 'weird_names/some file': 'Empty archive or incorrect arguments', - 'weird_names/some"file': 'Empty archive or incorrect arguments', - 'weird_names/some_"file': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab".t"': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab<.t<': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab>.t>': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab|.t|': 'Empty archive or incorrect arguments' + "weird_names/man_1/[.1.gz": "Empty archive or incorrect arguments", + "weird_names/man_1/[___.1": "Empty archive or incorrect arguments", + "weird_names/some 'file": "Empty archive or incorrect arguments", + "weird_names/some _file": "Empty archive or incorrect arguments", + "weird_names/some file": "Empty archive or incorrect arguments", + 'weird_names/some"file': "Empty archive or incorrect arguments", + 'weird_names/some_"file': "Empty archive or incorrect arguments", + 'weird_names/winchr/ab".t"': "Empty archive or incorrect arguments", + "weird_names/winchr/ab<.t<": "Empty archive or incorrect arguments", + "weird_names/winchr/ab>.t>": "Empty archive or incorrect arguments", + "weird_names/winchr/ab|.t|": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) def test_extract_tar_with_weird_filenames_with_sevenzip_macos(self): - test_file = 
self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") expected_exception = { - 'weird_names/man\\1/\\:.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab*.t*': 'Empty archive or incorrect arguments', - 'weird_names/some file': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[\\:*.1': 'Empty archive or incorrect arguments', - 'weird_names/some"file': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\\.t\\': 'Empty archive or incorrect arguments', - 'weird_names/some\\"file': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\t.t': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\n.t': 'Empty archive or incorrect arguments', - "weird_names/some 'file": 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab<.t<': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab>.t>': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab?.t?': 'Empty archive or incorrect arguments', - 'weird_names/some \\file': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/..1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab".t"': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab|.t|': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/:\\.1': 'Empty archive or incorrect arguments', + "weird_names/man\\1/\\:.1.gz": "Empty archive or incorrect arguments", + "weird_names/winchr/ab*.t*": "Empty archive or incorrect arguments", + "weird_names/some file": "Empty archive or incorrect arguments", + "weird_names/man\\1/[\\:*.1": "Empty archive or incorrect arguments", + 'weird_names/some"file': "Empty archive or incorrect arguments", + "weird_names/winchr/ab\\.t\\": "Empty archive or incorrect arguments", + 'weird_names/some\\"file': "Empty archive or incorrect arguments", + "weird_names/winchr/ab\t.t": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\n.t": "Empty archive or incorrect arguments", + "weird_names/some 'file": "Empty archive or incorrect arguments", + "weird_names/winchr/ab<.t<": "Empty archive or incorrect arguments", + "weird_names/winchr/ab>.t>": "Empty archive or incorrect arguments", + "weird_names/winchr/ab?.t?": "Empty archive or incorrect arguments", + "weird_names/some \\file": "Empty archive or incorrect arguments", + "weird_names/man\\1/..1.gz": "Empty archive or incorrect arguments", + 'weird_names/winchr/ab".t"': "Empty archive or incorrect arguments", + "weird_names/man\\1/[.1.gz": "Empty archive or incorrect arguments", + "weird_names/winchr/ab|.t|": "Empty archive or incorrect arguments", + "weird_names/man\\1/:\\.1": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) def test_extract_zip_with_weird_filenames_with_sevenzip_macos(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") expected_exception = { - 'weird_names/some \\file': 'Empty archive or incorrect arguments', - "weird_names/some 'file": 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab?.t?': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\t.t': 
'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\n.t': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab>.t>': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab\\.t\\': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab<.t<': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab".t"': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab*.t*': 'Empty archive or incorrect arguments', - 'weird_names/winchr/ab|.t|': 'Empty archive or incorrect arguments', - 'weird_names/some\\"file': 'Empty archive or incorrect arguments', - 'weird_names/some file': 'Empty archive or incorrect arguments', - 'weird_names/some"file': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[\\:*.1': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/\\:.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/[.1.gz': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/:\\.1': 'Empty archive or incorrect arguments', - 'weird_names/man\\1/..1.gz': 'Empty archive or incorrect arguments', + "weird_names/some \\file": "Empty archive or incorrect arguments", + "weird_names/some 'file": "Empty archive or incorrect arguments", + "weird_names/winchr/ab?.t?": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\t.t": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\n.t": "Empty archive or incorrect arguments", + "weird_names/winchr/ab>.t>": "Empty archive or incorrect arguments", + "weird_names/winchr/ab\\.t\\": "Empty archive or incorrect arguments", + "weird_names/winchr/ab<.t<": "Empty archive or incorrect arguments", + 'weird_names/winchr/ab".t"': "Empty archive or incorrect arguments", + "weird_names/winchr/ab*.t*": "Empty archive or incorrect arguments", + "weird_names/winchr/ab|.t|": "Empty archive or incorrect arguments", + 'weird_names/some\\"file': "Empty archive or incorrect arguments", + "weird_names/some file": "Empty archive or incorrect arguments", + 'weird_names/some"file': "Empty archive or incorrect arguments", + "weird_names/man\\1/[\\:*.1": "Empty archive or incorrect arguments", + "weird_names/man\\1/\\:.1.gz": "Empty archive or incorrect arguments", + "weird_names/man\\1/[.1.gz": "Empty archive or incorrect arguments", + "weird_names/man\\1/:\\.1": "Empty archive or incorrect arguments", + "weird_names/man\\1/..1.gz": "Empty archive or incorrect arguments", } self.check_extract_weird_names( sevenzip.extract, test_file, expected_warnings=[], - expected_suffix='7zip', - expected_exception=expected_exception, regen=False) - - -@pytest.mark.skipif(not on_windows, reason='Run only on Windows because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnWin(ExtractArchiveWithIllegalFilenamesTestCase): - + expected_suffix="7zip", + expected_exception=expected_exception, + regen=False, + ) + + +@pytest.mark.skipif( + not on_windows, reason="Run only on Windows because of specific test expectations." 
+) +class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnWin( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + test_file = self.get_test_loc("archive/weird_names/weird_names.7z") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) @pytest.mark.xfail # not a problem: we use libarchive for these def test_extract_ar_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) @pytest.mark.xfail # not a problem: we use libarchive for these def test_extract_cpio_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) def test_extract_iso_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.iso') + test_file = self.get_test_loc("archive/weird_names/weird_names.iso") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) @pytest.mark.xfail # not a problem: we use libarchive for these def test_extract_rar_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.rar') + test_file = self.get_test_loc("archive/weird_names/weird_names.rar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) def test_extract_tar_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) @pytest.mark.xfail # not a problem: we use libarchive for these def test_extract_zip_with_weird_filenames_with_sevenzip_win(self): - test_file = 
self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=False, check_only_warnings=False) - - -@pytest.mark.skipif(not on_windows, reason='Run only on Windows because of specific test expectations.') -class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnWinWarning(ExtractArchiveWithIllegalFilenamesTestCase): - + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=False, + check_only_warnings=False, + ) + + +@pytest.mark.skipif( + not on_windows, reason="Run only on Windows because of specific test expectations." +) +class TestExtractArchiveWithIllegalFilenamesWithSevenzipOnWinWarning( + ExtractArchiveWithIllegalFilenamesTestCase +): def test_extract_7zip_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.7z') + test_file = self.get_test_loc("archive/weird_names/weird_names.7z") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) def test_extract_ar_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.ar') + test_file = self.get_test_loc("archive/weird_names/weird_names.ar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) def test_extract_cpio_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.cpio') + test_file = self.get_test_loc("archive/weird_names/weird_names.cpio") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) def test_extract_iso_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.iso') + test_file = self.get_test_loc("archive/weird_names/weird_names.iso") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) @pytest.mark.xfail # not a problem: we use libarchive for these def test_extract_rar_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.rar') + test_file = self.get_test_loc("archive/weird_names/weird_names.rar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) def test_extract_tar_with_weird_filenames_with_sevenzip_win(self): - 
test_file = self.get_test_loc('archive/weird_names/weird_names.tar') + test_file = self.get_test_loc("archive/weird_names/weird_names.tar") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) def test_extract_zip_with_weird_filenames_with_sevenzip_win(self): - test_file = self.get_test_loc('archive/weird_names/weird_names.zip') + test_file = self.get_test_loc("archive/weird_names/weird_names.zip") self.check_extract_weird_names( - sevenzip.extract, test_file, expected_warnings=[], expected_suffix='7zip', - check_warnings=True, check_only_warnings=True) + sevenzip.extract, + test_file, + expected_warnings=[], + expected_suffix="7zip", + check_warnings=True, + check_only_warnings=True, + ) class TestZipSlip(BaseArchiveTestCase): - def test_extract_zipslip_zip_posix(self): - test_file = self.get_test_loc('archive/zipslip/zip-slip.zip') + test_file = self.get_test_loc("archive/zipslip/zip-slip.zip") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result expected = [ - 'dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt', - 'good.txt' + "dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt", + "good.txt", ] if on_windows: try: check_files(test_dir, expected) except AssertionError: - expected = [u'good.txt', u'tmp/evil.txt'] + expected = ["good.txt", "tmp/evil.txt"] check_files(test_dir, expected) else: check_files(test_dir, expected) def test_extract_zipslip_zip_win(self): - test_file = self.get_test_loc('archive/zipslip/zip-slip-win.zip') + test_file = self.get_test_loc("archive/zipslip/zip-slip-win.zip") test_dir = self.get_temp_dir() result = archive.extract_zip(test_file, test_dir) assert [] == result expected = [ - 'dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt', - 'good.txt' + "dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt", + "good.txt", ] if on_windows: try: check_files(test_dir, expected) except AssertionError: - expected = [u'Temp/evil.txt', u'good.txt'] + expected = ["Temp/evil.txt", "good.txt"] check_files(test_dir, expected) else: check_files(test_dir, expected) - @pytest.mark.skipif(on_windows, reason='Fails with WindowsError: [Error 206] The filename or extension is too long:') + @pytest.mark.skipif( + on_windows, + reason="Fails with WindowsError: [Error 206] The filename or extension is too long:", + ) def 
test_extract_zipslip_tar_posix(self): - test_file = self.get_test_loc('archive/zipslip/zip-slip.tar') + test_file = self.get_test_loc("archive/zipslip/zip-slip.tar") test_dir = self.get_temp_dir() result = archive.extract_tar(test_file, test_dir) assert [] == result expected = [ - 'dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt', - 'good.txt' + "dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt", + "good.txt", ] check_files(test_dir, expected) - @pytest.mark.skipif(on_windows, reason='Fails with WindowsError: [Error 206] The filename or extension is too long:') + @pytest.mark.skipif( + on_windows, + reason="Fails with WindowsError: [Error 206] The filename or extension is too long:", + ) def test_extract_zipslip_tar_win(self): - test_file = self.get_test_loc('archive/zipslip/zip-slip-win.tar') + test_file = self.get_test_loc("archive/zipslip/zip-slip-win.tar") test_dir = self.get_temp_dir() result = archive.extract_tar(test_file, test_dir) assert [] == result expected = [ - 'dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt', - 'good.txt' + "dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt", + "good.txt", ] check_files(test_dir, expected) diff --git a/tests/test_extract.py b/tests/test_extract.py index 5e35612..42eb71a 100644 --- a/tests/test_extract.py +++ b/tests/test_extract.py @@ -30,41 +30,47 @@ class TestExtract(BaseArchiveTestCase): - test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + test_data_dir = os.path.join(os.path.dirname(__file__), "data") def test_extract_file_function(self): - test_file = self.get_test_loc('extract/basic_non_nested.tar.gz', copy=True) + test_file = self.get_test_loc("extract/basic_non_nested.tar.gz", copy=True) base = fileutils.parent_directory(test_file) - expected = ['a/b/a.txt', 'a/b/b.txt', 'a/c/c.txt'] - cleaned_test_file = test_file.replace(base, '') + expected = ["a/b/a.txt", "a/b/b.txt", "a/c/c.txt"] + cleaned_test_file = test_file.replace(base, "") expected_event = [ extract.ExtractEvent( source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file), - done=False, warnings=[], errors=[] + done=False, + warnings=[], + errors=[], ), extract.ExtractEvent( source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file), - done=True, warnings=[], errors=[] - ) + done=True, + warnings=[], + errors=[], + ), ] target = extractcode.get_extraction_path(test_file) result = list(extract.extract_file(test_file, target)) - result = [r._replace( - source=cleaned_test_file, - target=extractcode.get_extraction_path(cleaned_test_file)) - for r in 
result] + result = [ + r._replace( + source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file) + ) + for r in result + ] assert expected_event == result check_files(target, expected) def test_extract_archive_non_nested(self): - test_dir = self.get_test_loc('extract/basic_non_nested.tar.gz', copy=True) + test_dir = self.get_test_loc("extract/basic_non_nested.tar.gz", copy=True) expected = ( - 'a/b/a.txt', - 'a/b/b.txt', - 'a/c/c.txt', + "a/b/a.txt", + "a/b/b.txt", + "a/c/c.txt", ) result = extract.extract(test_dir, recurse=False) check_no_error(result) @@ -75,7 +81,7 @@ def test_extract_archive_non_nested(self): check_files(extractcode.get_extraction_path(test_dir), expected) def test_extract_archive_shallow_with_readonly_inside(self): - test_file = self.get_test_loc('extract/readonly/read_only.tar.gz', copy=True) + test_file = self.get_test_loc("extract/readonly/read_only.tar.gz", copy=True) """ This test file was created with: import tarfile, time, datetime, io, os @@ -102,66 +108,63 @@ def test_extract_archive_shallow_with_readonly_inside(self): check_no_error(result) expected = ( - 'somefilename-0.txt', - 'somefilename-1.txt', + "somefilename-0.txt", + "somefilename-1.txt", ) test_dir = extractcode.get_extraction_path(test_file) check_files(test_dir, expected) def test_extract_dir_shallow_with_readonly_inside(self): - test_dir = self.get_test_loc('extract/readonly', copy=True) + test_dir = self.get_test_loc("extract/readonly", copy=True) result = list(extract.extract(test_dir, recurse=False)) check_no_error(result) expected = [ - 'read_only.tar.gz', - 'read_only.tar.gz-extract/somefilename-0.txt', - 'read_only.tar.gz-extract/somefilename-1.txt', + "read_only.tar.gz", + "read_only.tar.gz-extract/somefilename-0.txt", + "read_only.tar.gz-extract/somefilename-1.txt", ] check_files(test_dir, expected) def test_extract_tree_shallow_only(self): expected = ( - 'a/a.tar.gz', - 'a/a.txt', - 'a/a.tar.gz-extract/a/b/a.txt', - 'a/a.tar.gz-extract/a/b/b.txt', - 'a/a.tar.gz-extract/a/c/c.txt', - - 'b/a.txt', - 'b/b.tar.gz', - 'b/b.tar.gz-extract/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/.svn/entries', - 'b/b.tar.gz-extract/b/.svn/format', - 'b/b.tar.gz-extract/b/a/a.tar.gz', - - 'b/b.tar.gz-extract/b/a/a.txt', - 'b/b.tar.gz-extract/b/a/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/a/.svn/entries', - 'b/b.tar.gz-extract/b/a/.svn/format', - 'b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/b/a.txt', - 'b/b.tar.gz-extract/b/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/b/.svn/entries', - 'b/b.tar.gz-extract/b/b/.svn/format', - 'b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz', - 'b/b.tar.gz-extract/b/c/a.txt', - 'b/b.tar.gz-extract/b/c/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/c/.svn/entries', - 'b/b.tar.gz-extract/b/c/.svn/format', - 'b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - - 'c/a.tar.gz', - 'c/a.txt', - 'c/a.tar.gz-extract/a/b/a.txt', - 'c/a.tar.gz-extract/a/b/b.txt', - 'c/a.tar.gz-extract/a/c/c.txt', + "a/a.tar.gz", + "a/a.txt", + "a/a.tar.gz-extract/a/b/a.txt", + "a/a.tar.gz-extract/a/b/b.txt", + "a/a.tar.gz-extract/a/c/c.txt", + "b/a.txt", + "b/b.tar.gz", + "b/b.tar.gz-extract/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/.svn/entries", + 
"b/b.tar.gz-extract/b/.svn/format", + "b/b.tar.gz-extract/b/a/a.tar.gz", + "b/b.tar.gz-extract/b/a/a.txt", + "b/b.tar.gz-extract/b/a/.svn/all-wcprops", + "b/b.tar.gz-extract/b/a/.svn/entries", + "b/b.tar.gz-extract/b/a/.svn/format", + "b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/b/a.txt", + "b/b.tar.gz-extract/b/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/b/.svn/entries", + "b/b.tar.gz-extract/b/b/.svn/format", + "b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz", + "b/b.tar.gz-extract/b/c/a.txt", + "b/b.tar.gz-extract/b/c/.svn/all-wcprops", + "b/b.tar.gz-extract/b/c/.svn/entries", + "b/b.tar.gz-extract/b/c/.svn/format", + "b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "c/a.tar.gz", + "c/a.txt", + "c/a.tar.gz-extract/a/b/a.txt", + "c/a.tar.gz-extract/a/b/b.txt", + "c/a.tar.gz-extract/a/c/c.txt", ) - test_dir = self.get_test_loc('extract/tree', copy=True) + test_dir = self.get_test_loc("extract/tree", copy=True) result = list(extract.extract(test_dir, recurse=False)) check_no_error(result) check_files(test_dir, expected) @@ -172,50 +175,50 @@ def test_extract_tree_shallow_only(self): def test_extract_tree_recursive(self): expected = ( - 'a/a.tar.gz', - 'a/a.txt', - 'a/a.tar.gz-extract/a/b/a.txt', - 'a/a.tar.gz-extract/a/b/b.txt', - 'a/a.tar.gz-extract/a/c/c.txt', - 'b/a.txt', - 'b/b.tar.gz', - 'b/b.tar.gz-extract/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/.svn/entries', - 'b/b.tar.gz-extract/b/.svn/format', - 'b/b.tar.gz-extract/b/a/a.tar.gz', - 'b/b.tar.gz-extract/b/a/a.txt', - 'b/b.tar.gz-extract/b/a/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/a/.svn/entries', - 'b/b.tar.gz-extract/b/a/.svn/format', - 'b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt', - 'b/b.tar.gz-extract/b/b/a.txt', - 'b/b.tar.gz-extract/b/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/b/.svn/entries', - 'b/b.tar.gz-extract/b/b/.svn/format', - 'b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz', - 'b/b.tar.gz-extract/b/c/a.txt', - 'b/b.tar.gz-extract/b/c/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/c/.svn/entries', - 'b/b.tar.gz-extract/b/c/.svn/format', - 'b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt', - 'c/a.tar.gz', - 'c/a.txt', - 'c/a.tar.gz-extract/a/b/a.txt', - 'c/a.tar.gz-extract/a/b/b.txt', - 'c/a.tar.gz-extract/a/c/c.txt', + "a/a.tar.gz", + "a/a.txt", + "a/a.tar.gz-extract/a/b/a.txt", + "a/a.tar.gz-extract/a/b/b.txt", + "a/a.tar.gz-extract/a/c/c.txt", + "b/a.txt", + "b/b.tar.gz", + "b/b.tar.gz-extract/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/.svn/entries", + "b/b.tar.gz-extract/b/.svn/format", + "b/b.tar.gz-extract/b/a/a.tar.gz", + "b/b.tar.gz-extract/b/a/a.txt", + 
"b/b.tar.gz-extract/b/a/.svn/all-wcprops", + "b/b.tar.gz-extract/b/a/.svn/entries", + "b/b.tar.gz-extract/b/a/.svn/format", + "b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt", + "b/b.tar.gz-extract/b/b/a.txt", + "b/b.tar.gz-extract/b/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/b/.svn/entries", + "b/b.tar.gz-extract/b/b/.svn/format", + "b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz", + "b/b.tar.gz-extract/b/c/a.txt", + "b/b.tar.gz-extract/b/c/.svn/all-wcprops", + "b/b.tar.gz-extract/b/c/.svn/entries", + "b/b.tar.gz-extract/b/c/.svn/format", + "b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt", + "c/a.tar.gz", + "c/a.txt", + "c/a.tar.gz-extract/a/b/a.txt", + "c/a.tar.gz-extract/a/b/b.txt", + "c/a.tar.gz-extract/a/c/c.txt", ) - test_dir = self.get_test_loc('extract/tree', copy=True) + test_dir = self.get_test_loc("extract/tree", copy=True) result = list(extract.extract(test_dir, recurse=True)) check_no_error(result) check_files(test_dir, expected) @@ -226,45 +229,45 @@ def test_extract_tree_recursive(self): def test_extract_tree_recursive_replace_originals(self): expected = ( - 'a/a.txt', - 'a/a.tar.gz/a/b/a.txt', - 'a/a.tar.gz/a/b/b.txt', - 'a/a.tar.gz/a/c/c.txt', - 'b/a.txt', - 'b/b.tar.gz/b/.svn/all-wcprops', - 'b/b.tar.gz/b/.svn/entries', - 'b/b.tar.gz/b/.svn/format', - 'b/b.tar.gz/b/a/a.txt', - 'b/b.tar.gz/b/a/.svn/all-wcprops', - 'b/b.tar.gz/b/a/.svn/entries', - 'b/b.tar.gz/b/a/.svn/format', - 'b/b.tar.gz/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz/b/a/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz/b/a/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz/b/a/a.tar.gz/a/b/a.txt', - 'b/b.tar.gz/b/a/a.tar.gz/a/b/b.txt', - 'b/b.tar.gz/b/a/a.tar.gz/a/c/c.txt', - 'b/b.tar.gz/b/b/a.txt', - 'b/b.tar.gz/b/b/.svn/all-wcprops', - 'b/b.tar.gz/b/b/.svn/entries', - 'b/b.tar.gz/b/b/.svn/format', - 'b/b.tar.gz/b/b/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz/b/c/a.txt', - 'b/b.tar.gz/b/c/.svn/all-wcprops', - 'b/b.tar.gz/b/c/.svn/entries', - 'b/b.tar.gz/b/c/.svn/format', - 'b/b.tar.gz/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz/b/c/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz/b/c/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz/b/c/a.tar.gz/a/b/a.txt', - 'b/b.tar.gz/b/c/a.tar.gz/a/b/b.txt', - 'b/b.tar.gz/b/c/a.tar.gz/a/c/c.txt', - 'c/a.txt', - 'c/a.tar.gz/a/b/a.txt', - 'c/a.tar.gz/a/b/b.txt', - 'c/a.tar.gz/a/c/c.txt', + "a/a.txt", + "a/a.tar.gz/a/b/a.txt", + "a/a.tar.gz/a/b/b.txt", + "a/a.tar.gz/a/c/c.txt", + "b/a.txt", + "b/b.tar.gz/b/.svn/all-wcprops", + "b/b.tar.gz/b/.svn/entries", + "b/b.tar.gz/b/.svn/format", + "b/b.tar.gz/b/a/a.txt", + "b/b.tar.gz/b/a/.svn/all-wcprops", + "b/b.tar.gz/b/a/.svn/entries", + "b/b.tar.gz/b/a/.svn/format", + "b/b.tar.gz/b/a/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz/b/a/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz/b/a/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz/b/a/a.tar.gz/a/b/a.txt", + "b/b.tar.gz/b/a/a.tar.gz/a/b/b.txt", + 
"b/b.tar.gz/b/a/a.tar.gz/a/c/c.txt", + "b/b.tar.gz/b/b/a.txt", + "b/b.tar.gz/b/b/.svn/all-wcprops", + "b/b.tar.gz/b/b/.svn/entries", + "b/b.tar.gz/b/b/.svn/format", + "b/b.tar.gz/b/b/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz/b/c/a.txt", + "b/b.tar.gz/b/c/.svn/all-wcprops", + "b/b.tar.gz/b/c/.svn/entries", + "b/b.tar.gz/b/c/.svn/format", + "b/b.tar.gz/b/c/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz/b/c/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz/b/c/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz/b/c/a.tar.gz/a/b/a.txt", + "b/b.tar.gz/b/c/a.tar.gz/a/b/b.txt", + "b/b.tar.gz/b/c/a.tar.gz/a/c/c.txt", + "c/a.txt", + "c/a.tar.gz/a/b/a.txt", + "c/a.tar.gz/a/b/b.txt", + "c/a.tar.gz/a/c/c.txt", ) - test_dir = self.get_test_loc('extract/tree', copy=True) + test_dir = self.get_test_loc("extract/tree", copy=True) result = list(extract.extract(test_dir, recurse=True, replace_originals=True)) check_no_error(result) check_files(test_dir, expected) @@ -274,110 +277,108 @@ def test_extract_tree_recursive_replace_originals(self): check_files(test_dir, expected) def test_extract_with_replace_originals_does_not_fail_with_gz_with_trailing(self): - expected = ('rake.1.gz',) - test_dir = self.get_test_loc('extract/replace-originals/rake.1.gz', copy=True) + expected = ("rake.1.gz",) + test_dir = self.get_test_loc("extract/replace-originals/rake.1.gz", copy=True) result = list(extract.extract(test_dir, recurse=True, replace_originals=True)) r = result[-1] - assert r.errors and all(e.startswith('Not a gzipped file') for e in r.errors) + assert r.errors and all(e.startswith("Not a gzipped file") for e in r.errors) assert not r.warnings check_files(test_dir, expected) def test_extract_with_replace_originals_does_not_fail_with_corrupted_archive(self): - expected = ('issue6550.gz',) - test_dir = self.get_test_loc('extract/replace-originals/issue6550.gz', copy=True) + expected = ("issue6550.gz",) + test_dir = self.get_test_loc("extract/replace-originals/issue6550.gz", copy=True) result = list(extract.extract(test_dir, recurse=True, replace_originals=True)) r = result[-1] - assert r.errors and all(e.startswith('Error') for e in r.errors) + assert r.errors and all(e.startswith("Error") for e in r.errors) assert not r.warnings check_files(test_dir, expected) def test_extract_tree_shallow_then_recursive(self): shallow = ( - 'a/a.tar.gz', - 'a/a.txt', - 'a/a.tar.gz-extract/a/b/a.txt', - 'a/a.tar.gz-extract/a/b/b.txt', - 'a/a.tar.gz-extract/a/c/c.txt', - - 'b/a.txt', - 'b/b.tar.gz', - 'b/b.tar.gz-extract/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/.svn/entries', - 'b/b.tar.gz-extract/b/.svn/format', - 'b/b.tar.gz-extract/b/a/a.tar.gz', - 'b/b.tar.gz-extract/b/a/a.txt', - 'b/b.tar.gz-extract/b/a/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/a/.svn/entries', - 'b/b.tar.gz-extract/b/a/.svn/format', - 'b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/b/a.txt', - 'b/b.tar.gz-extract/b/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/b/.svn/entries', - 'b/b.tar.gz-extract/b/b/.svn/format', - 'b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz', - 'b/b.tar.gz-extract/b/c/a.txt', - 'b/b.tar.gz-extract/b/c/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/c/.svn/entries', - 'b/b.tar.gz-extract/b/c/.svn/format', - 'b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 
'b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - - 'c/a.tar.gz', - 'c/a.txt', - 'c/a.tar.gz-extract/a/b/a.txt', - 'c/a.tar.gz-extract/a/b/b.txt', - 'c/a.tar.gz-extract/a/c/c.txt', + "a/a.tar.gz", + "a/a.txt", + "a/a.tar.gz-extract/a/b/a.txt", + "a/a.tar.gz-extract/a/b/b.txt", + "a/a.tar.gz-extract/a/c/c.txt", + "b/a.txt", + "b/b.tar.gz", + "b/b.tar.gz-extract/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/.svn/entries", + "b/b.tar.gz-extract/b/.svn/format", + "b/b.tar.gz-extract/b/a/a.tar.gz", + "b/b.tar.gz-extract/b/a/a.txt", + "b/b.tar.gz-extract/b/a/.svn/all-wcprops", + "b/b.tar.gz-extract/b/a/.svn/entries", + "b/b.tar.gz-extract/b/a/.svn/format", + "b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/b/a.txt", + "b/b.tar.gz-extract/b/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/b/.svn/entries", + "b/b.tar.gz-extract/b/b/.svn/format", + "b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz", + "b/b.tar.gz-extract/b/c/a.txt", + "b/b.tar.gz-extract/b/c/.svn/all-wcprops", + "b/b.tar.gz-extract/b/c/.svn/entries", + "b/b.tar.gz-extract/b/c/.svn/format", + "b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "c/a.tar.gz", + "c/a.txt", + "c/a.tar.gz-extract/a/b/a.txt", + "c/a.tar.gz-extract/a/b/b.txt", + "c/a.tar.gz-extract/a/c/c.txt", ) recursed = ( - 'a/a.tar.gz', - 'a/a.txt', - 'a/a.tar.gz-extract/a/b/a.txt', - 'a/a.tar.gz-extract/a/b/b.txt', - 'a/a.tar.gz-extract/a/c/c.txt', - 'b/a.txt', - 'b/b.tar.gz', - 'b/b.tar.gz-extract/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/.svn/entries', - 'b/b.tar.gz-extract/b/.svn/format', - 'b/b.tar.gz-extract/b/a/a.tar.gz', - 'b/b.tar.gz-extract/b/a/a.txt', - 'b/b.tar.gz-extract/b/a/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/a/.svn/entries', - 'b/b.tar.gz-extract/b/a/.svn/format', - 'b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt', - 'b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt', - 'b/b.tar.gz-extract/b/b/a.txt', - 'b/b.tar.gz-extract/b/b/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/b/.svn/entries', - 'b/b.tar.gz-extract/b/b/.svn/format', - 'b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz', - 'b/b.tar.gz-extract/b/c/a.txt', - 'b/b.tar.gz-extract/b/c/.svn/all-wcprops', - 'b/b.tar.gz-extract/b/c/.svn/entries', - 'b/b.tar.gz-extract/b/c/.svn/format', - 'b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt', - 'b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt', - 'c/a.tar.gz', - 'c/a.txt', - 'c/a.tar.gz-extract/a/b/a.txt', - 'c/a.tar.gz-extract/a/b/b.txt', - 'c/a.tar.gz-extract/a/c/c.txt', + "a/a.tar.gz", + "a/a.txt", + "a/a.tar.gz-extract/a/b/a.txt", + "a/a.tar.gz-extract/a/b/b.txt", + "a/a.tar.gz-extract/a/c/c.txt", + "b/a.txt", + "b/b.tar.gz", + "b/b.tar.gz-extract/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/.svn/entries", + 
"b/b.tar.gz-extract/b/.svn/format", + "b/b.tar.gz-extract/b/a/a.tar.gz", + "b/b.tar.gz-extract/b/a/a.txt", + "b/b.tar.gz-extract/b/a/.svn/all-wcprops", + "b/b.tar.gz-extract/b/a/.svn/entries", + "b/b.tar.gz-extract/b/a/.svn/format", + "b/b.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt", + "b/b.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt", + "b/b.tar.gz-extract/b/b/a.txt", + "b/b.tar.gz-extract/b/b/.svn/all-wcprops", + "b/b.tar.gz-extract/b/b/.svn/entries", + "b/b.tar.gz-extract/b/b/.svn/format", + "b/b.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz", + "b/b.tar.gz-extract/b/c/a.txt", + "b/b.tar.gz-extract/b/c/.svn/all-wcprops", + "b/b.tar.gz-extract/b/c/.svn/entries", + "b/b.tar.gz-extract/b/c/.svn/format", + "b/b.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "b/b.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt", + "b/b.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt", + "c/a.tar.gz", + "c/a.txt", + "c/a.tar.gz-extract/a/b/a.txt", + "c/a.tar.gz-extract/a/b/b.txt", + "c/a.tar.gz-extract/a/c/c.txt", ) - test_dir = self.get_test_loc('extract/tree', copy=True) + test_dir = self.get_test_loc("extract/tree", copy=True) result = list(extract.extract(test_dir, recurse=False)) check_no_error(result) check_files(test_dir, shallow) @@ -388,61 +389,60 @@ def test_extract_tree_shallow_then_recursive(self): def test_uncompress_corrupted_archive_with_zlib(self): from extractcode import archive - test_file = self.get_test_loc('extract/corrupted/a.tar.gz', copy=True) + + test_file = self.get_test_loc("extract/corrupted/a.tar.gz", copy=True) test_dir = self.get_temp_dir() - expected = Exception('Error -3 while decompressing') + expected = Exception("Error -3 while decompressing") self.assertRaisesInstance(expected, archive.uncompress_gzip, test_file, test_dir) def test_uncompress_corrupted_archive_with_libarchive(self): from extractcode import libarchive2 - test_file = self.get_test_loc('extract/corrupted/a.tar.gz', copy=True) + + test_file = self.get_test_loc("extract/corrupted/a.tar.gz", copy=True) test_dir = self.get_temp_dir() - expected = Exception('gzip decompression failed') + expected = Exception("gzip decompression failed") self.assertRaisesInstance(expected, libarchive2.extract, test_file, test_dir) - @pytest.mark.skipif(not on_linux, reason='Expectations are different on Windows and macOS') + @pytest.mark.skipif(not on_linux, reason="Expectations are different on Windows and macOS") def test_extract_tree_with_corrupted_archives_linux(self): - expected = ( - 'a.tar.gz', - ) - test_dir = self.get_test_loc('extract/corrupted', copy=True) + expected = ("a.tar.gz",) + test_dir = self.get_test_loc("extract/corrupted", copy=True) result = list(extract.extract(test_dir, recurse=False)) check_files(test_dir, expected) assert len(result) == 2 result = result[1] assert len(result.errors) == 1 - assert result.errors[0].startswith('gzip decompression failed') + assert result.errors[0].startswith("gzip decompression failed") assert not result.warnings - @pytest.mark.skipif(on_linux, reason='Expectations are different on Windows and macOS') + 
@pytest.mark.skipif(on_linux, reason="Expectations are different on Windows and macOS") def test_extract_tree_with_corrupted_archives_mac_win(self): - expected = ( - 'a.tar.gz', - ) - test_dir = self.get_test_loc('extract/corrupted', copy=True) + expected = ("a.tar.gz",) + test_dir = self.get_test_loc("extract/corrupted", copy=True) result = list(extract.extract(test_dir, recurse=False)) check_files(test_dir, expected) assert len(result) == 2 result = result[1] - errs = ['gzip decompression failed'] + errs = ["gzip decompression failed"] assert errs == result.errors assert not result.warnings def test_extract_with_empty_dir_and_small_files_ignores_empty_dirs(self): expected = ( - 'empty_small.zip', - 'empty_small.zip-extract/empty_dirs_and_small_files/small_files/small_file.txt',) - test_dir = self.get_test_loc('extract/small', copy=True) + "empty_small.zip", + "empty_small.zip-extract/empty_dirs_and_small_files/small_files/small_file.txt", + ) + test_dir = self.get_test_loc("extract/small", copy=True) result = list(extract.extract(test_dir, recurse=True)) check_no_error(result) check_files(test_dir, expected) def test_extract_tar_with_broken_links(self): - test_dir = self.get_test_loc('extract/broken_link', copy=True) + test_dir = self.get_test_loc("extract/broken_link", copy=True) result = list(extract.extract(test_dir, recurse=True)) expected = ( - 'broken-link.tar.bz2', - 'broken-link.tar.bz2-extract/openssl/test/Makefile', + "broken-link.tar.bz2", + "broken-link.tar.bz2-extract/openssl/test/Makefile", ) check_files(test_dir, expected) expected_warning = [[], []] @@ -450,111 +450,111 @@ def test_extract_tar_with_broken_links(self): assert expected_warning == warns def test_extract_nested_tar_file_recurse_only(self): - test_file = self.get_test_loc('extract/nested/nested_tars.tar.gz', copy=True) + test_file = self.get_test_loc("extract/nested/nested_tars.tar.gz", copy=True) expected = [ - 'nested_tars.tar.gz', - 'nested_tars.tar.gz-extract/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/a/.svn/entries', - 'nested_tars.tar.gz-extract/b/a/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/a/a.txt', - 'nested_tars.tar.gz-extract/b/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/b/.svn/format', - 'nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/c/.svn/entries', - 'nested_tars.tar.gz-extract/b/c/.svn/format', - 'nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt', - 
'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/c/a.txt' + "nested_tars.tar.gz", + "nested_tars.tar.gz-extract/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/.svn/entries", + "nested_tars.tar.gz-extract/b/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/a/.svn/entries", + "nested_tars.tar.gz-extract/b/a/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/a/a.tar.gz", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/a/a.txt", + "nested_tars.tar.gz-extract/b/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/b/.svn/entries", + "nested_tars.tar.gz-extract/b/b/.svn/format", + "nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/b/a.txt", + "nested_tars.tar.gz-extract/b/c/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/c/.svn/entries", + "nested_tars.tar.gz-extract/b/c/.svn/format", + "nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/c/a.tar.gz", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/c/a.txt", ] result = list(extract.extract(test_file, recurse=True)) check_no_error(result) check_files(test_file, expected) def test_extract_nested_tar_file_shallow_only(self): - test_dir = self.get_test_loc('extract/nested/nested_tars.tar.gz', copy=True) + test_dir = self.get_test_loc("extract/nested/nested_tars.tar.gz", copy=True) expected = [ - 'nested_tars.tar.gz', - 'nested_tars.tar.gz-extract/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/a/.svn/entries', - 'nested_tars.tar.gz-extract/b/a/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz', - 'nested_tars.tar.gz-extract/b/a/a.txt', - 'nested_tars.tar.gz-extract/b/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/b/.svn/format', - 'nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/c/.svn/entries', - 'nested_tars.tar.gz-extract/b/c/.svn/format', - 'nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz', - 'nested_tars.tar.gz-extract/b/c/a.txt' + "nested_tars.tar.gz", + "nested_tars.tar.gz-extract/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/.svn/entries", 
+ "nested_tars.tar.gz-extract/b/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/a/.svn/entries", + "nested_tars.tar.gz-extract/b/a/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/a/a.tar.gz", + "nested_tars.tar.gz-extract/b/a/a.txt", + "nested_tars.tar.gz-extract/b/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/b/.svn/entries", + "nested_tars.tar.gz-extract/b/b/.svn/format", + "nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/b/a.txt", + "nested_tars.tar.gz-extract/b/c/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/c/.svn/entries", + "nested_tars.tar.gz-extract/b/c/.svn/format", + "nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/c/a.tar.gz", + "nested_tars.tar.gz-extract/b/c/a.txt", ] result1 = list(extract.extract(test_dir, recurse=False)) check_no_error(result1) check_files(test_dir, expected) def test_extract_nested_tar_file_shallow_then_recurse(self): - test_file = self.get_test_loc('extract/nested/nested_tars.tar.gz', copy=True) + test_file = self.get_test_loc("extract/nested/nested_tars.tar.gz", copy=True) expected = [ - 'nested_tars.tar.gz', - 'nested_tars.tar.gz-extract/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/a/.svn/entries', - 'nested_tars.tar.gz-extract/b/a/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/a/a.txt', - 'nested_tars.tar.gz-extract/b/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/b/.svn/format', - 'nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/c/.svn/entries', - 'nested_tars.tar.gz-extract/b/c/.svn/format', - 'nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/c/a.txt' + "nested_tars.tar.gz", + "nested_tars.tar.gz-extract/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/.svn/entries", + "nested_tars.tar.gz-extract/b/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/a/.svn/entries", + "nested_tars.tar.gz-extract/b/a/.svn/format", + 
"nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/a/a.tar.gz", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/a/a.txt", + "nested_tars.tar.gz-extract/b/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/b/.svn/entries", + "nested_tars.tar.gz-extract/b/b/.svn/format", + "nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/b/a.txt", + "nested_tars.tar.gz-extract/b/c/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/c/.svn/entries", + "nested_tars.tar.gz-extract/b/c/.svn/format", + "nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/c/a.tar.gz", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/c/a.txt", ] result1 = list(extract.extract(test_file, recurse=False)) result2 = list(extract.extract(test_file, recurse=True)) @@ -563,39 +563,39 @@ def test_extract_nested_tar_file_shallow_then_recurse(self): check_files(test_file, expected) def test_extract_dir_with_nested_tar_file_shallow_then_recurse(self): - test_dir = self.get_test_loc('extract/nested', copy=True) + test_dir = self.get_test_loc("extract/nested", copy=True) expected = [ - 'nested_tars.tar.gz', - 'nested_tars.tar.gz-extract/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/a/.svn/entries', - 'nested_tars.tar.gz-extract/b/a/.svn/format', - 'nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt', - 'nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/a/a.txt', - 'nested_tars.tar.gz-extract/b/b/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/b/.svn/entries', - 'nested_tars.tar.gz-extract/b/b/.svn/format', - 'nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/.svn/all-wcprops', - 'nested_tars.tar.gz-extract/b/c/.svn/entries', - 'nested_tars.tar.gz-extract/b/c/.svn/format', - 'nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base', - 'nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt', - 'nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt', - 'nested_tars.tar.gz-extract/b/c/a.txt' + "nested_tars.tar.gz", + 
"nested_tars.tar.gz-extract/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/.svn/entries", + "nested_tars.tar.gz-extract/b/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/a/.svn/entries", + "nested_tars.tar.gz-extract/b/a/.svn/format", + "nested_tars.tar.gz-extract/b/a/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/a/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/a/a.tar.gz", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/a/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/a/a.txt", + "nested_tars.tar.gz-extract/b/b/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/b/.svn/entries", + "nested_tars.tar.gz-extract/b/b/.svn/format", + "nested_tars.tar.gz-extract/b/b/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/b/a.txt", + "nested_tars.tar.gz-extract/b/c/.svn/all-wcprops", + "nested_tars.tar.gz-extract/b/c/.svn/entries", + "nested_tars.tar.gz-extract/b/c/.svn/format", + "nested_tars.tar.gz-extract/b/c/.svn/prop-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.tar.gz.svn-base", + "nested_tars.tar.gz-extract/b/c/.svn/text-base/a.txt.svn-base", + "nested_tars.tar.gz-extract/b/c/a.tar.gz", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/a.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/b/b.txt", + "nested_tars.tar.gz-extract/b/c/a.tar.gz-extract/a/c/c.txt", + "nested_tars.tar.gz-extract/b/c/a.txt", ] result1 = list(extract.extract(test_dir, recurse=False)) result2 = list(extract.extract(test_dir, recurse=True)) @@ -604,38 +604,38 @@ def test_extract_dir_with_nested_tar_file_shallow_then_recurse(self): check_files(test_dir, expected) def test_extract_zip_with_spaces_in_name(self): - test_dir = self.get_test_loc('extract/space-zip', copy=True) + test_dir = self.get_test_loc("extract/space-zip", copy=True) expected = ( - 'with spaces in name.zip', - 'with spaces in name.zip-extract/empty_dirs_and_small_files/small_files/small_file.txt' + "with spaces in name.zip", + "with spaces in name.zip-extract/empty_dirs_and_small_files/small_files/small_file.txt", ) result = list(extract.extract(test_dir, recurse=True)) check_no_error(result) check_files(test_dir, expected) def test_extract_tar_gz_with_spaces_in_name(self): - test_dir = self.get_test_loc('extract/space-tgz', copy=True) + test_dir = self.get_test_loc("extract/space-tgz", copy=True) expected = ( - 'with spaces in name.tar.gz', - 'with spaces in name.tar.gz-extract/a/b/a.txt', - 'with spaces in name.tar.gz-extract/a/b/b.txt', - 'with spaces in name.tar.gz-extract/a/c/c.txt', + "with spaces in name.tar.gz", + "with spaces in name.tar.gz-extract/a/b/a.txt", + "with spaces in name.tar.gz-extract/a/b/b.txt", + "with spaces in name.tar.gz-extract/a/c/c.txt", ) result = list(extract.extract(test_dir, recurse=True)) check_no_error(result) check_files(test_dir, expected) def test_extract_tar_with_special_files(self): - test_dir = self.get_test_loc('extract/special', copy=True) + test_dir = self.get_test_loc("extract/special", copy=True) expected = [ - 't.tgz', - 't.tgz-extract/0-REGTYPE', - 't.tgz-extract/0-REGTYPE-TEXT', - 't.tgz-extract/0-REGTYPE-VEEEERY_LONG_NAME_____________________________________________________________________________________________________________________155', + "t.tgz", + 
"t.tgz-extract/0-REGTYPE", + "t.tgz-extract/0-REGTYPE-TEXT", + "t.tgz-extract/0-REGTYPE-VEEEERY_LONG_NAME_____________________________________________________________________________________________________________________155", # we skip links but not hardlinks - 't.tgz-extract/1-LNKTYPE', - 't.tgz-extract/S-SPARSE', - 't.tgz-extract/S-SPARSE-WITH-NULLS', + "t.tgz-extract/1-LNKTYPE", + "t.tgz-extract/S-SPARSE", + "t.tgz-extract/S-SPARSE-WITH-NULLS", ] result = list(extract.extract(test_dir, recurse=True)) check_files(test_dir, expected) @@ -647,127 +647,126 @@ def test_extract_tar_with_special_files(self): assert [] == warns def test_extract_directory_of_windows_ar_archives(self): - test_dir = self.get_test_loc('extract/ar_tree/winlib', copy=True) + test_dir = self.get_test_loc("extract/ar_tree/winlib", copy=True) result = list(extract.extract(test_dir, recurse=True)) - expected = self.get_test_loc('extract/ar_tree/winlib-expected.json') + expected = self.get_test_loc("extract/ar_tree/winlib-expected.json") check_files(test_dir, expected, regen=False) check_no_error(result) def test_extract_nested_arch_with_corruption_should_extract_inner_archives_only_once(self): test_file = self.get_test_loc( - 'extract/nested_not_compressed/nested_with_not_compressed_gz_file.tgz', copy=True) + "extract/nested_not_compressed/nested_with_not_compressed_gz_file.tgz", copy=True + ) expected = [ - 'nested_with_not_compressed_gz_file.tgz', - 'nested_with_not_compressed_gz_file.tgz-extract/top/file', - 'nested_with_not_compressed_gz_file.tgz-extract/top/notcompressed.gz' + "nested_with_not_compressed_gz_file.tgz", + "nested_with_not_compressed_gz_file.tgz-extract/top/file", + "nested_with_not_compressed_gz_file.tgz-extract/top/notcompressed.gz", ] result = list(extract.extract(test_file, recurse=True)) check_no_error(result) check_files(test_file, expected) def test_extract_directory_with_office_docs(self): - test_dir = self.get_test_loc('extract/office_docs', copy=True) + test_dir = self.get_test_loc("extract/office_docs", copy=True) result = list(extract.extract(test_dir, kinds=(extractcode.docs,), recurse=True)) expected = [ - 'abc.docx', - 'abc.docx-extract/[Content_Types].xml', - 'abc.docx-extract/docProps/app.xml', - 'abc.docx-extract/docProps/core.xml', - 'abc.docx-extract/_rels/.rels', - 'abc.docx-extract/word/fontTable.xml', - 'abc.docx-extract/word/document.xml', - 'abc.docx-extract/word/settings.xml', - 'abc.docx-extract/word/numbering.xml', - 'abc.docx-extract/word/activeX/activeX1.xml', - 'abc.docx-extract/word/activeX/activeX2.xml', - 'abc.docx-extract/word/activeX/activeX3.xml', - 'abc.docx-extract/word/activeX/_rels/activeX1.xml.rels', - 'abc.docx-extract/word/activeX/_rels/activeX2.xml.rels', - 'abc.docx-extract/word/activeX/_rels/activeX3.xml.rels', - 'abc.docx-extract/word/activeX/activeX1.bin', - 'abc.docx-extract/word/activeX/activeX3.bin', - 'abc.docx-extract/word/activeX/activeX2.bin', - 'abc.docx-extract/word/webSettings.xml', - 'abc.docx-extract/word/styles.xml', - 'abc.docx-extract/word/theme/theme1.xml', - 'abc.docx-extract/word/_rels/document.xml.rels', - 'abc.docx-extract/word/stylesWithEffects.xml', - 'abc.docx-extract/word/media/image1.gif', - 'abc.docx-extract/word/media/image4.wmf', - 'abc.docx-extract/word/media/image2.wmf', - 'abc.docx-extract/word/media/image3.wmf', - - 'excel.xlsx', - 'excel.xlsx-extract/[Content_Types].xml', - 'excel.xlsx-extract/docProps/app.xml', - 'excel.xlsx-extract/docProps/core.xml', - 'excel.xlsx-extract/_rels/.rels', - 
'excel.xlsx-extract/xl/workbook.xml', - 'excel.xlsx-extract/xl/worksheets/sheet2.xml', - 'excel.xlsx-extract/xl/worksheets/sheet3.xml', - 'excel.xlsx-extract/xl/worksheets/sheet1.xml', - 'excel.xlsx-extract/xl/styles.xml', - 'excel.xlsx-extract/xl/theme/theme1.xml', - 'excel.xlsx-extract/xl/_rels/workbook.xml.rels', - 'excel.xlsx-extract/xl/sharedStrings.xml', - - 'ppt.pptx', - 'ppt.pptx-extract/[Content_Types].xml', - 'ppt.pptx-extract/docProps/app.xml', - 'ppt.pptx-extract/docProps/thumbnail.jpeg', - 'ppt.pptx-extract/docProps/core.xml', - 'ppt.pptx-extract/_rels/.rels', - 'ppt.pptx-extract/ppt/viewProps.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout9.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout8.xml', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout5.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout4.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout2.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout3.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout8.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout9.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout11.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout10.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout6.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout7.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout1.xml.rels', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout3.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout2.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout1.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout5.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout4.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout6.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout10.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout11.xml', - 'ppt.pptx-extract/ppt/slideLayouts/slideLayout7.xml', - 'ppt.pptx-extract/ppt/presentation.xml', - 'ppt.pptx-extract/ppt/slideMasters/slideMaster1.xml', - 'ppt.pptx-extract/ppt/slideMasters/_rels/slideMaster1.xml.rels', - 'ppt.pptx-extract/ppt/slides/slide1.xml', - 'ppt.pptx-extract/ppt/slides/_rels/slide1.xml.rels', - 'ppt.pptx-extract/ppt/theme/theme1.xml', - 'ppt.pptx-extract/ppt/_rels/presentation.xml.rels', - 'ppt.pptx-extract/ppt/presProps.xml', - 'ppt.pptx-extract/ppt/tableStyles.xml', - 'ppt.pptx-extract/ppt/media/image1.png' + "abc.docx", + "abc.docx-extract/[Content_Types].xml", + "abc.docx-extract/docProps/app.xml", + "abc.docx-extract/docProps/core.xml", + "abc.docx-extract/_rels/.rels", + "abc.docx-extract/word/fontTable.xml", + "abc.docx-extract/word/document.xml", + "abc.docx-extract/word/settings.xml", + "abc.docx-extract/word/numbering.xml", + "abc.docx-extract/word/activeX/activeX1.xml", + "abc.docx-extract/word/activeX/activeX2.xml", + "abc.docx-extract/word/activeX/activeX3.xml", + "abc.docx-extract/word/activeX/_rels/activeX1.xml.rels", + "abc.docx-extract/word/activeX/_rels/activeX2.xml.rels", + "abc.docx-extract/word/activeX/_rels/activeX3.xml.rels", + "abc.docx-extract/word/activeX/activeX1.bin", + "abc.docx-extract/word/activeX/activeX3.bin", + "abc.docx-extract/word/activeX/activeX2.bin", + "abc.docx-extract/word/webSettings.xml", + "abc.docx-extract/word/styles.xml", + "abc.docx-extract/word/theme/theme1.xml", + "abc.docx-extract/word/_rels/document.xml.rels", + "abc.docx-extract/word/stylesWithEffects.xml", + "abc.docx-extract/word/media/image1.gif", + 
"abc.docx-extract/word/media/image4.wmf", + "abc.docx-extract/word/media/image2.wmf", + "abc.docx-extract/word/media/image3.wmf", + "excel.xlsx", + "excel.xlsx-extract/[Content_Types].xml", + "excel.xlsx-extract/docProps/app.xml", + "excel.xlsx-extract/docProps/core.xml", + "excel.xlsx-extract/_rels/.rels", + "excel.xlsx-extract/xl/workbook.xml", + "excel.xlsx-extract/xl/worksheets/sheet2.xml", + "excel.xlsx-extract/xl/worksheets/sheet3.xml", + "excel.xlsx-extract/xl/worksheets/sheet1.xml", + "excel.xlsx-extract/xl/styles.xml", + "excel.xlsx-extract/xl/theme/theme1.xml", + "excel.xlsx-extract/xl/_rels/workbook.xml.rels", + "excel.xlsx-extract/xl/sharedStrings.xml", + "ppt.pptx", + "ppt.pptx-extract/[Content_Types].xml", + "ppt.pptx-extract/docProps/app.xml", + "ppt.pptx-extract/docProps/thumbnail.jpeg", + "ppt.pptx-extract/docProps/core.xml", + "ppt.pptx-extract/_rels/.rels", + "ppt.pptx-extract/ppt/viewProps.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout9.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout8.xml", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout5.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout4.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout2.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout3.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout8.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout9.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout11.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout10.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout6.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout7.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/_rels/slideLayout1.xml.rels", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout3.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout2.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout1.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout5.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout4.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout6.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout10.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout11.xml", + "ppt.pptx-extract/ppt/slideLayouts/slideLayout7.xml", + "ppt.pptx-extract/ppt/presentation.xml", + "ppt.pptx-extract/ppt/slideMasters/slideMaster1.xml", + "ppt.pptx-extract/ppt/slideMasters/_rels/slideMaster1.xml.rels", + "ppt.pptx-extract/ppt/slides/slide1.xml", + "ppt.pptx-extract/ppt/slides/_rels/slide1.xml.rels", + "ppt.pptx-extract/ppt/theme/theme1.xml", + "ppt.pptx-extract/ppt/_rels/presentation.xml.rels", + "ppt.pptx-extract/ppt/presProps.xml", + "ppt.pptx-extract/ppt/tableStyles.xml", + "ppt.pptx-extract/ppt/media/image1.png", ] check_files(test_dir, expected) check_no_error(result) def touch(self, location): - with io.open(location, 'w') as f: - f.write(u'\n') + with io.open(location, "w") as f: + f.write("\n") def fake_extract(self, location): - extracted = os.path.join(location + 'extract') + extracted = os.path.join(location + "extract") os.mkdir(extracted) - self.touch(os.path.join(extracted, 'extracted_file')) + self.touch(os.path.join(extracted, "extracted_file")) return extracted def extract_walker(self, test_dir): for top, dirs, files in os.walk(test_dir, topdown=True): for f in files: - if not f.endswith('-extract') and f.endswith('.gz'): + if not f.endswith("-extract") and f.endswith(".gz"): extracted = self.fake_extract(os.path.join(top, f)) for x in 
self.extract_walker(os.path.join(top, extracted)): yield x @@ -775,21 +774,23 @@ def extract_walker(self, test_dir): def test_walk_can_be_extended_while_walking(self): test_dir = self.get_temp_dir() - self.touch(os.path.join(test_dir, 'file')) - self.touch(os.path.join(test_dir, 'arch.gz')) - os.mkdir(os.path.join(test_dir, 'dir')) - self.touch(os.path.join(test_dir, 'dir', 'otherarch.gz')) + self.touch(os.path.join(test_dir, "file")) + self.touch(os.path.join(test_dir, "arch.gz")) + os.mkdir(os.path.join(test_dir, "dir")) + self.touch(os.path.join(test_dir, "dir", "otherarch.gz")) allpaths = [] for top, dirs, files in self.extract_walker(test_dir): - allpaths.extend([as_posixpath(os.path.join(top, d).replace(test_dir, '')) for d in dirs + files]) + allpaths.extend( + [as_posixpath(os.path.join(top, d).replace(test_dir, "")) for d in dirs + files] + ) expected = [ - '/arch.gzextract/extracted_file', - '/dir', - '/arch.gz', - '/file', - '/dir/otherarch.gzextract/extracted_file', - '/dir/otherarch.gz' + "/arch.gzextract/extracted_file", + "/dir", + "/arch.gz", + "/file", + "/dir/otherarch.gzextract/extracted_file", + "/dir/otherarch.gz", ] assert sorted(expected) == sorted(allpaths) @@ -802,16 +803,16 @@ def test_extract_can_extract_to_relative_paths(self): import shutil import tempfile - project_tmp = join(project_root, 'tmp') + project_tmp = join(project_root, "tmp") fileutils.create_dir(project_tmp) project_root_abs = abspath(project_root) - test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, '').strip('\\/') - test_file = self.get_test_loc('extract/relative_path/basic.zip') + test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/") + test_file = self.get_test_loc("extract/relative_path/basic.zip") shutil.copy(test_file, test_src_dir) - test_src_file = join(test_src_dir, 'basic.zip') + test_src_file = join(test_src_dir, "basic.zip") test_tgt_dir = join(project_root, test_src_file) + extractcode.EXTRACT_SUFFIX result = list(extract.extract(test_src_file)) - expected = ['c/a/a.txt', 'c/b/a.txt', 'c/c/a.txt'] + expected = ["c/a/a.txt", "c/b/a.txt", "c/c/a.txt"] check_files(test_tgt_dir, expected) for r in result: assert [] == r.warnings @@ -820,27 +821,29 @@ def test_extract_can_extract_to_relative_paths(self): def test_recursive_import(self): from extractcode.extract import extract # NOQA - @pytest.mark.skipif(on_windows, reason='Windows behavior is slightly different with relative paths') + @pytest.mark.skipif( + on_windows, reason="Windows behavior is slightly different with relative paths" + ) def test_extract_zipslip_tar_posix(self): - test_dir = self.get_test_loc('extract/zipslip', copy=True) + test_dir = self.get_test_loc("extract/zipslip", copy=True) expected = [ - 'README.md', - 'zip-slip-win.tar', - 'zip-slip-win.tar-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt', - 'zip-slip-win.tar-extract/good.txt', - 'zip-slip-win.tar.ABOUT', - 'zip-slip-win.zip', - 'zip-slip-win.zip-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt', - 
-            'zip-slip-win.zip-extract/good.txt',
-            'zip-slip-win.zip.ABOUT',
-            'zip-slip.tar',
-            'zip-slip.tar-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt',
-            'zip-slip.tar-extract/good.txt',
-            'zip-slip.tar.ABOUT',
-            'zip-slip.zip',
-            'zip-slip.zip-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt',
-            'zip-slip.zip-extract/good.txt',
-            'zip-slip.zip.ABOUT',
+            "README.md",
+            "zip-slip-win.tar",
+            "zip-slip-win.tar-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt",
+            "zip-slip-win.tar-extract/good.txt",
+            "zip-slip-win.tar.ABOUT",
+            "zip-slip-win.zip",
+            "zip-slip-win.zip-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/Temp/evil.txt",
+            "zip-slip-win.zip-extract/good.txt",
+            "zip-slip-win.zip.ABOUT",
+            "zip-slip.tar",
+            "zip-slip.tar-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt",
+            "zip-slip.tar-extract/good.txt",
+            "zip-slip.tar.ABOUT",
+            "zip-slip.zip",
+            "zip-slip.zip-extract/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/dotdot/tmp/evil.txt",
+            "zip-slip.zip-extract/good.txt",
+            "zip-slip.zip.ABOUT",
         ]

         result = list(extract.extract(test_dir, recurse=True))

@@ -854,134 +857,142 @@ def test_extract_zipslip_tar_posix(self):

     def test_extract_always_returns_a_generator_and_not_a_list(self):
         # a test for #1996 to ensure that progress is displayed "progressively"
-        test_dir = self.get_test_loc('extract/generator', copy=True)
+        test_dir = self.get_test_loc("extract/generator", copy=True)
         result = extract.extract(test_dir)
         assert isinstance(result, GeneratorType)

     def test_extract_ignore_file(self):
-        test_dir = self.get_test_loc('extract/ignore', copy=True)
+        test_dir = self.get_test_loc("extract/ignore", copy=True)
         expected = [
-            'alpha.zip',
-            'beta.tar',
-            'beta.tar-extract/a.txt',
-            'beta.tar-extract/b.txt',
-            'beta.tar-extract/c.txt',
-            'gamma/gamma.zip',
-            'gamma/gamma.zip-extract/c.txt'
+            "alpha.zip",
+            "beta.tar",
+            "beta.tar-extract/a.txt",
+            "beta.tar-extract/b.txt",
+            "beta.tar-extract/c.txt",
+            "gamma/gamma.zip",
+            "gamma/gamma.zip-extract/c.txt",
         ]
-        result = list(extract.extract(test_dir, recurse=True, ignore_pattern=('alpha.zip',)))
result = list(extract.extract(test_dir, recurse=True, ignore_pattern=("alpha.zip",))) check_no_error(result) check_files(test_dir, expected) def test_extract_ignore_directory(self): - test_dir = self.get_test_loc('extract/ignore', copy=True) + test_dir = self.get_test_loc("extract/ignore", copy=True) expected = [ - 'alpha.zip', - 'alpha.zip-extract/a.txt', - 'alpha.zip-extract/beta.zip', - 'alpha.zip-extract/beta.zip-extract/b.txt', - 'alpha.zip-extract/gamma.tar', - 'alpha.zip-extract/gamma.tar-extract/c.txt', - 'beta.tar', - 'beta.tar-extract/a.txt', - 'beta.tar-extract/b.txt', - 'beta.tar-extract/c.txt', - 'gamma/gamma.zip', + "alpha.zip", + "alpha.zip-extract/a.txt", + "alpha.zip-extract/beta.zip", + "alpha.zip-extract/beta.zip-extract/b.txt", + "alpha.zip-extract/gamma.tar", + "alpha.zip-extract/gamma.tar-extract/c.txt", + "beta.tar", + "beta.tar-extract/a.txt", + "beta.tar-extract/b.txt", + "beta.tar-extract/c.txt", + "gamma/gamma.zip", ] - result = list(extract.extract(test_dir, recurse=True, ignore_pattern=('gamma',))) + result = list(extract.extract(test_dir, recurse=True, ignore_pattern=("gamma",))) check_no_error(result) check_files(test_dir, expected) def test_extract_ignore_pattern(self): - test_dir = self.get_test_loc('extract/ignore', copy=True) + test_dir = self.get_test_loc("extract/ignore", copy=True) expected = [ - 'alpha.zip', - 'alpha.zip-extract/a.txt', - 'alpha.zip-extract/beta.zip', - 'alpha.zip-extract/gamma.tar', - 'alpha.zip-extract/gamma.tar-extract/c.txt', - 'beta.tar', - 'beta.tar-extract/a.txt', - 'beta.tar-extract/b.txt', - 'beta.tar-extract/c.txt', - 'gamma/gamma.zip', - 'gamma/gamma.zip-extract/c.txt' + "alpha.zip", + "alpha.zip-extract/a.txt", + "alpha.zip-extract/beta.zip", + "alpha.zip-extract/gamma.tar", + "alpha.zip-extract/gamma.tar-extract/c.txt", + "beta.tar", + "beta.tar-extract/a.txt", + "beta.tar-extract/b.txt", + "beta.tar-extract/c.txt", + "gamma/gamma.zip", + "gamma/gamma.zip-extract/c.txt", ] - result = list(extract.extract(test_dir, recurse=True, ignore_pattern=('b*.zip',))) + result = list(extract.extract(test_dir, recurse=True, ignore_pattern=("b*.zip",))) check_no_error(result) check_files(test_dir, expected) def test_extract_file_ignores_archives_not_of_default_kinds(self): - test_dir = self.get_test_loc('extract/all_formats/doc.docx', copy=True) + test_dir = self.get_test_loc("extract/all_formats/doc.docx", copy=True) base = fileutils.parent_directory(test_dir) expected = [] - cleaned_test_file = test_dir.replace(base, '') + cleaned_test_file = test_dir.replace(base, "") expected_events = [] target = extractcode.get_extraction_path(test_dir) result = list(extract.extract_file(test_dir, target)) result = [ r._replace( - source=cleaned_test_file, - target=extractcode.get_extraction_path(cleaned_test_file)) + source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file) + ) for r in result ] assert result == expected_events check_files(target, expected) def test_extract_file_handles_archives_of_default_kinds(self): - test_dir = self.get_test_loc('extract/all_formats/c.zip', copy=True) + test_dir = self.get_test_loc("extract/all_formats/c.zip", copy=True) base = fileutils.parent_directory(test_dir) expected = [ - 'c/a/a.txt', - 'c/b/a.txt', - 'c/c/a.txt', + "c/a/a.txt", + "c/b/a.txt", + "c/c/a.txt", ] - cleaned_test_file = test_dir.replace(base, '') + cleaned_test_file = test_dir.replace(base, "") expected_events = [ extract.ExtractEvent( source=cleaned_test_file, 
                 target=extractcode.get_extraction_path(cleaned_test_file),
-                done=False, warnings=[], errors=[]
+                done=False,
+                warnings=[],
+                errors=[],
             ),
             extract.ExtractEvent(
                 source=cleaned_test_file,
                 target=extractcode.get_extraction_path(cleaned_test_file),
-                done=True, warnings=[], errors=[]
-            )
+                done=True,
+                warnings=[],
+                errors=[],
+            ),
         ]
         target = extractcode.get_extraction_path(test_dir)
         result = list(extract.extract_file(test_dir, target))
         result = [
             r._replace(
-                source=cleaned_test_file,
-                target=extractcode.get_extraction_path(cleaned_test_file))
+                source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file)
+            )
             for r in result
         ]
         assert result == expected_events
         check_files(target, expected)

     def test_extract_file_works_with_all_kinds(self):
-        test_dir = self.get_test_loc('extract/all_formats/doc.docx', copy=True)
+        test_dir = self.get_test_loc("extract/all_formats/doc.docx", copy=True)
         base = fileutils.parent_directory(test_dir)
         expected = [
-            'c/a/a.txt',
-            'c/b/a.txt',
-            'c/c/a.txt',
+            "c/a/a.txt",
+            "c/b/a.txt",
+            "c/c/a.txt",
         ]
-        cleaned_test_file = test_dir.replace(base, '')
+        cleaned_test_file = test_dir.replace(base, "")
         expected_events = [
-            extract.ExtractEvent(source='doc.docx', target='doc.docx-extract', done=False, warnings=[], errors=[]),
-            extract.ExtractEvent(source='doc.docx', target='doc.docx-extract', done=True, warnings=[], errors=[]),
+            extract.ExtractEvent(
+                source="doc.docx", target="doc.docx-extract", done=False, warnings=[], errors=[]
+            ),
+            extract.ExtractEvent(
+                source="doc.docx", target="doc.docx-extract", done=True, warnings=[], errors=[]
+            ),
         ]
         target = extractcode.get_extraction_path(test_dir)
         result = list(extract.extract_file(test_dir, target, kinds=all_kinds))
         result = [
             r._replace(
-                source=cleaned_test_file,
-                target=extractcode.get_extraction_path(cleaned_test_file))
+                source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file)
+            )
             for r in result
         ]
         assert result == expected_events
diff --git a/tests/test_extractcode.py b/tests/test_extractcode.py
index cf06d44..0e362e5 100644
--- a/tests/test_extractcode.py
+++ b/tests/test_extractcode.py
@@ -17,75 +17,75 @@ class TestNewName(FileBasedTesting):
-    test_data_dir = join(dirname(__file__), 'data')
+    test_data_dir = join(dirname(__file__), "data")

     def test_new_name_without_extensions(self):
-        test_dir = self.get_test_loc('new_name/noext', copy=True)
-        renamed = new_name(join(test_dir, 'test'), is_dir=False)
+        test_dir = self.get_test_loc("new_name/noext", copy=True)
+        renamed = new_name(join(test_dir, "test"), is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'test_4' == result
+        assert "test_4" == result

-        renamed = new_name(join(test_dir, 'TEST'), is_dir=False)
+        renamed = new_name(join(test_dir, "TEST"), is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'TEST_4' == result
+        assert "TEST_4" == result

-        renamed = new_name(join(test_dir, 'test_1'), is_dir=True)
+        renamed = new_name(join(test_dir, "test_1"), is_dir=True)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'test_1_1' == result
+        assert "test_1_1" == result

     def test_new_name_with_extensions(self):
-        test_dir = self.get_test_loc('new_name/ext', copy=True)
-        renamed = new_name(join(test_dir, 'test.txt'), is_dir=False)
+        test_dir = self.get_test_loc("new_name/ext", copy=True)
+        renamed = new_name(join(test_dir, "test.txt"), is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'test_3.txt' == result
+        assert "test_3.txt" == result

-        renamed = new_name(join(test_dir, 'TEST.txt'), is_dir=False)
+        renamed = new_name(join(test_dir, "TEST.txt"), is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'TEST_3.txt' == result
+        assert "TEST_3.txt" == result

-        renamed = new_name(join(test_dir, 'TEST.tXt'), is_dir=False)
+        renamed = new_name(join(test_dir, "TEST.tXt"), is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'TEST_3.tXt' == result
+        assert "TEST_3.tXt" == result

-        renamed = new_name(join(test_dir, 'test.txt'), is_dir=True)
+        renamed = new_name(join(test_dir, "test.txt"), is_dir=True)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'test.txt_2' == result
+        assert "test.txt_2" == result

-        renamed = new_name(join(test_dir, 'teST.txt'), is_dir=True)
+        renamed = new_name(join(test_dir, "teST.txt"), is_dir=True)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert 'teST.txt_2' == result
+        assert "teST.txt_2" == result

     def test_new_name_with_empties(self):
         base_dir = self.get_temp_dir()
-        self.assertRaises(AssertionError, new_name, '', is_dir=False)
-        test_file = base_dir + '/'
+        self.assertRaises(AssertionError, new_name, "", is_dir=False)
+        test_file = base_dir + "/"
         renamed = new_name(test_file, is_dir=False)
         assert renamed
         assert not exists(renamed)

-        test_file = join(base_dir, '.')
+        test_file = join(base_dir, ".")
         renamed = new_name(test_file, is_dir=False)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert '_' == result
+        assert "_" == result

-        test_dir = base_dir + '/'
+        test_dir = base_dir + "/"
         renamed = new_name(test_dir, is_dir=True)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
         assert result

-        test_dir = join(base_dir, '.')
+        test_dir = join(base_dir, ".")
         renamed = new_name(test_dir, is_dir=True)
         assert not exists(renamed)
         result = fileutils.file_name(renamed)
-        assert '_' == result
+        assert "_" == result
diff --git a/tests/test_extractcode_api.py b/tests/test_extractcode_api.py
index 0e6e073..2d670c9 100644
--- a/tests/test_extractcode_api.py
+++ b/tests/test_extractcode_api.py
@@ -22,28 +22,32 @@ class TestExtractApi(BaseArchiveTestCase):
-    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    test_data_dir = os.path.join(os.path.dirname(__file__), "data")

     def test_extract_archive(self):
-        test_dir = self.get_test_loc('api/doc.docx', copy=True)
+        test_dir = self.get_test_loc("api/doc.docx", copy=True)
         base = fileutils.parent_directory(test_dir)
         expected = [
-            'c/a/a.txt',
-            'c/b/a.txt',
-            'c/c/a.txt',
+            "c/a/a.txt",
+            "c/b/a.txt",
+            "c/c/a.txt",
         ]
-        cleaned_test_file = test_dir.replace(base, '')
+        cleaned_test_file = test_dir.replace(base, "")
         expected_event = [
-            extract.ExtractEvent(source='doc.docx', target='doc.docx-extract', done=False, warnings=[], errors=[]),
-            extract.ExtractEvent(source='doc.docx', target='doc.docx-extract', done=True, warnings=[], errors=[]),
+            extract.ExtractEvent(
+                source="doc.docx", target="doc.docx-extract", done=False, warnings=[], errors=[]
+            ),
+            extract.ExtractEvent(
+                source="doc.docx", target="doc.docx-extract", done=True, warnings=[], errors=[]
+            ),
         ]
         target = extractcode.get_extraction_path(test_dir)
         result = list(api.extract_archive(test_dir, target))
         result = [
             r._replace(
-                source=cleaned_test_file,
-                target=extractcode.get_extraction_path(cleaned_test_file))
+                source=cleaned_test_file, target=extractcode.get_extraction_path(cleaned_test_file)
+            )
             for r in result
         ]
         assert expected_event == result
diff --git a/tests/test_extractcode_cli.py b/tests/test_extractcode_cli.py
index aa9864f..a16bed7 100644
--- a/tests/test_extractcode_cli.py
+++ b/tests/test_extractcode_cli.py
@@ -18,7 +18,7 @@ from commoncode.system import on_windows

 test_env = FileDrivenTesting()
-test_env.test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+test_env.test_data_dir = os.path.join(os.path.dirname(__file__), "data")
 project_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

 """
@@ -31,13 +31,14 @@ def run_extract(options, expected_rc=None, cwd=None):
     """
     Run extractcode as a plain subprocess. Return rc, stdout, stderr.
     """
-    bin_dir = 'Scripts' if on_windows else 'bin'
+    bin_dir = "Scripts" if on_windows else "bin"
     # note: this assumes that we are using a standard directory layout as set
     # with the configure script
-    cmd_loc = os.path.join(project_root, 'venv', bin_dir, 'extractcode')
-    assert os.path.exists(cmd_loc + ('.exe' if on_windows else ''))
+    cmd_loc = os.path.join(project_root, "venv", bin_dir, "extractcode")
+    assert os.path.exists(cmd_loc + (".exe" if on_windows else ""))
     args = [cmd_loc] + options
-    result = subprocess.run(args,
+    result = subprocess.run(
+        args,
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
         cwd=cwd,
@@ -45,15 +46,15 @@ def run_extract(options, expected_rc=None, cwd=None):
     )

     if expected_rc is not None and result.returncode != expected_rc:
-        opts = ' '.join(options)
-        error = f'''
+        opts = " ".join(options)
+        error = f"""
 Failure to run:
 extractcode {opts}:
 stdout:
 {result.stdout}

 stderr:
 {result.stderr}
-'''
+"""
         assert result.returncode == expected_rc, error
     return result

@@ -63,39 +64,39 @@ def test_extractcode_command_can_take_an_empty_directory():
     test_dir = test_env.get_temp_dir()
     result = run_extract([test_dir], expected_rc=0)

-    assert 'Extracting archives...' in result.stderr
-    assert 'Extracting done' in result.stderr
+    assert "Extracting archives..." in result.stderr
+    assert "Extracting done" in result.stderr


 def test_extractcode_command_does_extract_verbose():
-    test_dir = test_env.get_test_loc('cli/extract', copy=True)
-    result = run_extract(['--verbose', test_dir], expected_rc=1)
+    test_dir = test_env.get_test_loc("cli/extract", copy=True)
+    result = run_extract(["--verbose", test_dir], expected_rc=1)

-    assert os.path.exists(os.path.join(test_dir, 'some.tar.gz-extract'))
+    assert os.path.exists(os.path.join(test_dir, "some.tar.gz-extract"))

     try:
-        assert 'some.tar.gz' in result.stdout
-        assert 'tarred_gzipped.tgz' in result.stdout
+        assert "some.tar.gz" in result.stdout
+        assert "tarred_gzipped.tgz" in result.stdout

-        assert 'Extracting archives...' in result.stderr
-        assert 'ERROR extracting' in result.stderr
-        assert 'broken.tar.gz' in result.stderr
+        assert "Extracting archives..." in result.stderr
+        assert "ERROR extracting" in result.stderr
+        assert "broken.tar.gz" in result.stderr
         assert "broken.tar.gz: Unrecognized archive format" in result.stderr
-        assert 'Extracting done.' in result.stderr
+        assert "Extracting done." in result.stderr
     except:
         assert [result.stderr, result.stdout] == []


 def test_extractcode_command_always_shows_something_if_not_using_a_tty_verbose_or_not():
-    test_dir = test_env.get_test_loc('cli/extract/some.tar.gz', copy=True)
+    test_dir = test_env.get_test_loc("cli/extract/some.tar.gz", copy=True)

-    result = run_extract(options=['--verbose', test_dir], expected_rc=0)
-    assert 'Extracting archives...' in result.stderr
-    assert 'Extracting: some.tar.gz' in result.stdout
-    assert 'Extracting done.' in result.stderr
+    result = run_extract(options=["--verbose", test_dir], expected_rc=0)
+    assert "Extracting archives..." in result.stderr
+    assert "Extracting: some.tar.gz" in result.stdout
+    assert "Extracting done." in result.stderr

     result = run_extract(options=[test_dir], expected_rc=0)
-    assert 'Extracting archives...' in result.stderr
-    assert 'Extracting done.' in result.stderr
+    assert "Extracting archives..." in result.stderr
+    assert "Extracting done." in result.stderr


 def test_extractcode_command_works_with_relative_paths():
@@ -103,34 +104,35 @@ def test_extractcode_command_works_with_relative_paths():
     # dir where we run tests from, i.e. the git checkout dir To use relative
     # paths, we use our tmp dir at the root of the code tree
     from os.path import join
-    from commoncode import fileutils
+    from commoncode import fileutils
     import extractcode
     import tempfile
     import shutil

     try:
-        test_file = test_env.get_test_loc('cli/extract_relative_path/basic.zip')
+        test_file = test_env.get_test_loc("cli/extract_relative_path/basic.zip")

-        project_tmp = join(project_root, 'tmp')
+        project_tmp = join(project_root, "tmp")
         fileutils.create_dir(project_tmp)
         temp_rel = tempfile.mkdtemp(dir=project_tmp)
         assert os.path.exists(temp_rel)

-        relative_dir = temp_rel.replace(project_root, '').strip('\\/')
+        relative_dir = temp_rel.replace(project_root, "").strip("\\/")
         shutil.copy(test_file, temp_rel)

-        test_src_file = join(relative_dir, 'basic.zip')
+        test_src_file = join(relative_dir, "basic.zip")
         test_tgt_dir = join(project_root, test_src_file) + extractcode.EXTRACT_SUFFIX

         result = run_extract([test_src_file], expected_rc=0, cwd=project_root)

-        assert 'Extracting done' in result.stderr
-        assert not 'WARNING' in result.stderr
-        assert not 'ERROR' in result.stderr
+        assert "Extracting done" in result.stderr
+        assert not "WARNING" in result.stderr
+        assert not "ERROR" in result.stderr

-        expected = ['/c/a/a.txt', '/c/b/a.txt', '/c/c/a.txt']
+        expected = ["/c/a/a.txt", "/c/b/a.txt", "/c/c/a.txt"]
         file_result = [
-            as_posixpath(f.replace(test_tgt_dir, ''))
-            for f in fileutils.resource_iter(test_tgt_dir, with_dirs=False)]
+            as_posixpath(f.replace(test_tgt_dir, ""))
+            for f in fileutils.resource_iter(test_tgt_dir, with_dirs=False)
+        ]
         assert sorted(expected) == sorted(file_result)

@@ -143,146 +145,157 @@ def test_extractcode_command_works_with_relative_paths_verbose():
     # to the base dir where we run tests from, i.e. the git checkout dir
     # To use relative paths, we use our tmp dir at the root of the code tree
     from os.path import join
-    from commoncode import fileutils
+    from commoncode import fileutils
     import tempfile
     import shutil

     try:
-        project_tmp = join(project_root, 'tmp')
+        project_tmp = join(project_root, "tmp")
         fileutils.create_dir(project_tmp)
-        test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root, '').strip('\\/')
-        test_file = test_env.get_test_loc('cli/extract_relative_path/basic.zip')
+        test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root, "").strip("\\/")
+        test_file = test_env.get_test_loc("cli/extract_relative_path/basic.zip")
         shutil.copy(test_file, test_src_dir)
-        test_src_file = join(test_src_dir, 'basic.zip')
+        test_src_file = join(test_src_dir, "basic.zip")

-        result = run_extract(['--verbose', test_src_file] , expected_rc=0)
+        result = run_extract(["--verbose", test_src_file], expected_rc=0)

         # extract the path from the second line of the output
         # check that the path is relative and not absolute
         lines = result.stderr.splitlines(False)
         line = lines[1]
-        line_path = line.split(':', 1)[-1].strip()
+        line_path = line.split(":", 1)[-1].strip()
         if on_windows:
             drive = test_file[:2]
             assert not line_path.startswith(drive)
         else:
-            assert not line_path.startswith('/')
+            assert not line_path.startswith("/")
     finally:
         fileutils.delete(test_src_dir)


 def test_usage_and_help_return_a_correct_script_name_on_all_platforms():
-    options = ['--help']
+    options = ["--help"]

-    result = run_extract(options , expected_rc=0)
+    result = run_extract(options, expected_rc=0)

-    assert 'Usage: extractcode [OPTIONS]' in result.stdout
+    assert "Usage: extractcode [OPTIONS]" in result.stdout
     # this was showing up on Windows
-    assert 'extractcode-script.py' not in result.stderr
+    assert "extractcode-script.py" not in result.stderr

     result = run_extract([])
-    assert 'Usage: extractcode [OPTIONS]' in result.stderr
+    assert "Usage: extractcode [OPTIONS]" in result.stderr
     # this was showing up on Windows
-    assert 'extractcode-script.py' not in result.stderr
+    assert "extractcode-script.py" not in result.stderr

-    result = run_extract(['-xyz'] , expected_rc=2)
+    result = run_extract(["-xyz"], expected_rc=2)
     # this was showing up on Windows
-    assert 'extractcode-script.py' not in result.stderr
+    assert "extractcode-script.py" not in result.stderr


 def test_extractcode_command_can_extract_archive_with_unicode_names_verbose():
-    test_dir = test_env.get_test_loc('cli/unicodearch', copy=True)
-    result = run_extract(['--verbose', test_dir] , expected_rc=0)
-    assert 'Sanders' in result.stdout
+    test_dir = test_env.get_test_loc("cli/unicodearch", copy=True)
+    result = run_extract(["--verbose", test_dir], expected_rc=0)
+    assert "Sanders" in result.stdout

     file_result = [
-        f for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
-        if not f.endswith('unicodepath.tgz')]
-    file_result = [''.join(f.partition('/unicodepath/')[1:]) for f in file_result]
+        f
+        for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
+        if not f.endswith("unicodepath.tgz")
+    ]
+    file_result = ["".join(f.partition("/unicodepath/")[1:]) for f in file_result]
     file_result = [f for f in file_result if f]
     expected = [
-        '/unicodepath/Ho_',
-        '/unicodepath/Ho_a',
-        '/unicodepath/koristenjem Karkkainen - Sander.pdf'
+        "/unicodepath/Ho_",
+        "/unicodepath/Ho_a",
+        "/unicodepath/koristenjem Karkkainen - Sander.pdf",
     ]
     assert sorted(expected) == sorted(file_result)


 def test_extractcode_command_can_extract_archive_with_unicode_names():
-    test_dir = test_env.get_test_loc('cli/unicodearch', copy=True)
-    run_extract([test_dir] , expected_rc=0)
+    test_dir = test_env.get_test_loc("cli/unicodearch", copy=True)
+    run_extract([test_dir], expected_rc=0)

     file_result = [
-        f for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
-        if not f.endswith('unicodepath.tgz')]
-    file_result = [''.join(f.partition('/unicodepath/')[1:]) for f in file_result]
+        f
+        for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
+        if not f.endswith("unicodepath.tgz")
+    ]
+    file_result = ["".join(f.partition("/unicodepath/")[1:]) for f in file_result]
     file_result = [f for f in file_result if f]
     expected = [
-        '/unicodepath/Ho_',
-        '/unicodepath/Ho_a',
-        '/unicodepath/koristenjem Karkkainen - Sander.pdf'
+        "/unicodepath/Ho_",
+        "/unicodepath/Ho_a",
+        "/unicodepath/koristenjem Karkkainen - Sander.pdf",
     ]
     assert sorted(expected) == sorted(file_result)


 def test_extractcode_command_can_extract_shallow():
-    test_dir = test_env.get_test_loc('cli/extract_shallow', copy=True)
-    run_extract(['--shallow', test_dir] , expected_rc=0)
+    test_dir = test_env.get_test_loc("cli/extract_shallow", copy=True)
+    run_extract(["--shallow", test_dir], expected_rc=0)

     file_result = [
-        f for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
-        if not f.endswith('unicodepath.tgz')]
-    file_result = [''.join(f.partition('/top.zip-extract/')[1:]) for f in file_result]
+        f
+        for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
+        if not f.endswith("unicodepath.tgz")
+    ]
+    file_result = ["".join(f.partition("/top.zip-extract/")[1:]) for f in file_result]
     file_result = [f for f in file_result if f]
     # this checks that the zip in top.zip are not extracted
     expected = [
-        '/top.zip-extract/some3.zip',
-        '/top.zip-extract/some2.zip',
-        '/top.zip-extract/some1.zip',
+        "/top.zip-extract/some3.zip",
+        "/top.zip-extract/some2.zip",
+        "/top.zip-extract/some1.zip",
     ]
     assert sorted(expected) == sorted(file_result)


 def test_extractcode_command_can_ignore():
-    test_dir = test_env.get_test_loc('cli/extract_ignore', copy=True)
-    run_extract(['--ignore', '*.tar', test_dir] , expected_rc=0)
+    test_dir = test_env.get_test_loc("cli/extract_ignore", copy=True)
+    run_extract(["--ignore", "*.tar", test_dir], expected_rc=0)

     file_result = [
-        f for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
-        if not f.endswith('a.tar') or not f.endswith('b.tar')]
-    file_result = [''.join(f.partition('/a.zip-extract/')[1:]) for f in file_result]
+        f
+        for f in map(as_posixpath, resource_iter(test_dir, with_dirs=False))
+        if not f.endswith("a.tar") or not f.endswith("b.tar")
+    ]
+    file_result = ["".join(f.partition("/a.zip-extract/")[1:]) for f in file_result]
     file_result = [f for f in file_result if f]
     expected = [
-        '/a.zip-extract/a.txt',
-        '/a.zip-extract/b.zip',
-        '/a.zip-extract/b.zip-extract/b.txt',
-        '/a.zip-extract/c.tar',
+        "/a.zip-extract/a.txt",
+        "/a.zip-extract/b.zip",
+        "/a.zip-extract/b.zip-extract/b.txt",
+        "/a.zip-extract/c.tar",
     ]
     assert sorted(expected) == sorted(file_result)


 def test_extractcode_command_does_not_crash_with_replace_originals_and_corrupted_archives():
-    test_dir = test_env.get_test_loc('cli/replace-originals', copy=True)
-    result = run_extract(['--replace-originals', '--verbose', test_dir] , expected_rc=1)
+    test_dir = test_env.get_test_loc("cli/replace-originals", copy=True)
+    result = run_extract(["--replace-originals", "--verbose", test_dir], expected_rc=1)

-    assert not os.path.exists(os.path.join(test_dir, 'rake.1.gz-extract'))
-    assert 'rake.1.gz' in result.stdout
+    assert not os.path.exists(os.path.join(test_dir, "rake.1.gz-extract"))
+    assert "rake.1.gz" in result.stdout

-    assert 'Extracting archives...' in result.stderr
-    assert 'ERROR extracting' in result.stderr
-    assert 'rake.1.gz' in result.stderr
-    assert 'Not a gzipped file ' in result.stderr
-    assert 'issue6550.gz' in result.stderr
-    assert ' too many length or distance symbols' in result.stderr
-    assert 'Extracting done.' in result.stderr
+    assert "Extracting archives..." in result.stderr
+    assert "ERROR extracting" in result.stderr
+    assert "rake.1.gz" in result.stderr
+    assert "Not a gzipped file " in result.stderr
+    assert "issue6550.gz" in result.stderr
+    assert " too many length or distance symbols" in result.stderr
+    assert "Extracting done." in result.stderr


-@pytest.mark.skipif(on_windows, reason='FIXME: this test fails on Windows until we have support for long file names.')
+@pytest.mark.skipif(
+    on_windows,
+    reason="FIXME: this test fails on Windows until we have support for long file names.",
+)
 def test_extractcode_command_can_extract_nuget():
-    test_dir = test_env.get_test_loc('cli/extract_nuget', copy=True)
-    result = run_extract(['--verbose', test_dir])
+    test_dir = test_env.get_test_loc("cli/extract_nuget", copy=True)
+    result = run_extract(["--verbose", test_dir])

     if result.returncode != 0:
         print(result.stdout)
-    assert 'ERROR extracting' not in result.stdout
-    assert 'ERROR extracting' not in result.stderr
+    assert "ERROR extracting" not in result.stdout
+    assert "ERROR extracting" not in result.stderr
diff --git a/tests/test_libarchive2.py b/tests/test_libarchive2.py
index 0ea6cb3..78315e9 100644
--- a/tests/test_libarchive2.py
+++ b/tests/test_libarchive2.py
@@ -22,7 +22,6 @@ class TestExtractorTest(BaseArchiveTestCase):
-
     def test_libarchive_extract_can_extract_to_relative_paths(self):
         # The setup is a tad complex because we want to have a relative dir
         # to the base dir where we run tests from, i.e. the git checkout dir
@@ -32,17 +31,15 @@ def test_libarchive_extract_can_extract_to_relative_paths(self):
         import shutil
         from extractcode.libarchive2 import extract

-        test_file = self.get_test_loc('archive/relative_path/basic.zip')
-        project_tmp = join(project_root, 'tmp')
+        test_file = self.get_test_loc("archive/relative_path/basic.zip")
+        project_tmp = join(project_root, "tmp")
         fileutils.create_dir(project_tmp)
         project_root_abs = abspath(project_root)
-        test_src_dir = tempfile.mkdtemp(
-            dir=project_tmp).replace(project_root_abs, '').strip('\\/')
-        test_tgt_dir = tempfile.mkdtemp(
-            dir=project_tmp).replace(project_root_abs, '').strip('\\/')
+        test_src_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/")
+        test_tgt_dir = tempfile.mkdtemp(dir=project_tmp).replace(project_root_abs, "").strip("\\/")
         shutil.copy(test_file, test_src_dir)
-        test_src_file = join(test_src_dir, 'basic.zip')
+        test_src_file = join(test_src_dir, "basic.zip")
         result = list(extract(test_src_file, test_tgt_dir))
         assert [] == result
-        expected = ['c/a/a.txt', 'c/b/a.txt', 'c/c/a.txt']
+        expected = ["c/a/a.txt", "c/b/a.txt", "c/c/a.txt"]
         check_files(test_tgt_dir, expected)
diff --git a/tests/test_patch.py b/tests/test_patch.py
index c3f4008..126edc1 100644
--- a/tests/test_patch.py
+++ b/tests/test_patch.py
@@ -21,24 +21,25 @@
     import patch as _pythonpatch
 except ImportError:
     import pytest
+
     pytestmark = pytest.mark.skipif(True, reason="Run only if patch is installed.")


 class TestIsPatch(FileBasedTesting):
-    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    test_data_dir = os.path.join(os.path.dirname(__file__), "data")

     def test_is_not_patch(self):
-        test_dir = self.get_test_loc('patch/not_patches', copy=True)
+        test_dir = self.get_test_loc("patch/not_patches", copy=True)
         for r, _, files in os.walk(test_dir):
             for f in files:
                 test_file = os.path.join(r, f)
                 assert not patch.is_patch(test_file)

     def test_is_patch(self):
-        test_dir = self.get_test_loc('patch/patches', copy=True)
+        test_dir = self.get_test_loc("patch/patches", copy=True)
         for r, _, files in os.walk(test_dir):
             for f in files:
-                if not f.endswith('expected'):
+                if not f.endswith("expected"):
                     test_file = os.path.join(r, f)
                     assert patch.is_patch(test_file)

@@ -46,11 +47,12 @@ def test_is_patch(self):

 def check_patch(test_file, expected_file, regen=False):
     result = [list(pi) for pi in patch.patch_info(test_file)]
-    result = [[as_unicode(s), as_unicode(t), list(map(as_unicode, lines))]
-        for s, t, lines in result]
+    result = [
+        [as_unicode(s), as_unicode(t), list(map(as_unicode, lines))] for s, t, lines in result
+    ]

     if regen:
-        with io.open(expected_file, 'w') as regened:
+        with io.open(expected_file, "w") as regened:
             json.dump(result, regened, indent=2)
     with open(expected_file) as expect:
         expected = json.load(expect)
@@ -58,996 +60,1558 @@ class TestPatchInfoFailing(FileBasedTesting):
-    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    test_data_dir = os.path.join(os.path.dirname(__file__), "data")

     # FIXME: these tests need love and eventually a bug report upstream
     @expectedFailure
     def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr.patch')
+        test_file = self.get_test_loc("patch/patches/misc/webkit/opensource/patches/sync_xhr.patch")
         # fails with Exception Unable to parse patch file
         list(patch.patch_info(test_file))

     @expectedFailure
     def test_patch_info_patch_patches_problematic_opensso_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/problematic/OpenSSO.patch')
+        test_file = self.get_test_loc("patch/patches/problematic/OpenSSO.patch")
         # fails with Exception Unable to parse patch file
         list(patch.patch_info(test_file))


 class TestPatchInfo(FileBasedTesting):
-    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    test_data_dir = os.path.join(os.path.dirname(__file__), "data")

     def test_patch_info_patch_patches_dnsmasq_2_63_1_diff(self):
-        test_file = self.get_test_loc(u'patch/patches/dnsmasq_2.63-1.diff')
-        expected_file = self.get_test_loc('patch/patches/dnsmasq_2.63-1.diff.expected')
+        test_file = self.get_test_loc("patch/patches/dnsmasq_2.63-1.diff")
+        expected_file = self.get_test_loc("patch/patches/dnsmasq_2.63-1.diff.expected")
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_dropbear_2012_55_1_diff(self):
-        test_file = self.get_test_loc(u'patch/patches/dropbear_2012.55-1.diff')
-        expected_file = self.get_test_loc('patch/patches/dropbear_2012.55-1.diff.expected')
+        test_file = self.get_test_loc("patch/patches/dropbear_2012.55-1.diff")
+        expected_file = self.get_test_loc("patch/patches/dropbear_2012.55-1.diff.expected")
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_electricfence_2_0_5_longjmp_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.0.5-longjmp.patch')
-        expected_file = self.get_test_loc('patch/patches/ElectricFence-2.0.5-longjmp.patch.expected')
+        test_file = self.get_test_loc("patch/patches/ElectricFence-2.0.5-longjmp.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/ElectricFence-2.0.5-longjmp.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_electricfence_2_1_vaarg_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.1-vaarg.patch')
-        expected_file = self.get_test_loc('patch/patches/ElectricFence-2.1-vaarg.patch.expected')
+        test_file = self.get_test_loc("patch/patches/ElectricFence-2.1-vaarg.patch")
+        expected_file = self.get_test_loc("patch/patches/ElectricFence-2.1-vaarg.patch.expected")
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_electricfence_2_2_2_madvise_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-madvise.patch')
-        expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-madvise.patch.expected')
+        test_file = self.get_test_loc("patch/patches/ElectricFence-2.2.2-madvise.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/ElectricFence-2.2.2-madvise.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_electricfence_2_2_2_pthread_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-pthread.patch')
-        expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-pthread.patch.expected')
+        test_file = self.get_test_loc("patch/patches/ElectricFence-2.2.2-pthread.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/ElectricFence-2.2.2-pthread.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_libmediainfo_0_7_43_diff(self):
-        test_file = self.get_test_loc(u'patch/patches/libmediainfo-0.7.43.diff')
-        expected_file = self.get_test_loc('patch/patches/libmediainfo-0.7.43.diff.expected')
+        test_file = self.get_test_loc("patch/patches/libmediainfo-0.7.43.diff")
+        expected_file = self.get_test_loc("patch/patches/libmediainfo-0.7.43.diff.expected")
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_configure_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/configure.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/configure.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/avahi-0.6.25/patches/configure.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/avahi-0.6.25/patches/configure.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_main_c_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/main.c.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/main.c.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/avahi-0.6.25/patches/main.c.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/avahi-0.6.25/patches/main.c.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_busybox_patches_fix_subarch_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/fix-subarch.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/fix-subarch.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/busybox/patches/fix-subarch.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/busybox/patches/fix-subarch.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_busybox_patches_gtrick_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/gtrick.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/gtrick.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/busybox/patches/gtrick.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/busybox/patches/gtrick.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_busybox_patches_workaround_old_uclibc_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/workaround_old_uclibc.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/workaround_old_uclibc.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/busybox/patches/workaround_old_uclibc.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/busybox/patches/workaround_old_uclibc.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_curl_patches_ekioh_cookie_fix_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/curl/patches/ekioh_cookie_fix.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/curl/patches/ekioh_cookie_fix.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_e2fsprogs_1_37_uuidlibs_blkidlibs_only_target_makefile_in_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_ekioh_svg_opensource_patches_patch_ekioh_config_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webcore_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webkit_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/curl/patches/ekioh_cookie_fix.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/curl/patches/ekioh_cookie_fix.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_e2fsprogs_1_37_uuidlibs_blkidlibs_only_target_makefile_in_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_ekioh_svg_opensource_patches_patch_ekioh_config_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webcore_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webkit_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_animated_gif_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/animated_gif.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/animated_gif.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/animated_gif.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/animated_gif.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_computed_style_for_transform_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_computed_style_for_transform_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_cookies_fixes_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_dlna_image_security_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_dlna_image_security_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_draw_pattern_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/draw_pattern.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/draw_pattern.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/draw_pattern.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/draw_pattern.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_logs_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_logs.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_logs.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/enable_logs.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/enable_logs.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_proxy_setup_log_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_proxy_setup_log_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_file_secure_mode_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_http_secure_mode_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_javascript_screen_resolution_fix_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_javascript_screen_resolution_fix_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_keycode_webkit_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_local_file_access_whitelist_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_lower_case_css_attributes_for_transform_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_moving_empty_image_leaves_garbage_on_screen_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_local_file_access_whitelist_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_lower_case_css_attributes_for_transform_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_moving_empty_image_leaves_garbage_on_screen_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_open_in_new_window_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_plugin_thread_async_call_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_plugin_thread_async_call_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/ram_cache.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/ram_cache.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_meta_expires_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_meta_expires_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_speedup_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/speedup.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/speedup.patch.expected')
+        test_file = self.get_test_loc("patch/patches/misc/webkit/opensource/patches/speedup.patch")
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/speedup.patch.expected"
+        )
         check_patch(test_file, expected_file)

-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_https_access_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch.expected')
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_https_access_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_useragent_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/useragent.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/useragent.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/useragent.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/useragent.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_keyevent_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_videoplane_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_cssparser_parsetransitionshorthand_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_database_support_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch.expected')
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_cssparser_parsetransitionshorthand_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch.expected"
+        )
+        check_patch(test_file, expected_file)
+
+    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_database_support_patch(
+        self,
+    ):
+        test_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch"
+        )
+        expected_file = self.get_test_loc(
+            "patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch.expected"
+        )
         check_patch(test_file, expected_file)

     def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_dlna_images_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_finish_animations_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_xmlhttprequest_cross_domain_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch')
-        expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch.expected')
-        check_patch(test_file, expected_file)
-
-    def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_createobject_null_check_patch(self):
-        test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch')
-        expected_file =
self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_finish_animations_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_xmlhttprequest_cross_domain_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_createobject_null_check_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_dump_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_getopensourcenotice_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_jsvalue_equal_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_getopensourcenotice_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_jsvalue_equal_patch( + self, + ): + test_file = self.get_test_loc( + 
"patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_timer_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_support_parallel_idl_gen_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_accept_click_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_videoplane_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch') - expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_support_parallel_idl_gen_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_accept_click_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_videoplane_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_icu_patches_ekioh_config_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/icu/patches/ekioh-config.patch') - expected_file = self.get_test_loc('patch/patches/misc/icu/patches/ekioh-config.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/icu/patches/ekioh-config.patch") + expected_file = self.get_test_loc( + 
"patch/patches/misc/icu/patches/ekioh-config.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_jfsutils_patches_largefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/jfsutils/patches/largefile.patch') - expected_file = self.get_test_loc('patch/patches/misc/jfsutils/patches/largefile.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/jfsutils/patches/largefile.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/jfsutils/patches/largefile.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libasyncns_asyncns_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/asyncns.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/libasyncns/asyncns.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libasyncns/asyncns.h.patch") + expected_file = self.get_test_loc("patch/patches/misc/libasyncns/asyncns.h.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libasyncns_configure_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/configure.patch') - expected_file = self.get_test_loc('patch/patches/misc/libasyncns/configure.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libasyncns/configure.patch") + expected_file = self.get_test_loc("patch/patches/misc/libasyncns/configure.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libdaemon_0_13_patches_configure_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libdaemon-0.13/patches/configure.patch') - expected_file = self.get_test_loc('patch/patches/misc/libdaemon-0.13/patches/configure.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libdaemon-0.13/patches/configure.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libdaemon-0.13/patches/configure.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libiconv_patches_cp932_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/cp932.patch') - expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/cp932.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libiconv/patches/cp932.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libiconv/patches/cp932.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libiconv_patches_make_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/make.patch') - expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/make.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libiconv/patches/make.patch") + expected_file = self.get_test_loc("patch/patches/misc/libiconv/patches/make.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_config_sub_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/config.sub.patch') - expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/config.sub.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libjpeg-v6b/patches/config.sub.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libjpeg-v6b/patches/config.sub.patch.expected" + ) check_patch(test_file, expected_file) def 
test_patch_info_patch_patches_misc_libjpeg_v6b_patches_configure_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/configure.patch') - expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/configure.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libjpeg-v6b/patches/configure.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libjpeg-v6b/patches/configure.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_makefile_cfg_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch') - expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libpng_1_2_8_makefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/makefile.patch') - expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/makefile.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libpng-1.2.8/makefile.patch") + expected_file = self.get_test_loc("patch/patches/misc/libpng-1.2.8/makefile.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libpng_1_2_8_pngconf_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngconf.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngconf.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libpng-1.2.8/pngconf.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libpng-1.2.8/pngconf.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libpng_1_2_8_pngrutil_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngrutil.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngrutil.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libpng-1.2.8/pngrutil.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/libpng-1.2.8/pngrutil.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_libxml2_patches_iconv_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/libxml2/patches/iconv.patch') - expected_file = self.get_test_loc('patch/patches/misc/libxml2/patches/iconv.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/libxml2/patches/iconv.patch") + expected_file = self.get_test_loc("patch/patches/misc/libxml2/patches/iconv.patch.expected") check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_0001_stmmac_updated_the_driver_and_added_several_fixes_a_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_0001_stmmac_updated_the_driver_and_added_several_fixes_a_patch( + self, + ): + test_file = self.get_test_loc( + 
"patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_addrspace_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/addrspace.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/addrspace.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/addrspace.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/addrspace.h.patch.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_kernel_cpu_init_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_kernel_cpu_init_c_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_makefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_mm_init_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_bigphysarea_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bigphysarea.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bigphysarea.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/bigphysarea.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/bigphysarea.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_bugs_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bugs.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bugs.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/bugs.h.patch") 
+ expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/bugs.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_cache_sh4_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_cmdset_0001_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_util_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_util.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_util.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/cfi_util.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/cfi_util.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_char_build_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/char_build.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/char_build.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/char_build.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/char_build.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_cmdlinepart_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_console_printk_loglevel_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_delayed_i2c_read_patch(self): - test_file = 
self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_devinet_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/devinet.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/devinet.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/devinet.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/devinet.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_carrier_sense_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_unaligned_printks_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_unaligned_printks_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_dma_api_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/dma-api.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/dma-api.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/dma-api.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/dma-api.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_do_mounts_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/do_mounts.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/do_mounts.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/do_mounts.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/do_mounts.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_drivers_net_makefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch') - expected_file = 
self.get_test_loc('patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_fan_ctrl_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/fan_ctrl.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/fan_ctrl.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/fan_ctrl.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/fan_ctrl.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_hcd_stm_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_head_s_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/head.S.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/head.S.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/head.S.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/head.S.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch2(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_nostop_for_bitbanging_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_nostop_for_bitbanging_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch" + ) + expected_file = self.get_test_loc( + 
"patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_rate_normal_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_revert_to_117_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_if_ppp_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/if_ppp.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/if_ppp.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/if_ppp.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/if_ppp.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_inittmpfs_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/inittmpfs.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/inittmpfs.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/inittmpfs.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/inittmpfs.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_init_kconfig_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_Kconfig.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_Kconfig.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/init_Kconfig.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/init_Kconfig.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_init_main_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_main.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_main.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/init_main.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/init_main.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_ioremap_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ioremap.c.patch') - expected_file = 
self.get_test_loc('patch/patches/misc/linux-st710x/patches/ioremap.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/ioremap.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/ioremap.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_ipconfig_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ipconfig.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ipconfig.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/ipconfig.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/ipconfig.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_extable_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_resource_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_resource.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_resource.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/kernel_resource.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/kernel_resource.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_kexec_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kexec.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kexec.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/kexec.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/kexec.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_ksymhash_elflib_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_libata_sense_data_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/libata_sense_data.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/libata_sense_data.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/libata_sense_data.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/libata_sense_data.patch.expected" + ) 
check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_localversion_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/localversion.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/localversion.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/localversion.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/localversion.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_mach_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mach.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mach.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/mach.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/mach.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_marvell_88e3015_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_mb442_setup_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_mmu_context_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mmu_context.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mmu_context.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/mmu_context.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/mmu_context.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_make_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_make.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_make.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/motorola_make.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/motorola_make.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_rootdisk_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch.expected') + 
test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_namespace_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/namespace.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/namespace.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/namespace.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/namespace.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_flash_based_bbt_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_old_oob_layout_for_yaffs2_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_old_oob_layout_for_yaffs2_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/netconsole.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/netconsole.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/netconsole.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/netconsole.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_nfsroot_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nfsroot.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nfsroot.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/nfsroot.patch") + expected_file = 
self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/nfsroot.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_page_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/page.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/page.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_page_alloc_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page_alloc.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page_alloc.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/page_alloc.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/page_alloc.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_pgtable_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pgtable.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pgtable.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/pgtable.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/pgtable.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/phy.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/phy.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/phy.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/phy.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_device_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy_device.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy_device.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/phy_device.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/phy_device.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_pid_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pid.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pid.c.patch.expected') + test_file = 
self.get_test_loc("patch/patches/misc/linux-st710x/patches/pid.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/pid.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_pio_irq_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pio-irq.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pio-irq.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/pio-irq.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/pio-irq.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_pmb_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pmb.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pmb.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/pmb.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/pmb.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_process_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/process.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/process.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/process.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/process.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_sample_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sample.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sample.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/sample.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sample.patch.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_sched_cfs_v2_6_23_12_v24_1_mod_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_sched_cfs_v2_6_23_12_v24_1_mod_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stb7100_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch.expected" + ) check_patch(test_file, expected_file) def 
test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stx7105_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/setup.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/setup.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_kernel_setup_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_ksyms_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/smsc.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/smsc.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_makefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc_makefile.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc_makefile.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/smsc_makefile.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/smsc_makefile.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_soc_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/soc.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/soc.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/soc.h.patch") + 
expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/soc.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert1_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert2_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert3_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_4_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.4.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.4.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/squashfs3.4.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/squashfs3.4.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_stasc_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stasc.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stasc.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/stasc.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/stasc.c.patch.expected" + ) check_patch(test_file, expected_file) def 
test_patch_info_patch_patches_misc_linux_st710x_patches_stmmac_main_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch.expected" + ) check_patch(test_file, expected_file) - def test_patch_info_patch_patches_misc_linux_st710x_patches_suppress_igmp_report_listening_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch.expected') + def test_patch_info_patch_patches_misc_linux_st710x_patches_suppress_igmp_report_listening_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_time_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/time.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/time.c.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_2_5_1_for_2_6_23_17_diff(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_remove_debug_printouts_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/time.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/time.c.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_2_5_1_for_2_6_23_17_diff( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_remove_debug_printouts_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x0_vidmem_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch') - expected_file = 
self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch.expected') + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x3_board_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x3_board.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x3_board.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/vip19x3_board.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19x3_board.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/vip19xx.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19xx.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nand_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nor_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_linux_st710x_patches_vt_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vt.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vt.c.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_yaffs_guts_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch.expected') - check_patch(test_file, expected_file) - - def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch') - expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch.expected') + 
test_file = self.get_test_loc("patch/patches/misc/linux-st710x/patches/vt.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/vt.c.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_yaffs_guts_c_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch.expected" + ) + check_patch(test_file, expected_file) + + def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_patch( + self, + ): + test_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_npapi_patches_npapi_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/npapi/patches/npapi.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/npapi/patches/npapi.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/npapi/patches/npapi.h.patch") + expected_file = self.get_test_loc("patch/patches/misc/npapi/patches/npapi.h.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_openssl_0_9_8_patches_configure_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/openssl-0.9.8/patches/Configure.patch') - expected_file = self.get_test_loc('patch/patches/misc/openssl-0.9.8/patches/Configure.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/openssl-0.9.8/patches/Configure.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/openssl-0.9.8/patches/Configure.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_sqlite_patches_permissions_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/sqlite/patches/permissions.patch') - expected_file = self.get_test_loc('patch/patches/misc/sqlite/patches/permissions.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/sqlite/patches/permissions.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/sqlite/patches/permissions.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_arpping_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch") + expected_file = self.get_test_loc( + 
"patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_debug_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpd_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_makefile_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/options.c.patch") + expected_file = self.get_test_loc( + 
"patch/patches/misc/udhcp-0.9.8/patch/options.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/options.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/options.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_route_patch1(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/route.patch1') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/route.patch1.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/route.patch1") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/route.patch1.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_script_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/script.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/script.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/script.c.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/script.c.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_t1t2_patch1(self): - test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1') - expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1.expected') + test_file = self.get_test_loc("patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1") + expected_file = self.get_test_loc( + "patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_vqec_patch_build_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/BUILD.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/BUILD.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/BUILD.patch") + expected_file = self.get_test_loc("patch/patches/misc/vqec/patch/BUILD.patch.expected") check_patch(test_file, expected_file) def 
test_patch_info_patch_patches_misc_vqec_patch_cross_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/cross.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/cross.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/cross.patch") + expected_file = self.get_test_loc("patch/patches/misc/vqec/patch/cross.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_vqec_patch_uclibc_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/uclibc.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/uclibc.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/uclibc.patch") + expected_file = self.get_test_loc("patch/patches/misc/vqec/patch/uclibc.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_vqec_patch_vqec_ifclient_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_ifclient.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_ifclient.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/vqec_ifclient.patch") + expected_file = self.get_test_loc( + "patch/patches/misc/vqec/patch/vqec_ifclient.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_c_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.c.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.c.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/vqec_wv.c.patch") + expected_file = self.get_test_loc("patch/patches/misc/vqec/patch/vqec_wv.c.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_h_patch(self): - test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.h.patch') - expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.h.patch.expected') + test_file = self.get_test_loc("patch/patches/misc/vqec/patch/vqec_wv.h.patch") + expected_file = self.get_test_loc("patch/patches/misc/vqec/patch/vqec_wv.h.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_postgrey_1_30_group_patch(self): - test_file = self.get_test_loc(u'patch/patches/postgrey-1.30-group.patch') - expected_file = self.get_test_loc('patch/patches/postgrey-1.30-group.patch.expected') + test_file = self.get_test_loc("patch/patches/postgrey-1.30-group.patch") + expected_file = self.get_test_loc("patch/patches/postgrey-1.30-group.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_drupal_upload_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/drupal_upload.patch') - expected_file = self.get_test_loc('patch/patches/windows/drupal_upload.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/drupal_upload.patch") + expected_file = self.get_test_loc("patch/patches/windows/drupal_upload.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_ether_patch_1_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/ether_patch_1.patch') - expected_file = self.get_test_loc('patch/patches/windows/ether_patch_1.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/ether_patch_1.patch") + expected_file = 
self.get_test_loc("patch/patches/windows/ether_patch_1.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_js_delete_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/js_delete.patch') - expected_file = self.get_test_loc('patch/patches/windows/js_delete.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/js_delete.patch") + expected_file = self.get_test_loc("patch/patches/windows/js_delete.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_plugin_explorer_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/plugin explorer.patch') - expected_file = self.get_test_loc('patch/patches/windows/plugin explorer.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/plugin explorer.patch") + expected_file = self.get_test_loc("patch/patches/windows/plugin explorer.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_resolveentity32_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/resolveentity32.patch') - expected_file = self.get_test_loc('patch/patches/windows/resolveentity32.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/resolveentity32.patch") + expected_file = self.get_test_loc("patch/patches/windows/resolveentity32.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_sift_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/sift.patch') - expected_file = self.get_test_loc('patch/patches/windows/sift.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/sift.patch") + expected_file = self.get_test_loc("patch/patches/windows/sift.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_thumbnail_support_0_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch') - expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/thumbnail_support_0.patch") + expected_file = self.get_test_loc( + "patch/patches/windows/thumbnail_support_0.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_thumbnail_support_0_patch_1(self): - test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch.1') - expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.1.expected') + test_file = self.get_test_loc("patch/patches/windows/thumbnail_support_0.patch.1") + expected_file = self.get_test_loc( + "patch/patches/windows/thumbnail_support_0.patch.1.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_webform_3_0_conditional_constructor_0_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/webform-3.0-conditional_constructor_0.patch') - expected_file = self.get_test_loc('patch/patches/windows/webform-3.0-conditional_constructor_0.patch.expected') + test_file = self.get_test_loc( + "patch/patches/windows/webform-3.0-conditional_constructor_0.patch" + ) + expected_file = self.get_test_loc( + "patch/patches/windows/webform-3.0-conditional_constructor_0.patch.expected" + ) check_patch(test_file, expected_file) def test_patch_info_patch_patches_windows_xml_rpc_addspace_patch(self): - test_file = self.get_test_loc(u'patch/patches/windows/xml_rpc_addSpace.patch') - expected_file = 
self.get_test_loc('patch/patches/windows/xml_rpc_addSpace.patch.expected') + test_file = self.get_test_loc("patch/patches/windows/xml_rpc_addSpace.patch") + expected_file = self.get_test_loc("patch/patches/windows/xml_rpc_addSpace.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_xvidcap_1_1_6_docdir_patch(self): - test_file = self.get_test_loc(u'patch/patches/xvidcap-1.1.6-docdir.patch') - expected_file = self.get_test_loc('patch/patches/xvidcap-1.1.6-docdir.patch.expected') + test_file = self.get_test_loc("patch/patches/xvidcap-1.1.6-docdir.patch") + expected_file = self.get_test_loc("patch/patches/xvidcap-1.1.6-docdir.patch.expected") check_patch(test_file, expected_file) def test_patch_info_patch_patches_xvidcap_xorg_patch(self): - test_file = self.get_test_loc(u'patch/patches/xvidcap-xorg.patch') - expected_file = self.get_test_loc('patch/patches/xvidcap-xorg.patch.expected') + test_file = self.get_test_loc("patch/patches/xvidcap-xorg.patch") + expected_file = self.get_test_loc("patch/patches/xvidcap-xorg.patch.expected") check_patch(test_file, expected_file) diff --git a/tests/test_sevenzip.py b/tests/test_sevenzip.py index 4a7637e..a45c9c0 100644 --- a/tests/test_sevenzip.py +++ b/tests/test_sevenzip.py @@ -22,7 +22,7 @@ class TestSevenZip(FileBasedTesting): - test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + test_data_dir = os.path.join(os.path.dirname(__file__), "data") def check_results_with_expected_json( self, @@ -32,8 +32,8 @@ def check_results_with_expected_json( regen=False, ): if regen: - with open(expected_loc, 'w') as ex: - json.dump(results, ex, indent=2, separators=(',', ':')) + with open(expected_loc, "w") as ex: + json.dump(results, ex, indent=2, separators=(",", ":")) with open(expected_loc) as ex: expected = json.load(ex) @@ -48,12 +48,12 @@ def clean_dates(self, results): if isinstance(results, list): for res in results: # remove time from date/time stamp - dt = res.get('date') or None + dt = res.get("date") or None if dt: - res['date'] = dt.partition(' ')[0] + res["date"] = dt.partition(" ")[0] def test_get_7z_errors_password_protected(self): - test = ''' + test = """ 7-Zip 9.04 beta Copyright (c) 1999-2009 Igor Pavlov 2009-05-30 Processing archive: c:\\w421\\scripts\\testfiles\\archive\\zip\\zip_password_nexb.zip @@ -61,16 +61,16 @@ def test_get_7z_errors_password_protected(self): Extracting a.txt CRC Failed in encrypted file. Wrong password? 
 Sub items Errors: 1
-'''
-        result = sevenzip.get_7z_errors(test, test)
-        expected = 'Password protected archive, unable to extract'
-        assert expected == result
+"""
+        result = sevenzip.get_7z_errors(test, test)
+        expected = "Password protected archive, unable to extract"
+        assert expected == result
 
     def test_list_extracted_7z_files_empty(self):
-        assert [] == sevenzip.list_extracted_7z_files('')
+        assert [] == sevenzip.list_extracted_7z_files("")
 
     def test_list_extracted_7z_files_2(self):
-        test = '''
+        test = """
 7-Zip 9.04 beta Copyright (c) 1999-2009 Igor Pavlov 2009-05-30'
 p7zip Version 9.04 (locale=utf8,Utf16=on,HugeFiles=on,2 CPUs)
@@ -82,13 +82,13 @@ def test_list_extracted_7z_files_2(self):
 Size: 6536
 Compressed: 7674
-'''
-        expected = ['a.cpio']
+"""
+        expected = ["a.cpio"]
         result = sevenzip.list_extracted_7z_files(test)
         assert expected == result
 
     def test_list_extracted_7z_files_3(self):
-        test = '''
+        test = """
 7-Zip 9.04 beta Copyright (c) 1999-2009 Igor Pavlov 2009-05-30
 p7zip Version 9.04 (locale=utf8,Utf16=on,HugeFiles=on,2 CPUs)
@@ -101,8 +101,8 @@ def test_list_extracted_7z_files_3(self):
 Size: 6536
 Compressed: 7674
-'''
-        expected = ['a.cpio', 'b.cpio']
+"""
+        expected = ["a.cpio", "b.cpio"]
         result = sevenzip.list_extracted_7z_files(test)
         assert expected == result
 
@@ -112,59 +112,58 @@ def collect_extracted_path(self, test_dir):
         for t, dirs, files in os.walk(test_dir):
             t = fileutils.as_posixpath(t)
             for d in dirs:
-                nd = posixpath.join(t, d).replace(td, '') + '/'
+                nd = posixpath.join(t, d).replace(td, "") + "/"
                 result.append(nd)
             for f in files:
-                nf = posixpath.join(t, f).replace(td, '')
+                nf = posixpath.join(t, f).replace(td, "")
                 result.append(nf)
         result = sorted(result)
         return result
 
     def test_extract_of_tar_with_aboslute_path(self):
-        test_loc = self.get_test_loc('sevenzip/absolute_path.tar')
+        test_loc = self.get_test_loc("sevenzip/absolute_path.tar")
         target_dir = self.get_temp_dir()
         sevenzip.extract(test_loc, target_dir, file_by_file=False)
-        expected_loc = test_loc + '-extract-expected.json'
+        expected_loc = test_loc + "-extract-expected.json"
         results = self.collect_extracted_path(target_dir)
         self.check_results_with_expected_json(results, expected_loc, regen=False)
 
 
 class TestSevenZipListEntries(TestSevenZip):
-
-    @pytest.mark.skipif(on_windows, reason='Windows file-by-file extracton is not working well')
+    @pytest.mark.skipif(on_windows, reason="Windows file-by-file extraction is not working well")
    def test_list_entries_of_special_tar(self):
-        test_loc = self.get_test_loc('sevenzip/special.tar')
-        expected_loc = test_loc + '-entries-expected.json'
+        test_loc = self.get_test_loc("sevenzip/special.tar")
+        expected_loc = test_loc + "-entries-expected.json"
         entries, errors = sevenzip.list_entries(test_loc)
         entries = [e.to_dict(full=True) for e in entries]
         errors = errors or []
         results = entries + errors
         self.check_results_with_expected_json(results, expected_loc, regen=False)
 
-    @pytest.mark.skipif(not on_windows, reason='Windows file-by-file extracton is not working well')
+    @pytest.mark.skipif(not on_windows, reason="Windows file-by-file extraction is not working well")
     def test_list_entries_of_special_tar_win(self):
-        test_loc = self.get_test_loc('sevenzip/special.tar')
-        expected_loc = test_loc + '-entries-expected-win.json'
+        test_loc = self.get_test_loc("sevenzip/special.tar")
+        expected_loc = test_loc + "-entries-expected-win.json"
         entries, errors = sevenzip.list_entries(test_loc)
         entries = [e.to_dict(full=True) for e in entries]
         errors = errors or []
         results = entries + errors
         self.check_results_with_expected_json(results, expected_loc, clean_dates=True, regen=False)
 
-    @pytest.mark.skipif(on_windows, reason='Windows file-by-file extracton is not working well')
+    @pytest.mark.skipif(on_windows, reason="Windows file-by-file extraction is not working well")
     def test_list_entries_with_weird_names_7z(self):
-        test_loc = self.get_test_loc('sevenzip/weird_names.7z')
-        expected_loc = test_loc + '-entries-expected.json'
+        test_loc = self.get_test_loc("sevenzip/weird_names.7z")
+        expected_loc = test_loc + "-entries-expected.json"
         entries, errors = sevenzip.list_entries(test_loc)
         entries = [e.to_dict(full=True) for e in entries]
         errors = errors or []
         results = entries + errors
         self.check_results_with_expected_json(results, expected_loc, regen=False)
 
-    @pytest.mark.skipif(not on_windows, reason='Windows file-by-file extracton is not working well')
+    @pytest.mark.skipif(not on_windows, reason="Windows file-by-file extraction is not working well")
     def test_list_entries_with_weird_names_7z_win(self):
-        test_loc = self.get_test_loc('sevenzip/weird_names.7z')
-        expected_loc = test_loc + '-entries-expected-win.json'
+        test_loc = self.get_test_loc("sevenzip/weird_names.7z")
+        expected_loc = test_loc + "-entries-expected-win.json"
         entries, errors = sevenzip.list_entries(test_loc)
         entries = [e.to_dict(full=True) for e in entries]
         errors = errors or []
@@ -173,123 +172,149 @@ def test_list_entries_with_weird_names_7z_win(self):
 
 
 class TestSevenParseListing(TestSevenZip):
-
     def check_parse_7z_listing(self, test_loc, regen=False):
         test_loc = self.get_test_loc(test_loc)
         results = [e.to_dict(full=True) for e in sevenzip.parse_7z_listing(location=test_loc)]
-        expected_loc = test_loc + '-expected.json'
+        expected_loc = test_loc + "-expected.json"
         self.check_results_with_expected_json(
-            results=results, expected_loc=expected_loc, regen=regen)
+            results=results, expected_loc=expected_loc, regen=regen
+        )
 
     def test_parse_7z_listing_cpio_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/cpio_relative.cpio.linux', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/cpio_relative.cpio.linux", regen=False)
 
     def test_parse_7z_listing_cpio_from_win(self):
-        self.check_parse_7z_listing('sevenzip/listings/cpio_relative.cpio.win', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/cpio_relative.cpio.win", regen=False)
 
     def test_parse_7z_listing_7z_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.7z_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.7z_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_7z_from_win(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.ar_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.ar_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_cpio_weird_names_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.cpio_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.cpio_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_iso_weird_names_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.iso_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.iso_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_rar_weird_names_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.rar_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.rar_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_tar_weird_names_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.tar_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.tar_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_zip_weird_names_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names.zip_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names.zip_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_z_from_mac(self):
-        self.check_parse_7z_listing('sevenzip/listings/single_file.z.mac', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/single_file.z.mac", regen=False)
 
     def test_parse_7z_listing_tarz_from_mac(self):
-        self.check_parse_7z_listing('sevenzip/listings/single_file.tarz.mac', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/single_file.tarz.mac", regen=False)
 
     def test_parse_7z_listing_shar_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/demo-spring-boot.sh.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/demo-spring-boot.sh.listing", regen=False)
 
     def test_parse_7z_listing_svgz_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/insert-emptyframe.svgz.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/insert-emptyframe.svgz.listing", regen=False)
 
     def test_parse_7z_listing_rpm_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.rpm.listing', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/libsqueeze0.2_0-0.2.3-8mdv2010.0.i586.rpm.listing", regen=False
+        )
 
     def test_parse_7z_listing_tbz_broken_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/tarred_bzipped_broken.tar.bz2.listing', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/tarred_bzipped_broken.tar.bz2.listing", regen=False
+        )
 
     def test_parse_7z_listing_tbz_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/tarred_bzipped.tar.bz2.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/tarred_bzipped.tar.bz2.listing", regen=False)
 
     def test_parse_7z_listing_txz_from_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/texlive-core-patches-20.tar.xz.listing', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/texlive-core-patches-20.tar.xz.listing", regen=False
+        )
 
     def test_parse_7z_listing_deb_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/adduser_3.113+nmu3ubuntu3_all.deb-linux.listing', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/adduser_3.113+nmu3ubuntu3_all.deb-linux.listing", regen=False
+        )
 
     def test_parse_7z_listing_special_tar_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/special.tar-linux.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/special.tar-linux.listing", regen=False)
 
     def test_parse_7z_listing_cbr_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/t.cbr-linux.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/t.cbr-linux.listing", regen=False)
 
     def test_parse_7z_listing_weird_names_7zip_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/weird_names-mini.7z_7zip_linux_listing.data', regen=False)
+        self.check_parse_7z_listing(
+            "sevenzip/listings/weird_names-mini.7z_7zip_linux_listing.data", regen=False
+        )
 
     def test_parse_7z_listing_xar_linux(self):
-        self.check_parse_7z_listing('sevenzip/listings/xar-1.4.xar-linux.listing', regen=False)
+        self.check_parse_7z_listing("sevenzip/listings/xar-1.4.xar-linux.listing", regen=False)
 
 
 class TestSevenZipFileByFile(TestSevenZip):
-
     def check_extract_file_by_file(self, test_file, regen=False):
         """
-        This test uses a different expected JSON file on Linux
+        Uses a different expected JSON file for testing on Linux
         """
         test_loc = self.get_test_loc(test_file)
         target_dir = self.get_temp_dir()
-        suffix = '-win' if on_windows else ''
+        suffix = "-win" if on_windows else ""
         try:
             sevenzip.extract_file_by_file(test_loc, target_dir)
         except ExtractErrorFailedToExtract as e:
             # this fails on some Windows 10 installs and not some others
             # based on symlinks creation permissions
-            expected_err_loc = test_loc + '-extract-errors-expected' + suffix + '.json'
+            expected_err_loc = test_loc + "-extract-errors-expected" + suffix + ".json"
             self.check_results_with_expected_json(e.args[0], expected_err_loc, regen=regen)
 
-        expected_loc = test_loc + '-extract-expected' + suffix + '.json'
+        expected_loc = test_loc + "-extract-expected" + suffix + ".json"
         results = self.collect_extracted_path(target_dir)
         self.check_results_with_expected_json(results, expected_loc, regen=regen)
 
     def test_extract_file_by_file_of_tar_with_absolute_path(self):
-        self.check_extract_file_by_file('sevenzip/absolute_path.tar', regen=False)
+        self.check_extract_file_by_file("sevenzip/absolute_path.tar", regen=False)
 
     def test_extract_file_by_file_of_nested_zip(self):
-        self.check_extract_file_by_file('sevenzip/relative_nested.zip', regen=False)
+        self.check_extract_file_by_file("sevenzip/relative_nested.zip", regen=False)
 
     def test_extract_file_by_file_of_special_tar(self):
-        self.check_extract_file_by_file('sevenzip/special.tar', regen=False)
+        self.check_extract_file_by_file("sevenzip/special.tar", regen=False)
 
     def test_extract_file_by_file_with_weird_names_7z(self):
-        self.check_extract_file_by_file('sevenzip/weird_names.7z', regen=False)
+        self.check_extract_file_by_file("sevenzip/weird_names.7z", regen=False)
 
     def test_extract_file_by_file_weird_names_zip(self):
-        self.check_extract_file_by_file('sevenzip/weird_names.zip', regen=False)
+        self.check_extract_file_by_file("sevenzip/weird_names.zip", regen=False)
 
-    @pytest.mark.xfail(on_windows, reason='Fails on Windows becasue it has file names that cannot be extracted there')
+    @pytest.mark.xfail(
+        on_windows,
+        reason="Fails on Windows because it has file names that cannot be extracted there",
+    )
     def test_extract_file_by_file_weird_names_ar(self):
-        self.check_extract_file_by_file('sevenzip/weird_names.ar', regen=False)
+        self.check_extract_file_by_file("sevenzip/weird_names.ar", regen=False)
 
     def test_extract_file_by_file_weird_names_cpio(self):
-        self.check_extract_file_by_file('sevenzip/weird_names.cpio', regen=False)
+        self.check_extract_file_by_file("sevenzip/weird_names.cpio", regen=False)
 
     def test_extract_file_by_file_weird_names_tar(self):
-        self.check_extract_file_by_file('sevenzip/weird_names.tar', regen=False)
+        self.check_extract_file_by_file("sevenzip/weird_names.tar", regen=False)
diff --git a/tests/test_vmimage.py b/tests/test_vmimage.py
index 6653cdd..f87eeb2 100644
--- a/tests/test_vmimage.py
+++ b/tests/test_vmimage.py
@@ -20,28 +20,28 @@
 from extractcode import vmimage
 
 
-@pytest.mark.skipif(not on_linux, reason='Only linux supports image extraction')
+@pytest.mark.skipif(not on_linux, reason="Only linux supports image extraction")
 class TestExtractVmImage(BaseArchiveTestCase):
-    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
+    test_data_dir = os.path.join(os.path.dirname(__file__), "data")
 
     def test_can_listfs_from_qcow2_image(self):
-        test_file = self.extract_test_tar('vmimage/foobar.qcow2.tar.gz')
-        test_file = str(Path(test_file) / 'foobar.qcow2')
+        test_file = self.extract_test_tar("vmimage/foobar.qcow2.tar.gz")
+        test_file = str(Path(test_file) / "foobar.qcow2")
         vmi = vmimage.VmImage.from_file(test_file)
-        assert [('/dev/sda', 'ext2')] == vmi.listfs()
+        assert [("/dev/sda", "ext2")] == vmi.listfs()
 
     def test_can_extract_qcow2_vm_image_as_tarball(self):
-        test_file = self.extract_test_tar('vmimage/foobar.qcow2.tar.gz')
-        test_file = str(Path(test_file) / 'foobar.qcow2')
-        target_dir = self.get_temp_dir('vmimage')
+        test_file = self.extract_test_tar("vmimage/foobar.qcow2.tar.gz")
+        test_file = str(Path(test_file) / "foobar.qcow2")
+        target_dir = self.get_temp_dir("vmimage")
         vmimage.extract(location=test_file, target_dir=target_dir, as_tarballs=True)
-        expected = ['foobar.qcow2.tar.gz']
+        expected = ["foobar.qcow2.tar.gz"]
         check_files(target_dir, expected)
 
     def test_can_extract_qcow2_vm_image_not_as_tarball(self):
-        test_file = self.extract_test_tar('vmimage/bios-tables-test.x86_64.iso.qcow2.tar.gz')
-        test_file = str(Path(test_file) / 'bios-tables-test.x86_64.iso.qcow2')
-        target_dir = self.get_temp_dir('vmimage')
+        test_file = self.extract_test_tar("vmimage/bios-tables-test.x86_64.iso.qcow2.tar.gz")
+        test_file = str(Path(test_file) / "bios-tables-test.x86_64.iso.qcow2")
+        target_dir = self.get_temp_dir("vmimage")
         vmimage.extract(location=test_file, target_dir=target_dir, as_tarballs=False)
-        expected = ['bios_tab.fat', 'boot.cat']
+        expected = ["bios_tab.fat", "boot.cat"]
         check_files(target_dir, expected)
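
A note on the expected-JSON convention that recurs throughout the test hunks above: each test compares computed results against a JSON fixture on disk, and passing regen=True rewrites that fixture from the current results before comparing. A minimal sketch of the idea, assuming a plain list of results and a fixture path (the body mirrors the check_results_with_expected_json method shown in the tests/test_sevenzip.py hunks; the standalone function form here is illustrative, not the project's actual API):

    import json

    def check_results_with_expected_json(results, expected_loc, regen=False):
        # With regen=True, refresh the fixture from the current results;
        # the rewritten JSON then becomes the new baseline to commit.
        if regen:
            with open(expected_loc, "w") as ex:
                json.dump(results, ex, indent=2, separators=(",", ":"))
        # Always reload and compare, so a regen run still exercises the
        # load-and-compare path (and passes by construction).
        with open(expected_loc) as ex:
            expected = json.load(ex)
        assert expected == results

This is presumably why every call site above pins regen=False: refreshing a fixture is a deliberate, local operation, not something a committed test run should do silently.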