diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 00000000..8b87a275 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,19 @@ +name: main + +on: + push: + branches: [main, test-me-*] + tags: '*' + pull_request: + +jobs: + main-windows: + uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 + with: + env: '["py310"]' + os: windows-latest + main-linux: + uses: asottile/workflows/.github/workflows/tox.yml@v1.8.1 + with: + env: '["py310", "py311", "py312", "py313"]' + os: ubuntu-latest diff --git a/.gitignore b/.gitignore index 32c2fec0..4f6c5b7c 100644 --- a/.gitignore +++ b/.gitignore @@ -3,9 +3,4 @@ .*.sw[a-z] .coverage .tox -.venv.touch -/.mypy_cache -/.pytest_cache -/venv* -coverage-html dist diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b4614a74..e4808c9f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,50 +1,41 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - - id: check-docstring-first - - id: check-json - - id: check-added-large-files - id: check-yaml - id: debug-statements - - id: name-tests-test - id: double-quote-string-fixer + - id: name-tests-test - id: requirements-txt-fixer -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.0 - hooks: - - id: flake8 - additional_dependencies: [flake8-typing-imports==1.7.0] -- repo: https://github.com/pre-commit/mirrors-autopep8 - rev: v1.5.2 - hooks: - - id: autopep8 -- repo: https://github.com/pre-commit/pre-commit - rev: v2.4.0 +- repo: https://github.com/asottile/setup-cfg-fmt + rev: v3.2.0 hooks: - - id: validate_manifest -- repo: https://github.com/asottile/reorder_python_imports - rev: v2.3.0 + - id: setup-cfg-fmt +- repo: https://github.com/asottile/reorder-python-imports + rev: v3.16.0 hooks: - id: reorder-python-imports - args: [--py3-plus] + args: [--py310-plus, --add-import, 'from __future__ import annotations'] +- repo: https://github.com/asottile/add-trailing-comma + rev: v4.0.0 + hooks: + - id: add-trailing-comma - repo: https://github.com/asottile/pyupgrade - rev: v2.4.1 + rev: v3.21.2 hooks: - id: pyupgrade - args: [--py36-plus] -- repo: https://github.com/asottile/add-trailing-comma - rev: v2.0.1 + args: [--py310-plus] +- repo: https://github.com/hhatto/autopep8 + rev: v2.3.2 hooks: - - id: add-trailing-comma - args: [--py36-plus] -- repo: https://github.com/asottile/setup-cfg-fmt - rev: v1.9.0 + - id: autopep8 +- repo: https://github.com/PyCQA/flake8 + rev: 7.3.0 hooks: - - id: setup-cfg-fmt + - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.770 + rev: v1.19.1 hooks: - id: mypy diff --git a/.pre-commit-hooks.yaml b/.pre-commit-hooks.yaml index 3e4dc9ea..275605eb 100644 --- a/.pre-commit-hooks.yaml +++ b/.pre-commit-hooks.yaml @@ -1,195 +1,212 @@ -- id: autopep8-wrapper - name: autopep8 wrapper (removed) - description: (removed) use pre-commit/mirrors-autopep8 instead. - entry: pre-commit-hooks-removed autopep8-wrapper autopep8 https://github.com/pre-commit/mirrors-autopep8 - language: python - always_run: true - pass_filenames: false - id: check-added-large-files - name: Check for added large files - description: Prevent giant files from being committed + name: check for added large files + description: prevents giant files from being committed. 
entry: check-added-large-files language: python + stages: [pre-commit, pre-push, manual] + minimum_pre_commit_version: 3.2.0 - id: check-ast - name: Check python ast - description: Simply check whether the files parse as valid python. + name: check python ast + description: simply checks whether the files parse as valid python. entry: check-ast language: python types: [python] - id: check-byte-order-marker - name: Check for byte-order marker - description: Forbid files which have a UTF-8 byte-order marker - entry: check-byte-order-marker + name: check-byte-order-marker (removed) + description: (removed) use fix-byte-order-marker instead. + entry: pre-commit-hooks-removed check-byte-order-marker fix-byte-order-marker https://github.com/pre-commit/pre-commit-hooks language: python types: [text] - id: check-builtin-literals - name: Check builtin type constructor use - description: Require literal syntax when initializing empty or zero Python builtin types. + name: check builtin type constructor use + description: requires literal syntax when initializing empty or zero python builtin types. entry: check-builtin-literals language: python types: [python] - id: check-case-conflict - name: Check for case conflicts - description: Check for files that would conflict in case-insensitive filesystems + name: check for case conflicts + description: checks for files that would conflict in case-insensitive filesystems. entry: check-case-conflict language: python - id: check-docstring-first - name: Check docstring is first - description: Checks a common error of defining a docstring after code. + name: check docstring is first (deprecated) + description: checks a common error of defining a docstring after code. entry: check-docstring-first language: python types: [python] - id: check-executables-have-shebangs - name: Check that executables have shebangs - description: Ensures that (non-binary) executables have a shebang. + name: check that executables have shebangs + description: ensures that (non-binary) executables have a shebang. entry: check-executables-have-shebangs language: python types: [text, executable] - stages: [commit, push, manual] + stages: [pre-commit, pre-push, manual] + minimum_pre_commit_version: 3.2.0 +- id: check-illegal-windows-names + name: check illegal windows names + entry: Illegal Windows filenames detected + language: fail + files: '(?i)((^|/)(CON|PRN|AUX|NUL|COM[\d¹²³]|LPT[\d¹²³])(\.|/|$)|[<>:\"\\|?*\x00-\x1F]|/[^/]*[\.\s]/|[^/]*[\.\s]$)' - id: check-json - name: Check JSON - description: This hook checks json files for parseable syntax. + name: check json + description: checks json files for parseable syntax. entry: check-json language: python types: [json] +- id: check-shebang-scripts-are-executable + name: check that scripts with shebangs are executable + description: ensures that (non-binary) files with a shebang are executable. + entry: check-shebang-scripts-are-executable + language: python + types: [text] + stages: [pre-commit, pre-push, manual] + minimum_pre_commit_version: 3.2.0 - id: pretty-format-json - name: Pretty format JSON - description: This hook sets a standard for formatting JSON files. + name: pretty format json + description: sets a standard for formatting json files. entry: pretty-format-json language: python types: [json] - id: check-merge-conflict - name: Check for merge conflicts - description: Check for files that contain merge conflict strings. + name: check for merge conflicts + description: checks for files that contain merge conflict strings. 
entry: check-merge-conflict language: python types: [text] - id: check-symlinks - name: Check for broken symlinks - description: Checks for symlinks which do not point to anything. + name: check for broken symlinks + description: checks for symlinks which do not point to anything. entry: check-symlinks language: python types: [symlink] - id: check-toml - name: Check Toml - description: This hook checks toml files for parseable syntax. + name: check toml + description: checks toml files for parseable syntax. entry: check-toml language: python types: [toml] - id: check-vcs-permalinks - name: Check vcs permalinks - description: Ensures that links to vcs websites are permalinks. + name: check vcs permalinks + description: ensures that links to vcs websites are permalinks. entry: check-vcs-permalinks language: python types: [text] - id: check-xml - name: Check Xml - description: This hook checks xml files for parseable syntax. + name: check xml + description: checks xml files for parseable syntax. entry: check-xml language: python types: [xml] - id: check-yaml - name: Check Yaml - description: This hook checks yaml files for parseable syntax. + name: check yaml + description: checks yaml files for parseable syntax. entry: check-yaml language: python types: [yaml] - id: debug-statements - name: Debug Statements (Python) - description: Check for debugger imports and py37+ `breakpoint()` calls in python source. + name: debug statements (python) + description: checks for debugger imports and py37+ `breakpoint()` calls in python source. entry: debug-statement-hook language: python types: [python] +- id: destroyed-symlinks + name: detect destroyed symlinks + description: detects symlinks which are changed to regular files with a content of a path which that symlink was pointing to. + entry: destroyed-symlinks + language: python + types: [file] + stages: [pre-commit, pre-push, manual] - id: detect-aws-credentials - name: Detect AWS Credentials - description: Detects *your* aws credentials from the aws cli credentials file + name: detect aws credentials + description: detects *your* aws credentials from the aws cli credentials file. entry: detect-aws-credentials language: python types: [text] - id: detect-private-key - name: Detect Private Key - description: Detects the presence of private keys + name: detect private key + description: detects the presence of private keys. entry: detect-private-key language: python types: [text] - id: double-quote-string-fixer - name: Fix double quoted strings - description: This hook replaces double quoted strings with single quoted strings + name: fix double quoted strings + description: replaces double quoted strings with single quoted strings. entry: double-quote-string-fixer language: python types: [python] - id: end-of-file-fixer - name: Fix End of Files - description: Ensures that a file is either empty, or ends with one newline. + name: fix end of files + description: ensures that a file is either empty, or ends with one newline. entry: end-of-file-fixer language: python types: [text] - stages: [commit, push, manual] + stages: [pre-commit, pre-push, manual] + minimum_pre_commit_version: 3.2.0 - id: file-contents-sorter - name: File Contents Sorter - description: Sort the lines in specified files (defaults to alphabetical). You must provide list of target files as input in your .pre-commit-config.yaml file. + name: file contents sorter + description: sorts the lines in specified files (defaults to alphabetical). 
you must provide list of target files as input in your .pre-commit-config.yaml file. entry: file-contents-sorter language: python files: '^$' +- id: fix-byte-order-marker + name: fix utf-8 byte order marker + description: removes utf-8 byte order marker. + entry: fix-byte-order-marker + language: python + types: [text] - id: fix-encoding-pragma - name: Fix python encoding pragma + name: fix python encoding pragma (removed) + description: (removed) use pyupgrade instead. + entry: pre-commit-hooks-removed fix-encoding-pragma pyupgrade https://github.com/asottile/pyupgrade language: python - entry: fix-encoding-pragma - description: 'Add # -*- coding: utf-8 -*- to the top of python files' types: [python] -- id: flake8 - name: Flake8 (removed) - description: (removed) use gitlab.com/pycqa/flake8 instead. - entry: pre-commit-hooks-removed flake8 flake8 https://gitlab.com/pycqa/flake8 - language: python - always_run: true - pass_filenames: false - id: forbid-new-submodules - name: Forbid new submodules + name: forbid new submodules + description: prevents addition of new git submodules. language: python entry: forbid-new-submodules - description: Prevent addition of new git submodules + types: [directory] +- id: forbid-submodules + name: forbid submodules + description: forbids any submodules in the repository + language: fail + entry: 'submodules are not allowed in this repository:' + types: [directory] - id: mixed-line-ending - name: Mixed line ending - description: Replaces or checks mixed line ending + name: mixed line ending + description: replaces or checks mixed line ending. entry: mixed-line-ending language: python types: [text] - id: name-tests-test - name: Tests should end in _test.py - description: This verifies that test files are named correctly + name: python tests naming + description: verifies that test files are named correctly. entry: name-tests-test language: python files: (^|/)tests/.+\.py$ - id: no-commit-to-branch - name: "Don't commit to branch" + name: "don't commit to branch" entry: no-commit-to-branch language: python pass_filenames: false always_run: true -- id: pyflakes - name: Pyflakes (removed) - description: (removed) use gitlab.com/pycqa/flake8 instead. - entry: pre-commit-hooks-removed pyflakes flake8 https://gitlab.com/pycqa/flake8 - language: python - always_run: true - pass_filenames: false - id: requirements-txt-fixer - name: Fix requirements.txt - description: Sorts entries in requirements.txt + name: fix requirements.txt + description: sorts entries in requirements.txt. entry: requirements-txt-fixer language: python - files: requirements.*\.txt$ + files: (requirements|constraints).*\.txt$ - id: sort-simple-yaml - name: Sort simple YAML files + name: sort simple yaml files + description: sorts simple yaml files which consist only of top-level keys, preserving comments and blocks. language: python entry: sort-simple-yaml - description: Sorts simple YAML files which consist only of top-level keys, preserving comments and blocks. files: '^$' - id: trailing-whitespace - name: Trim Trailing Whitespace - description: This hook trims trailing whitespace. + name: trim trailing whitespace + description: trims trailing whitespace. 
entry: trailing-whitespace-fixer language: python types: [text] - stages: [commit, push, manual] + stages: [pre-commit, pre-push, manual] + minimum_pre_commit_version: 3.2.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 9aa7441b..522925ee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,242 @@ +6.0.0 - 2025-08-09 +================== + +## Fixes +- `check-shebang-scripts-are-executable`: improve error message. + - #1115 PR by @homebysix. + +## Migrating +- now requires python >= 3.9. + - #1098 PR by @asottile. +- `file-contents-sorter`: disallow `--unique` and `--ignore-case` at the same + time. + - #1095 PR by @nemacysts. + - #794 issue by @teksturi. +- Removed `check-byte-order-marker` and `fix-encoding-pragma`. + - `check-byte-order-marker`: migrate to `fix-byte-order-marker`. + - `fix-encoding-pragma`: migrate to `pyupgrade`. + - #1034 PR by @mxr. + - #1032 issue by @mxr. + - #522 PR by @jgowdy. + +5.0.0 - 2024-10-05 +================== + +### Features +- `requirements-txt-fixer`: also remove `pkg_resources==...`. + - #850 PR by @ericfrederich. + - #1030 issue by @ericfrederich. +- `check-illegal-windows-names`: new hook! + - #1044 PR by @ericfrederich. + - #589 issue by @ericfrederich. + - #1049 PR by @Jeffrey-Lim. +- `pretty-format-json`: continue processing even if a file has a json error. + - #1039 PR by @amarvin. + - #1038 issue by @amarvin. + +### Fixes +- `destroyed-symlinks`: set `stages` to `[pre-commit, pre-push, manual]` + - PR #1085 by @AdrianDC. + +### Migrating +- pre-commit-hooks now requires `pre-commit>=3.2.0`. +- use non-deprecated names for `stages`. + - #1093 PR by @asottile. + +4.6.0 - 2024-04-06 +================== + +### Features +- `requirements-txt-fixer`: remove duplicate packages. + - #1014 PR by @vhoulbreque-withings. + - #960 issue @csibe17. + +### Migrating +- `fix-encoding-pragma`: deprecated -- will be removed in 5.0.0. use + [pyupgrade](https://github.com/asottile/pyupgrade) or some other tool. + - #1033 PR by @mxr. + - #1032 issue by @mxr. + +4.5.0 - 2023-10-07 +================== + +### Features +- `requirements-txt-fixer`: also sort `constraints.txt` by default. + - #857 PR by @lev-blit. + - #830 issue by @PLPeeters. +- `debug-statements`: add `bpdb` debugger. + - #942 PR by @mwip. + - #941 issue by @mwip. + +### Fixes +- `file-contents-sorter`: fix sorting an empty file. + - #944 PR by @RoelAdriaans. + - #935 issue by @paduszyk. +- `double-quote-string-fixer`: don't rewrite inside f-strings in 3.12+. + - #973 PR by @asottile. + - #971 issue by @XuehaiPan. + +## Migrating +- now requires python >= 3.8. + - #926 PR by @asottile. + - #927 PR by @asottile. + +4.4.0 - 2022-11-23 +================== + +### Features +- `forbid-submodules`: new hook which outright bans submodules. + - #815 PR by @asottile. + - #707 issue by @ChiefGokhlayeh. + +4.3.0 - 2022-06-07 +================== + +### Features +- `check-executables-have-shebangs`: use `git config core.fileMode` to + determine if it should query `git`. + - #730 PR by @Kurt-von-Laven. +- `name-tests-test`: add `--pytest-test-first` test convention. + - #779 PR by @asottile. + +### Fixes +- `check-shebang-scripts-are-executable`: update windows instructions. + - #774 PR by @mdeweerd. + - #770 issue by @mdeweerd. +- `check-toml`: use stdlib `tomllib` when available. + - #771 PR by @DanielNoord. + - #755 issue by @sognetic. +- `check-added-large-files`: don't run on non-file `stages`. + - #778 PR by @asottile. + - #777 issue by @skyj. 
+ +4.2.0 - 2022-04-06 +================== + +### Features +- `name-tests-test`: updated display text. + - #713 PR by @asottile. +- `check-docstring-first`: make output more parsable. + - #748 PR by @asottile. +- `check-merge-conflict`: make output more parsable. + - #748 PR by @asottile. +- `debug-statements`: make output more parsable. + - #748 PR by @asottile. + +### Fixes +- `check-merge-conflict`: fix detection of `======` conflict marker on windows. + - #748 PR by @asottile. + +### Updating +- Drop python<3.7. + - #719 PR by @asottile. +- Changed default branch from `master` to `main`. + - #744 PR by @asottile. + +4.1.0 - 2021-12-22 +================== + +### Features +- `debug-statements`: add `pdbr` debugger. + - #614 PR by @cansarigol. +- `detect-private-key`: add detection for additional key types. + - #658 PR by @ljmf00. +- `check-executables-have-shebangs`: improve messaging on windows. + - #689 PR by @pujitm. + - #686 issue by @jmerdich. +- `check-added-large-files`: support `--enforce-all` with `git-lfs`. + - #674 PR by @amartani. + - #560 issue by @jeremy-coulon. + +### Fixes +- `check-case-conflict`: improve performance. + - #626 PR by @guykisel. + - #625 issue by @guykisel. +- `forbid-new-submodules`: fix false-negatives for `pre-push`. + - #619 PR by @m-khvoinitsky. + - #609 issue by @m-khvoinitsky. +- `check-merge-conflict`: fix execution in git worktrees. + - #662 PR by @errsyn. + - #638 issue by @daschuer. + +### Misc. +- Normalize case of hook names and descriptions. + - #671 PR by @dennisroche. + - #673 PR by @revolter. + +4.0.1 - 2021-05-16 +================== + +### Fixes +- `check-shebang-scripts-are-executable` fix entry point. + - #602 issue by @Person-93. + - #603 PR by @scop. + +4.0.0 - 2021-05-14 +================== + +### Features +- `check-json`: report duplicate keys. + - #558 PR by @AdityaKhursale. + - #554 issue by @adamchainz. +- `no-commit-to-branch`: add `main` to default blocked branches. + - #565 PR by @ndevenish. +- `check-case-conflict`: check conflicts in directory names as well. + - #575 PR by @slsyy. + - #70 issue by @andyjack. +- `check-vcs-permalinks`: forbid other branch names. + - #582 PR by @jack1142. + - #581 issue by @jack1142. +- `check-shebang-scripts-are-executable`: new hook which ensures shebang'd + scripts are executable. + - #545 PR by @scop. + +### Fixes +- `check-executables-have-shebangs`: Short circuit shebang lookup on windows. + - #544 PR by @scop. +- `requirements-txt-fixer`: Fix comments which have indentation + - #549 PR by @greshilov. + - #548 issue by @greshilov. +- `pretty-format-json`: write to stdout using UTF-8 encoding. + - #571 PR by @jack1142. + - #570 issue by @jack1142. +- Use more inclusive language. + - #599 PR by @asottile. + +### Breaking changes +- Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`. + - #597 PR by @asottile. + + +3.4.0 - 2020-12-15 +================== + +### Features +- `file-contents-sorter`: Add `--unique` argument + - #524 PR by @danielhoherd. +- `check-vcs-permalinks`: Add `--additional-github-domain` option + - #530 PR by @youngminz. +- New hook: `destroyed-symlinks` to detect unintentional symlink-breakages on + windows. + - #511 PR by @m-khvoinitsky. + +3.3.0 - 2020-10-20 +================== + +### Features +- `file-contents-sorter`: add `--ignore-case` option for case-insensitive + sorting + - #514 PR by @Julian. +- `check-added-large-files`: add `--enforce-all` option to check non-added + files as well + - #519 PR by @mshawcroft. + - #518 issue by @mshawcroft. 
+- `fix-byte-order-marker`: new hook which fixes UTF-8 byte-order marker. + - #522 PR by @jgowdy. + +### Deprecations +- `check-byte-order-marker` is now deprecated for `fix-byte-order-marker` + 3.2.0 - 2020-07-30 ================== diff --git a/README.md b/README.md index 3552721f..8432455f 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -[![Build Status](https://asottile.visualstudio.com/asottile/_apis/build/status/pre-commit.pre-commit-hooks?branchName=master)](https://asottile.visualstudio.com/asottile/_build/latest?definitionId=17&branchName=master) -[![Azure DevOps coverage](https://img.shields.io/azure-devops/coverage/asottile/asottile/17/master.svg)](https://dev.azure.com/asottile/asottile/_build/latest?definitionId=17&branchName=master) +[![build status](https://github.com/pre-commit/pre-commit-hooks/actions/workflows/main.yml/badge.svg)](https://github.com/pre-commit/pre-commit-hooks/actions/workflows/main.yml) +[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pre-commit/pre-commit-hooks/main.svg)](https://results.pre-commit.ci/latest/github/pre-commit/pre-commit-hooks/main) pre-commit-hooks ================ @@ -15,7 +15,7 @@ Add this to your `.pre-commit-config.yaml` ```yaml - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 # Use the ref you want to point at + rev: v6.0.0 # Use the ref you want to point at hooks: - id: trailing-whitespace # - id: ... @@ -26,8 +26,11 @@ Add this to your `.pre-commit-config.yaml` #### `check-added-large-files` Prevent giant files from being committed. - Specify what is "too large" with `args: ['--maxkb=123']` (default=500kB). + - Limits checked files to those indicated as staged for addition by git. - If `git-lfs` is installed, lfs files will be skipped (requires `git-lfs>=2.2.1`) + - `--enforce-all` - Check all listed files not just those staged for + addition. #### `check-ast` Simply check whether files parse as valid python. @@ -39,23 +42,24 @@ Require literal syntax when initializing empty or zero Python builtin types. - Ignore this requirement for specific builtin types with `--ignore=type1,type2,…`. - Forbid `dict` keyword syntax with `--no-allow-dict-kwargs`. -#### `check-byte-order-marker` -Forbid files which have a UTF-8 byte-order marker - #### `check-case-conflict` Check for files with names that would conflict on a case-insensitive filesystem like MacOS HFS+ or Windows FAT. -#### `check-docstring-first` -Checks for a common error of placing code before the docstring. - #### `check-executables-have-shebangs` Checks that non-binary executables have a proper shebang. +#### `check-illegal-windows-names` +Check for files that cannot be created on Windows. + #### `check-json` Attempts to load all json files to verify syntax. #### `check-merge-conflict` Check for files that contain merge conflict strings. + - `--assume-in-merge` - Allows running the hook when there is no ongoing merge operation + +#### `check-shebang-scripts-are-executable` +Checks that scripts with shebangs are executable. #### `check-symlinks` Checks for symlinks which do not point to anything. @@ -65,6 +69,10 @@ Attempts to load all TOML files to verify syntax. #### `check-vcs-permalinks` Ensures that links to vcs websites are permalinks. + - `--additional-github-domain DOMAIN` - Add check for specified domain. + Can be repeated multiple times. 
for example, if your company uses + GitHub Enterprise you may use something like + `--additional-github-domain github.example.com` #### `check-xml` Attempts to load all xml files to verify syntax. @@ -82,6 +90,12 @@ Attempts to load all yaml files to verify syntax. #### `debug-statements` Check for debugger imports and py37+ `breakpoint()` calls in python source. +#### `destroyed-symlinks` +Detects symlinks which are changed to regular files with a content of a path +which that symlink was pointing to. +This usually happens on Windows when a user clones a repository that has +symlinks but they do not have the permission to create symlinks. + #### `detect-aws-credentials` Checks for the existence of AWS secrets that you have set up with the AWS CLI. The following arguments are available: @@ -99,18 +113,28 @@ This hook replaces double quoted strings with single quoted strings. #### `end-of-file-fixer` Makes sure files end in a newline and only a newline. -#### `fix-encoding-pragma` -Add `# -*- coding: utf-8 -*-` to the top of python files. - - To remove the coding pragma pass `--remove` (useful in a python3-only codebase) - #### `file-contents-sorter` Sort the lines in specified files (defaults to alphabetical). -You must provide list of target files as input to it. +You must provide the target [`files`](https://pre-commit.com/#config-files) as input. Note that this hook WILL remove blank lines and does NOT respect any comments. +All newlines will be converted to line feeds (`\n`). + +The following arguments are available: +- `--ignore-case` - fold lower case to upper case characters. +- `--unique` - ensure each line is unique. + +#### `fix-byte-order-marker` +removes UTF-8 byte order marker #### `forbid-new-submodules` Prevent addition of new git submodules. +This is intended as a helper to migrate away from submodules. If you want to +ban them entirely use `forbid-submodules` + +#### `forbid-submodules` +forbids any submodules in the repository. + #### `mixed-line-ending` Replaces or checks mixed line ending. - `--fix={auto,crlf,lf,no}` @@ -120,13 +144,15 @@ Replaces or checks mixed line ending. - `no` - Checks if there is any mixed line ending without modifying any file. #### `name-tests-test` -Assert that files in tests/ end in `_test.py`. - - Use `args: ['--django']` to match `test*.py` instead. +verifies that test files are named correctly. +- `--pytest` (the default): ensure tests match `.*_test\.py` +- `--pytest-test-first`: ensure tests match `test_.*\.py` +- `--django` / `--unittest`: ensure tests match `test.*\.py` #### `no-commit-to-branch` Protect specific branches from direct checkins. - - Use `args: [--branch, staging, --branch, master]` to set the branch. - `master` is the default if no branch argument is set. + - Use `args: [--branch, staging, --branch, main]` to set the branch. + Both `main` and `master` are protected by default if no branch argument is set. - `-b` / `--branch` may be specified multiple times to protect multiple branches. - `-p` / `--pattern` can be used to protect branches that match a supplied regex @@ -150,7 +176,7 @@ the following commandline options: - `--top-keys comma,separated,keys` - Keys to keep at the top of mappings. 
#### `requirements-txt-fixer` -Sorts entries in requirements.txt and removes incorrect entry for `pkg-resources==0.0.0` +Sorts entries in requirements.txt and constraints.txt and removes incorrect entry for `pkg-resources==0.0.0` #### `sort-simple-yaml` Sorts simple YAML files which consist only of top-level @@ -176,10 +202,10 @@ Trims trailing whitespace. ### Deprecated / replaced hooks -- `autopep8-wrapper`: instead use - [mirrors-autopep8](https://github.com/pre-commit/mirrors-autopep8) -- `pyflakes`: instead use `flake8` -- `flake8`: instead use [upstream flake8](https://gitlab.com/pycqa/flake8) +- `check-byte-order-marker`: instead use fix-byte-order-marker +- `fix-encoding-pragma`: instead use [`pyupgrade`](https://github.com/asottile/pyupgrade) +- `check-docstring-first`: fundamentally flawed, deprecated without replacement. + ### As a standalone package diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index dc3a57a9..00000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,24 +0,0 @@ -trigger: - branches: - include: [master, test-me-*] - tags: - include: ['*'] - -resources: - repositories: - - repository: asottile - type: github - endpoint: github - name: asottile/azure-pipeline-templates - ref: refs/tags/v1.0.0 - -jobs: -- template: job--pre-commit.yml@asottile -- template: job--python-tox.yml@asottile - parameters: - toxenvs: [py38] - os: windows -- template: job--python-tox.yml@asottile - parameters: - toxenvs: [pypy3, py36, py37, py38] - os: linux diff --git a/pre_commit_hooks/check_added_large_files.py b/pre_commit_hooks/check_added_large_files.py index 91f57544..e6741623 100644 --- a/pre_commit_hooks/check_added_large_files.py +++ b/pre_commit_hooks/check_added_large_files.py @@ -1,32 +1,52 @@ +from __future__ import annotations + import argparse -import json import math import os -from typing import Optional -from typing import Sequence -from typing import Set +import subprocess +from collections.abc import Sequence from pre_commit_hooks.util import added_files -from pre_commit_hooks.util import CalledProcessError -from pre_commit_hooks.util import cmd_output +from pre_commit_hooks.util import zsplit -def lfs_files() -> Set[str]: - try: - # Introduced in git-lfs 2.2.0, first working in 2.2.1 - lfs_ret = cmd_output('git', 'lfs', 'status', '--json') - except CalledProcessError: # pragma: no cover (with git-lfs) - lfs_ret = '{"files":{}}' +def filter_lfs_files(filenames: set[str]) -> None: # pragma: no cover (lfs) + """Remove files tracked by git-lfs from the set.""" + if not filenames: + return - return set(json.loads(lfs_ret)['files']) + check_attr = subprocess.run( + ('git', 'check-attr', 'filter', '-z', '--stdin'), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + encoding='utf-8', + check=True, + input='\0'.join(filenames), + ) + stdout = zsplit(check_attr.stdout) + for i in range(0, len(stdout), 3): + filename, filter_tag = stdout[i], stdout[i + 2] + if filter_tag == 'lfs': + filenames.remove(filename) -def find_large_added_files(filenames: Sequence[str], maxkb: int) -> int: +def find_large_added_files( + filenames: Sequence[str], + maxkb: int, + *, + enforce_all: bool = False, +) -> int: # Find all added files that are also in the list of files pre-commit tells # us about retv = 0 - for filename in (added_files() & set(filenames)) - lfs_files(): - kb = int(math.ceil(os.stat(filename).st_size / 1024)) + filenames_filtered = set(filenames) + filter_lfs_files(filenames_filtered) + + if not enforce_all: + filenames_filtered &= 
added_files() + + for filename in filenames_filtered: + kb = math.ceil(os.stat(filename).st_size / 1024) if kb > maxkb: print(f'{filename} ({kb} KB) exceeds {maxkb} KB.') retv = 1 @@ -34,20 +54,28 @@ def find_large_added_files(filenames: Sequence[str], maxkb: int) -> int: return retv -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( 'filenames', nargs='*', help='Filenames pre-commit believes are changed.', ) + parser.add_argument( + '--enforce-all', action='store_true', + help='Enforce all files are checked, not just staged files.', + ) parser.add_argument( '--maxkb', type=int, default=500, - help='Maxmimum allowable KB for added files', + help='Maximum allowable KB for added files', ) - args = parser.parse_args(argv) - return find_large_added_files(args.filenames, args.maxkb) + + return find_large_added_files( + args.filenames, + args.maxkb, + enforce_all=args.enforce_all, + ) if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_ast.py b/pre_commit_hooks/check_ast.py index 2be6e1af..c1f165b8 100644 --- a/pre_commit_hooks/check_ast.py +++ b/pre_commit_hooks/check_ast.py @@ -1,13 +1,14 @@ +from __future__ import annotations + import argparse import ast import platform import sys import traceback -from typing import Optional -from typing import Sequence +from collections.abc import Sequence -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') args = parser.parse_args(argv) @@ -29,4 +30,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_builtin_literals.py b/pre_commit_hooks/check_builtin_literals.py index 6bcd8387..e128eeaa 100644 --- a/pre_commit_hooks/check_builtin_literals.py +++ b/pre_commit_hooks/check_builtin_literals.py @@ -1,10 +1,9 @@ +from __future__ import annotations + import argparse import ast -from typing import List +from collections.abc import Sequence from typing import NamedTuple -from typing import Optional -from typing import Sequence -from typing import Set BUILTIN_TYPES = { @@ -27,38 +26,39 @@ class Call(NamedTuple): class Visitor(ast.NodeVisitor): def __init__( self, - ignore: Optional[Sequence[str]] = None, + ignore: set[str], allow_dict_kwargs: bool = True, ) -> None: - self.builtin_type_calls: List[Call] = [] - self.ignore = set(ignore) if ignore else set() + self.builtin_type_calls: list[Call] = [] self.allow_dict_kwargs = allow_dict_kwargs + self._disallowed = BUILTIN_TYPES.keys() - ignore def _check_dict_call(self, node: ast.Call) -> bool: return self.allow_dict_kwargs and bool(node.keywords) def visit_Call(self, node: ast.Call) -> None: - if not isinstance(node.func, ast.Name): + if ( # Ignore functions that are object attributes (`foo.bar()`). # Assume that if the user calls `builtins.list()`, they know what # they're doing. 
- return - if node.func.id not in set(BUILTIN_TYPES).difference(self.ignore): - return - if node.func.id == 'dict' and self._check_dict_call(node): - return - elif node.args: - return - self.builtin_type_calls.append( - Call(node.func.id, node.lineno, node.col_offset), - ) + isinstance(node.func, ast.Name) and + node.func.id in self._disallowed and + (node.func.id != 'dict' or not self._check_dict_call(node)) and + not node.args + ): + self.builtin_type_calls.append( + Call(node.func.id, node.lineno, node.col_offset), + ) + + self.generic_visit(node) def check_file( filename: str, - ignore: Optional[Sequence[str]] = None, + *, + ignore: set[str], allow_dict_kwargs: bool = True, -) -> List[Call]: +) -> list[Call]: with open(filename, 'rb') as f: tree = ast.parse(f.read(), filename=filename) visitor = Visitor(ignore=ignore, allow_dict_kwargs=allow_dict_kwargs) @@ -66,11 +66,11 @@ def check_file( return visitor.builtin_type_calls -def parse_ignore(value: str) -> Set[str]: +def parse_ignore(value: str) -> set[str]: return set(value.split(',')) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') parser.add_argument('--ignore', type=parse_ignore, default=set()) @@ -103,4 +103,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_byte_order_marker.py b/pre_commit_hooks/check_byte_order_marker.py deleted file mode 100644 index c0c2969c..00000000 --- a/pre_commit_hooks/check_byte_order_marker.py +++ /dev/null @@ -1,23 +0,0 @@ -import argparse -from typing import Optional -from typing import Sequence - - -def main(argv: Optional[Sequence[str]] = None) -> int: - parser = argparse.ArgumentParser() - parser.add_argument('filenames', nargs='*', help='Filenames to check') - args = parser.parse_args(argv) - - retv = 0 - - for filename in args.filenames: - with open(filename, 'rb') as f: - if f.read(3) == b'\xef\xbb\xbf': - retv = 1 - print(f'{filename}: Has a byte-order marker') - - return retv - - -if __name__ == '__main__': - exit(main()) diff --git a/pre_commit_hooks/check_case_conflict.py b/pre_commit_hooks/check_case_conflict.py index 6b8ba82f..475c91c4 100644 --- a/pre_commit_hooks/check_case_conflict.py +++ b/pre_commit_hooks/check_case_conflict.py @@ -1,20 +1,35 @@ +from __future__ import annotations + import argparse -from typing import Iterable -from typing import Optional -from typing import Sequence -from typing import Set +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence from pre_commit_hooks.util import added_files from pre_commit_hooks.util import cmd_output -def lower_set(iterable: Iterable[str]) -> Set[str]: +def lower_set(iterable: Iterable[str]) -> set[str]: return {x.lower() for x in iterable} +def parents(file: str) -> Iterator[str]: + path_parts = file.split('/') + path_parts.pop() + while path_parts: + yield '/'.join(path_parts) + path_parts.pop() + + +def directories_for(files: set[str]) -> set[str]: + return {parent for file in files for parent in parents(file)} + + def find_conflicting_filenames(filenames: Sequence[str]) -> int: repo_files = set(cmd_output('git', 'ls-files').splitlines()) + repo_files |= directories_for(repo_files) relevant_files = set(filenames) | added_files() + relevant_files |= directories_for(relevant_files) repo_files -= relevant_files retv = 0 @@ 
-41,7 +56,7 @@ def find_conflicting_filenames(filenames: Sequence[str]) -> int: return retv -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( 'filenames', nargs='*', @@ -54,4 +69,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_docstring_first.py b/pre_commit_hooks/check_docstring_first.py index 875c0fba..42fbd15b 100644 --- a/pre_commit_hooks/check_docstring_first.py +++ b/pre_commit_hooks/check_docstring_first.py @@ -1,9 +1,10 @@ +from __future__ import annotations + import argparse import io import tokenize +from collections.abc import Sequence from tokenize import tokenize as tokenize_tokenize -from typing import Optional -from typing import Sequence NON_CODE_TOKENS = frozenset(( tokenize.COMMENT, tokenize.ENDMARKER, tokenize.NEWLINE, tokenize.NL, @@ -27,13 +28,13 @@ def check_docstring_first(src: bytes, filename: str = '') -> int: if tok_type == tokenize.STRING and scol == 0: if found_docstring_line is not None: print( - f'{filename}:{sline} Multiple module docstrings ' + f'{filename}:{sline}: Multiple module docstrings ' f'(first docstring on line {found_docstring_line}).', ) return 1 elif found_code_line is not None: print( - f'{filename}:{sline} Module docstring appears after code ' + f'{filename}:{sline}: Module docstring appears after code ' f'(code seen on line {found_code_line}).', ) return 1 @@ -45,7 +46,7 @@ def check_docstring_first(src: bytes, filename: str = '') -> int: return 0 -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') args = parser.parse_args(argv) diff --git a/pre_commit_hooks/check_executables_have_shebangs.py b/pre_commit_hooks/check_executables_have_shebangs.py index a02d2a9c..707863b3 100644 --- a/pre_commit_hooks/check_executables_have_shebangs.py +++ b/pre_commit_hooks/check_executables_have_shebangs.py @@ -1,55 +1,60 @@ """Check that executable text files have a shebang.""" +from __future__ import annotations + import argparse import shlex import sys -from typing import List -from typing import Optional -from typing import Sequence -from typing import Set +from collections.abc import Generator +from collections.abc import Sequence +from typing import NamedTuple from pre_commit_hooks.util import cmd_output +from pre_commit_hooks.util import zsplit EXECUTABLE_VALUES = frozenset(('1', '3', '5', '7')) -def zsplit(s: str) -> List[str]: - s = s.strip('\0') - if s: - return s.split('\0') - else: - return [] - - -def check_executables(paths: List[str]) -> int: - if sys.platform == 'win32': # pragma: win32 cover +def check_executables(paths: list[str]) -> int: + fs_tracks_executable_bit = cmd_output( + 'git', 'config', 'core.fileMode', retcode=None, + ).strip() + if fs_tracks_executable_bit == 'false': # pragma: win32 cover return _check_git_filemode(paths) else: # pragma: win32 no cover retv = 0 for path in paths: - if not _check_has_shebang(path): + if not has_shebang(path): _message(path) retv = 1 return retv -def _check_git_filemode(paths: Sequence[str]) -> int: +class GitLsFile(NamedTuple): + mode: str + filename: str + + +def git_ls_files(paths: Sequence[str]) -> Generator[GitLsFile]: outs = cmd_output('git', 'ls-files', '-z', '--stage', '--', *paths) - seen: Set[str] = set() for out in 
zsplit(outs): - metadata, path = out.split('\t') - tagmode = metadata.split(' ', 1)[0] + metadata, filename = out.split('\t') + mode, _, _ = metadata.split() + yield GitLsFile(mode, filename) - is_executable = any(b in EXECUTABLE_VALUES for b in tagmode[-3:]) - has_shebang = _check_has_shebang(path) - if is_executable and not has_shebang: - _message(path) - seen.add(path) + +def _check_git_filemode(paths: Sequence[str]) -> int: + seen: set[str] = set() + for ls_file in git_ls_files(paths): + is_executable = any(b in EXECUTABLE_VALUES for b in ls_file.mode[-3:]) + if is_executable and not has_shebang(ls_file.filename): + _message(ls_file.filename) + seen.add(ls_file.filename) return int(bool(seen)) -def _check_has_shebang(path: str) -> int: +def has_shebang(path: str) -> int: with open(path, 'rb') as f: first_bytes = f.read(2) @@ -61,12 +66,14 @@ def _message(path: str) -> None: f'{path}: marked executable but has no (or invalid) shebang!\n' f" If it isn't supposed to be executable, try: " f'`chmod -x {shlex.quote(path)}`\n' + f' If on Windows, you may also need to: ' + f'`git add --chmod=-x {shlex.quote(path)}`\n' f' If it is supposed to be executable, double-check its shebang.', file=sys.stderr, ) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('filenames', nargs='*') args = parser.parse_args(argv) @@ -75,4 +82,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_json.py b/pre_commit_hooks/check_json.py index 6026270c..612111c5 100644 --- a/pre_commit_hooks/check_json.py +++ b/pre_commit_hooks/check_json.py @@ -1,10 +1,24 @@ +from __future__ import annotations + import argparse import json -from typing import Optional -from typing import Sequence +from collections.abc import Sequence +from typing import Any + + +def raise_duplicate_keys( + ordered_pairs: list[tuple[str, Any]], +) -> dict[str, Any]: + d = {} + for key, val in ordered_pairs: + if key in d: + raise ValueError(f'Duplicate key: {key}') + else: + d[key] = val + return d -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to check.') args = parser.parse_args(argv) @@ -13,7 +27,7 @@ def main(argv: Optional[Sequence[str]] = None) -> int: for filename in args.filenames: with open(filename, 'rb') as f: try: - json.load(f) + json.load(f, object_pairs_hook=raise_duplicate_keys) except ValueError as exc: print(f'{filename}: Failed to json decode ({exc})') retval = 1 @@ -21,4 +35,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_merge_conflict.py b/pre_commit_hooks/check_merge_conflict.py index c20a8af7..54a083ee 100644 --- a/pre_commit_hooks/check_merge_conflict.py +++ b/pre_commit_hooks/check_merge_conflict.py @@ -1,29 +1,34 @@ +from __future__ import annotations + import argparse import os.path -from typing import Optional -from typing import Sequence +from collections.abc import Sequence + +from pre_commit_hooks.util import cmd_output CONFLICT_PATTERNS = [ b'<<<<<<< ', b'======= ', + b'=======\r\n', b'=======\n', b'>>>>>>> ', ] -def is_in_merge() -> int: +def is_in_merge() -> bool: + git_dir = cmd_output('git', 'rev-parse', 
'--git-dir').rstrip() return ( - os.path.exists(os.path.join('.git', 'MERGE_MSG')) and + os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and ( - os.path.exists(os.path.join('.git', 'MERGE_HEAD')) or - os.path.exists(os.path.join('.git', 'rebase-apply')) or - os.path.exists(os.path.join('.git', 'rebase-merge')) + os.path.exists(os.path.join(git_dir, 'MERGE_HEAD')) or + os.path.exists(os.path.join(git_dir, 'rebase-apply')) or + os.path.exists(os.path.join(git_dir, 'rebase-merge')) ) ) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') parser.add_argument('--assume-in-merge', action='store_true') @@ -35,12 +40,12 @@ def main(argv: Optional[Sequence[str]] = None) -> int: retcode = 0 for filename in args.filenames: with open(filename, 'rb') as inputfile: - for i, line in enumerate(inputfile): + for i, line in enumerate(inputfile, start=1): for pattern in CONFLICT_PATTERNS: if line.startswith(pattern): print( - f'Merge conflict string "{pattern.decode()}" ' - f'found in {filename}:{i + 1}', + f'{filename}:{i}: Merge conflict string ' + f'{pattern.strip().decode()!r} found', ) retcode = 1 @@ -48,4 +53,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_shebang_scripts_are_executable.py b/pre_commit_hooks/check_shebang_scripts_are_executable.py new file mode 100644 index 00000000..937425b0 --- /dev/null +++ b/pre_commit_hooks/check_shebang_scripts_are_executable.py @@ -0,0 +1,54 @@ +"""Check that text files with a shebang are executable.""" +from __future__ import annotations + +import argparse +import shlex +import sys +from collections.abc import Sequence + +from pre_commit_hooks.check_executables_have_shebangs import EXECUTABLE_VALUES +from pre_commit_hooks.check_executables_have_shebangs import git_ls_files +from pre_commit_hooks.check_executables_have_shebangs import has_shebang + + +def check_shebangs(paths: list[str]) -> int: + # Cannot optimize on non-executability here if we intend this check to + # work on win32 -- and that's where problems caused by non-executability + # (elsewhere) are most likely to arise from. 
+ return _check_git_filemode(paths) + + +def _check_git_filemode(paths: Sequence[str]) -> int: + seen: set[str] = set() + for ls_file in git_ls_files(paths): + is_executable = any(b in EXECUTABLE_VALUES for b in ls_file.mode[-3:]) + if not is_executable and has_shebang(ls_file.filename): + _message(ls_file.filename) + seen.add(ls_file.filename) + + return int(bool(seen)) + + +def _message(path: str) -> None: + print( + f'{path}: has a shebang but is not marked executable!\n' + f' If it is supposed to be executable, try: ' + f'`chmod +x {shlex.quote(path)}`\n' + f' If on Windows, you may also need to: ' + f'`git add --chmod=+x {shlex.quote(path)}`\n' + f' If it is not supposed to be executable, double-check its shebang ' + f'is wanted.\n', + file=sys.stderr, + ) + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('filenames', nargs='*') + args = parser.parse_args(argv) + + return check_shebangs(args.filenames) + + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_symlinks.py b/pre_commit_hooks/check_symlinks.py index f014714a..be8a800e 100644 --- a/pre_commit_hooks/check_symlinks.py +++ b/pre_commit_hooks/check_symlinks.py @@ -1,10 +1,11 @@ +from __future__ import annotations + import argparse import os.path -from typing import Optional -from typing import Sequence +from collections.abc import Sequence -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser(description='Checks for broken symlinks.') parser.add_argument('filenames', nargs='*', help='Filenames to check') args = parser.parse_args(argv) @@ -23,4 +24,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_toml.py b/pre_commit_hooks/check_toml.py index 87496753..2105b072 100644 --- a/pre_commit_hooks/check_toml.py +++ b/pre_commit_hooks/check_toml.py @@ -1,11 +1,16 @@ +from __future__ import annotations + import argparse -from typing import Optional -from typing import Sequence +import sys +from collections.abc import Sequence -import toml +if sys.version_info >= (3, 11): # pragma: >=3.11 cover + import tomllib +else: # pragma: <3.11 cover + import tomli as tomllib -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to check.') args = parser.parse_args(argv) @@ -13,12 +18,13 @@ def main(argv: Optional[Sequence[str]] = None) -> int: retval = 0 for filename in args.filenames: try: - toml.load(filename) - except toml.TomlDecodeError as exc: + with open(filename, mode='rb') as fp: + tomllib.load(fp) + except tomllib.TOMLDecodeError as exc: print(f'{filename}: {exc}') retval = 1 return retval if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_vcs_permalinks.py b/pre_commit_hooks/check_vcs_permalinks.py index bf698e11..108656aa 100644 --- a/pre_commit_hooks/check_vcs_permalinks.py +++ b/pre_commit_hooks/check_vcs_permalinks.py @@ -1,35 +1,53 @@ +from __future__ import annotations + import argparse import re import sys -from typing import Optional -from typing import Sequence +from collections.abc import Sequence +from re import Pattern -GITHUB_NON_PERMALINK = re.compile( - br'https://github.com/[^/ ]+/[^/ 
]+/blob/master/[^# ]+#L\d+', -) +def _get_pattern(domain: str) -> Pattern[bytes]: + regex = ( + rf'https://{domain}/[^/ ]+/[^/ ]+/blob/' + r'(?![a-fA-F0-9]{4,64}/)([^/. ]+)/[^# ]+#L\d+' + ) + return re.compile(regex.encode()) -def _check_filename(filename: str) -> int: +def _check_filename(filename: str, patterns: list[Pattern[bytes]]) -> int: retv = 0 with open(filename, 'rb') as f: for i, line in enumerate(f, 1): - if GITHUB_NON_PERMALINK.search(line): - sys.stdout.write(f'{filename}:{i}:') - sys.stdout.flush() - sys.stdout.buffer.write(line) - retv = 1 + for pattern in patterns: + if pattern.search(line): + sys.stdout.write(f'{filename}:{i}:') + sys.stdout.flush() + sys.stdout.buffer.write(line) + retv = 1 return retv -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') + parser.add_argument( + '--additional-github-domain', + dest='additional_github_domains', + action='append', + default=['github.com'], + ) args = parser.parse_args(argv) + patterns = [ + _get_pattern(domain) + for domain in args.additional_github_domains + ] + retv = 0 + for filename in args.filenames: - retv |= _check_filename(filename) + retv |= _check_filename(filename, patterns) if retv: print() @@ -39,4 +57,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_xml.py b/pre_commit_hooks/check_xml.py index 59b4d59e..ff5536b5 100644 --- a/pre_commit_hooks/check_xml.py +++ b/pre_commit_hooks/check_xml.py @@ -1,10 +1,11 @@ +from __future__ import annotations + import argparse import xml.sax.handler -from typing import Optional -from typing import Sequence +from collections.abc import Sequence -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='XML filenames to check.') args = parser.parse_args(argv) @@ -22,4 +23,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/check_yaml.py b/pre_commit_hooks/check_yaml.py index 7453f6fb..c94ea716 100644 --- a/pre_commit_hooks/check_yaml.py +++ b/pre_commit_hooks/check_yaml.py @@ -1,16 +1,17 @@ +from __future__ import annotations + import argparse +from collections.abc import Generator +from collections.abc import Sequence from typing import Any -from typing import Generator from typing import NamedTuple -from typing import Optional -from typing import Sequence import ruamel.yaml yaml = ruamel.yaml.YAML(typ='safe') -def _exhaust(gen: Generator[str, None, None]) -> None: +def _exhaust(gen: Generator[str]) -> None: for _ in gen: pass @@ -36,7 +37,7 @@ class Key(NamedTuple): } -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( '-m', '--multi', '--allow-multiple-documents', action='store_true', @@ -45,7 +46,7 @@ def main(argv: Optional[Sequence[str]] = None) -> int: '--unsafe', action='store_true', help=( 'Instead of loading the files, simply parse them for syntax. ' - 'A syntax-only check enables extensions and unsafe contstructs ' + 'A syntax-only check enables extensions and unsafe constructs ' 'which would otherwise be forbidden. 
Using this option removes ' 'all guarantees of portability to other yaml implementations. ' 'Implies --allow-multiple-documents' @@ -68,4 +69,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/debug_statement_hook.py b/pre_commit_hooks/debug_statement_hook.py index 794f7080..7e6be95e 100644 --- a/pre_commit_hooks/debug_statement_hook.py +++ b/pre_commit_hooks/debug_statement_hook.py @@ -1,15 +1,17 @@ +from __future__ import annotations + import argparse import ast import traceback -from typing import List +from collections.abc import Sequence from typing import NamedTuple -from typing import Optional -from typing import Sequence DEBUG_STATEMENTS = { + 'bpdb', 'ipdb', 'pdb', + 'pdbr', 'pudb', 'pydevd_pycharm', 'q', @@ -28,7 +30,7 @@ class Debug(NamedTuple): class DebugStatementParser(ast.NodeVisitor): def __init__(self) -> None: - self.breakpoints: List[Debug] = [] + self.breakpoints: list[Debug] = [] def visit_Import(self, node: ast.Import) -> None: for name in node.names: @@ -64,12 +66,12 @@ def check_file(filename: str) -> int: visitor.visit(ast_obj) for bp in visitor.breakpoints: - print(f'{filename}:{bp.line}:{bp.col} - {bp.name} {bp.reason}') + print(f'{filename}:{bp.line}:{bp.col}: {bp.name} {bp.reason}') return int(bool(visitor.breakpoints)) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') args = parser.parse_args(argv) @@ -81,4 +83,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/destroyed_symlinks.py b/pre_commit_hooks/destroyed_symlinks.py new file mode 100644 index 00000000..9bc25898 --- /dev/null +++ b/pre_commit_hooks/destroyed_symlinks.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +import argparse +import shlex +import subprocess +from collections.abc import Sequence + +from pre_commit_hooks.util import cmd_output +from pre_commit_hooks.util import zsplit + +ORDINARY_CHANGED_ENTRIES_MARKER = '1' +PERMS_LINK = '120000' +PERMS_NONEXIST = '000000' + + +def find_destroyed_symlinks(files: Sequence[str]) -> list[str]: + destroyed_links: list[str] = [] + if not files: + return destroyed_links + for line in zsplit( + cmd_output('git', 'status', '--porcelain=v2', '-z', '--', *files), + ): + splitted = line.split(' ') + if splitted and splitted[0] == ORDINARY_CHANGED_ENTRIES_MARKER: + # https://git-scm.com/docs/git-status#_changed_tracked_entries + ( + _, _, _, + mode_HEAD, + mode_index, + _, + hash_HEAD, + hash_index, + *path_splitted, + ) = splitted + path = ' '.join(path_splitted) + if ( + mode_HEAD == PERMS_LINK and + mode_index != PERMS_LINK and + mode_index != PERMS_NONEXIST + ): + if hash_HEAD == hash_index: + # if old and new hashes are equal, it's not needed to check + # anything more, we've found a destroyed symlink for sure + destroyed_links.append(path) + else: + # if old and new hashes are *not* equal, it doesn't mean + # that everything is OK - new file may be altered + # by something like trailing-whitespace and/or + # mixed-line-ending hooks so we need to go deeper + SIZE_CMD = ('git', 'cat-file', '-s') + size_index = int(cmd_output(*SIZE_CMD, hash_index).strip()) + size_HEAD = int(cmd_output(*SIZE_CMD, hash_HEAD).strip()) + + # in the worst case new file may have CRLF added + # so 
check content only if new file is bigger + # not more than 2 bytes compared to the old one + if size_index <= size_HEAD + 2: + head_content = subprocess.check_output( + ('git', 'cat-file', '-p', hash_HEAD), + ).rstrip() + index_content = subprocess.check_output( + ('git', 'cat-file', '-p', hash_index), + ).rstrip() + if head_content == index_content: + destroyed_links.append(path) + return destroyed_links + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument('filenames', nargs='*', help='Filenames to check.') + args = parser.parse_args(argv) + destroyed_links = find_destroyed_symlinks(files=args.filenames) + if destroyed_links: + print('Destroyed symlinks:') + for destroyed_link in destroyed_links: + print(f'- {destroyed_link}') + print('You should unstage affected files:') + print(f'\tgit reset HEAD -- {shlex.join(destroyed_links)}') + print( + 'And retry commit. As a long term solution ' + 'you may try to explicitly tell git that your ' + 'environment does not support symlinks:', + ) + print('\tgit config core.symlinks false') + return 1 + else: + return 0 + + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/pre_commit_hooks/detect_aws_credentials.py b/pre_commit_hooks/detect_aws_credentials.py index 1663cfd6..85822886 100644 --- a/pre_commit_hooks/detect_aws_credentials.py +++ b/pre_commit_hooks/detect_aws_credentials.py @@ -1,11 +1,10 @@ +from __future__ import annotations + import argparse import configparser import os -from typing import List +from collections.abc import Sequence from typing import NamedTuple -from typing import Optional -from typing import Sequence -from typing import Set class BadFile(NamedTuple): @@ -13,7 +12,7 @@ class BadFile(NamedTuple): key: str -def get_aws_cred_files_from_env() -> Set[str]: +def get_aws_cred_files_from_env() -> set[str]: """Extract credential file paths from environment variables.""" return { os.environ[env_var] @@ -25,7 +24,7 @@ def get_aws_cred_files_from_env() -> Set[str]: } -def get_aws_secrets_from_env() -> Set[str]: +def get_aws_secrets_from_env() -> set[str]: """Extract AWS secrets from environment variables.""" keys = set() for env_var in ( @@ -36,7 +35,7 @@ def get_aws_secrets_from_env() -> Set[str]: return keys -def get_aws_secrets_from_file(credentials_file: str) -> Set[str]: +def get_aws_secrets_from_file(credentials_file: str) -> set[str]: """Extract AWS secrets from configuration files. Read an ini-style configuration file and return a set with all found AWS @@ -69,8 +68,8 @@ def get_aws_secrets_from_file(credentials_file: str) -> Set[str]: def check_file_for_aws_keys( filenames: Sequence[str], - keys: Set[bytes], -) -> List[BadFile]: + keys: set[bytes], +) -> list[BadFile]: """Check if files contain AWS secrets. Return a list of all files containing AWS secrets and keys found, with all @@ -90,7 +89,7 @@ def check_file_for_aws_keys( return bad_files -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='+', help='Filenames to run') parser.add_argument( @@ -119,7 +118,7 @@ def main(argv: Optional[Sequence[str]] = None) -> int: # of files to to gather AWS secrets from. 
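To make the secret-gathering step of detect-aws-credentials concrete, here is a small hedged sketch of pulling candidate secrets out of an ini-style credentials file, in the spirit of get_aws_secrets_from_file above. The option names checked here (aws_secret_access_key, aws_security_token, aws_session_token) are assumptions for illustration, not quoted from the hook.

from __future__ import annotations

import configparser


def secrets_from_ini(path: str) -> set[str]:
    """Collect candidate secret values from an ini-style AWS credentials file."""
    parser = configparser.ConfigParser()
    try:
        parser.read(path)  # a missing file simply yields no sections
    except configparser.MissingSectionHeaderError:
        return set()

    keys: set[str] = set()
    for section in parser.sections():
        # assumed option names; the real hook defines its own list
        for var in ('aws_secret_access_key', 'aws_security_token', 'aws_session_token'):
            value = parser[section].get(var)
            if value:
                keys.add(value)
    return keys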
credential_files |= get_aws_cred_files_from_env() - keys: Set[str] = set() + keys: set[str] = set() for credential_file in credential_files: keys |= get_aws_secrets_from_file(credential_file) @@ -149,4 +148,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/detect_private_key.py b/pre_commit_hooks/detect_private_key.py index 7bbc2f91..9ad703ae 100644 --- a/pre_commit_hooks/detect_private_key.py +++ b/pre_commit_hooks/detect_private_key.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import argparse -from typing import Optional -from typing import Sequence +from collections.abc import Sequence BLACKLIST = [ b'BEGIN RSA PRIVATE KEY', @@ -11,10 +12,12 @@ b'PuTTY-User-Key-File-2', b'BEGIN SSH2 ENCRYPTED PRIVATE KEY', b'BEGIN PGP PRIVATE KEY BLOCK', + b'BEGIN ENCRYPTED PRIVATE KEY', + b'BEGIN OpenVPN Static key V1', ] -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to check') args = parser.parse_args(argv) @@ -36,4 +39,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/end_of_file_fixer.py b/pre_commit_hooks/end_of_file_fixer.py index 1c07379d..a88425c6 100644 --- a/pre_commit_hooks/end_of_file_fixer.py +++ b/pre_commit_hooks/end_of_file_fixer.py @@ -1,8 +1,9 @@ +from __future__ import annotations + import argparse import os +from collections.abc import Sequence from typing import IO -from typing import Optional -from typing import Sequence def fix_file(file_obj: IO[bytes]) -> int: @@ -48,7 +49,7 @@ def fix_file(file_obj: IO[bytes]) -> int: return 0 -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to fix') args = parser.parse_args(argv) @@ -67,4 +68,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/file_contents_sorter.py b/pre_commit_hooks/file_contents_sorter.py index 4c1c7479..ee26d926 100644 --- a/pre_commit_hooks/file_contents_sorter.py +++ b/pre_commit_hooks/file_contents_sorter.py @@ -9,12 +9,14 @@ this hook on that file should reduce the instances of git merge conflicts and keep the file nicely ordered. 
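Since the detect-private-key change above only extends the marker list, a hedged sketch of the overall check may help: read each file as bytes and fail if any marker appears. The scanning loop below is a paraphrase, not the hook's code; the markers are a subset of the BLACKLIST shown above.

from __future__ import annotations

MARKERS = (
    b'BEGIN RSA PRIVATE KEY',
    b'BEGIN OPENSSH PRIVATE KEY',
    b'BEGIN ENCRYPTED PRIVATE KEY',   # newly added marker
    b'BEGIN OpenVPN Static key V1',   # newly added marker
)


def file_has_private_key(filename: str) -> bool:
    with open(filename, 'rb') as f:
        content = f.read()
    return any(marker in content for marker in MARKERS)


# usage sketch: exit non-zero if any checked file contains a marker
# retv = int(any(file_has_private_key(name) for name in filenames))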
""" +from __future__ import annotations + import argparse +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Sequence from typing import Any -from typing import Callable from typing import IO -from typing import Optional -from typing import Sequence PASS = 0 FAIL = 1 @@ -22,16 +24,23 @@ def sort_file_contents( f: IO[bytes], - key: Optional[Callable[[bytes], Any]], + key: Callable[[bytes], Any] | None, + *, + unique: bool = False, ) -> int: before = list(f) - after = sorted( - (line.strip(b'\n\r') for line in before if line.strip()), - key=key, + lines: Iterable[bytes] = ( + line.rstrip(b'\n\r') for line in before if line.strip() ) + if unique: + lines = set(lines) + after = sorted(lines, key=key) before_string = b''.join(before) - after_string = b'\n'.join(after) + b'\n' + after_string = b'\n'.join(after) + + if after_string: + after_string += b'\n' if before_string == after_string: return PASS @@ -42,23 +51,33 @@ def sort_file_contents( return FAIL -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='+', help='Files to sort') - parser.add_argument( + + mutex = parser.add_mutually_exclusive_group(required=False) + mutex.add_argument( '--ignore-case', action='store_const', const=bytes.lower, default=None, help='fold lower case to upper case characters', ) + mutex.add_argument( + '--unique', + action='store_true', + help='ensure each line is unique', + ) + args = parser.parse_args(argv) retv = PASS for arg in args.filenames: with open(arg, 'rb+') as file_obj: - ret_for_file = sort_file_contents(file_obj, key=args.ignore_case) + ret_for_file = sort_file_contents( + file_obj, key=args.ignore_case, unique=args.unique, + ) if ret_for_file: print(f'Sorting {arg}') @@ -69,4 +88,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/fix_byte_order_marker.py b/pre_commit_hooks/fix_byte_order_marker.py new file mode 100644 index 00000000..100ffeac --- /dev/null +++ b/pre_commit_hooks/fix_byte_order_marker.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import argparse +from collections.abc import Sequence + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument('filenames', nargs='*', help='Filenames to check') + args = parser.parse_args(argv) + + retv = 0 + + for filename in args.filenames: + with open(filename, 'rb') as f_b: + bts = f_b.read(3) + + if bts == b'\xef\xbb\xbf': + with open(filename, newline='', encoding='utf-8-sig') as f: + contents = f.read() + with open(filename, 'w', newline='', encoding='utf-8') as f: + f.write(contents) + + print(f'{filename}: removed byte-order marker') + retv = 1 + + return retv + + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/pre_commit_hooks/fix_encoding_pragma.py b/pre_commit_hooks/fix_encoding_pragma.py deleted file mode 100644 index 88d72ed7..00000000 --- a/pre_commit_hooks/fix_encoding_pragma.py +++ /dev/null @@ -1,148 +0,0 @@ -import argparse -from typing import IO -from typing import NamedTuple -from typing import Optional -from typing import Sequence - -DEFAULT_PRAGMA = b'# -*- coding: utf-8 -*-' - - -def has_coding(line: bytes) -> bool: - if not line.strip(): - return False - return ( - line.lstrip()[:1] == b'#' and ( - b'unicode' in line or - b'encoding' in line or - 
b'coding:' in line or - b'coding=' in line - ) - ) - - -class ExpectedContents(NamedTuple): - shebang: bytes - rest: bytes - # True: has exactly the coding pragma expected - # False: missing coding pragma entirely - # None: has a coding pragma, but it does not match - pragma_status: Optional[bool] - ending: bytes - - @property - def has_any_pragma(self) -> bool: - return self.pragma_status is not False - - def is_expected_pragma(self, remove: bool) -> bool: - expected_pragma_status = not remove - return self.pragma_status is expected_pragma_status - - -def _get_expected_contents( - first_line: bytes, - second_line: bytes, - rest: bytes, - expected_pragma: bytes, -) -> ExpectedContents: - ending = b'\r\n' if first_line.endswith(b'\r\n') else b'\n' - - if first_line.startswith(b'#!'): - shebang = first_line - potential_coding = second_line - else: - shebang = b'' - potential_coding = first_line - rest = second_line + rest - - if potential_coding.rstrip(b'\r\n') == expected_pragma: - pragma_status: Optional[bool] = True - elif has_coding(potential_coding): - pragma_status = None - else: - pragma_status = False - rest = potential_coding + rest - - return ExpectedContents( - shebang=shebang, rest=rest, pragma_status=pragma_status, ending=ending, - ) - - -def fix_encoding_pragma( - f: IO[bytes], - remove: bool = False, - expected_pragma: bytes = DEFAULT_PRAGMA, -) -> int: - expected = _get_expected_contents( - f.readline(), f.readline(), f.read(), expected_pragma, - ) - - # Special cases for empty files - if not expected.rest.strip(): - # If a file only has a shebang or a coding pragma, remove it - if expected.has_any_pragma or expected.shebang: - f.seek(0) - f.truncate() - f.write(b'') - return 1 - else: - return 0 - - if expected.is_expected_pragma(remove): - return 0 - - # Otherwise, write out the new file - f.seek(0) - f.truncate() - f.write(expected.shebang) - if not remove: - f.write(expected_pragma + expected.ending) - f.write(expected.rest) - - return 1 - - -def _normalize_pragma(pragma: str) -> bytes: - return pragma.encode().rstrip() - - -def main(argv: Optional[Sequence[str]] = None) -> int: - parser = argparse.ArgumentParser( - 'Fixes the encoding pragma of python files', - ) - parser.add_argument('filenames', nargs='*', help='Filenames to fix') - parser.add_argument( - '--pragma', default=DEFAULT_PRAGMA, type=_normalize_pragma, - help=( - f'The encoding pragma to use. 
' - f'Default: {DEFAULT_PRAGMA.decode()}' - ), - ) - parser.add_argument( - '--remove', action='store_true', - help='Remove the encoding pragma (Useful in a python3-only codebase)', - ) - args = parser.parse_args(argv) - - retv = 0 - - if args.remove: - fmt = 'Removed encoding pragma from {filename}' - else: - fmt = 'Added `{pragma}` to {filename}' - - for filename in args.filenames: - with open(filename, 'r+b') as f: - file_ret = fix_encoding_pragma( - f, remove=args.remove, expected_pragma=args.pragma, - ) - retv |= file_ret - if file_ret: - print( - fmt.format(pragma=args.pragma.decode(), filename=filename), - ) - - return retv - - -if __name__ == '__main__': - exit(main()) diff --git a/pre_commit_hooks/forbid_new_submodules.py b/pre_commit_hooks/forbid_new_submodules.py index c144d728..b7a63cd2 100644 --- a/pre_commit_hooks/forbid_new_submodules.py +++ b/pre_commit_hooks/forbid_new_submodules.py @@ -1,14 +1,30 @@ -from typing import Optional -from typing import Sequence +from __future__ import annotations + +import argparse +import os +from collections.abc import Sequence from pre_commit_hooks.util import cmd_output -def main(argv: Optional[Sequence[str]] = None) -> int: - # `argv` is ignored, pre-commit will send us a list of files that we - # don't care about +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument('filenames', nargs='*') + args = parser.parse_args(argv) + + if ( + 'PRE_COMMIT_FROM_REF' in os.environ and + 'PRE_COMMIT_TO_REF' in os.environ + ): + diff_arg = '...'.join(( + os.environ['PRE_COMMIT_FROM_REF'], + os.environ['PRE_COMMIT_TO_REF'], + )) + else: + diff_arg = '--staged' added_diff = cmd_output( - 'git', 'diff', '--staged', '--diff-filter=A', '--raw', + 'git', 'diff', '--diff-filter=A', '--raw', diff_arg, '--', + *args.filenames, ) retv = 0 for line in added_diff.splitlines(): @@ -29,4 +45,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/mixed_line_ending.py b/pre_commit_hooks/mixed_line_ending.py index 0ef8e2c0..2fbf067f 100644 --- a/pre_commit_hooks/mixed_line_ending.py +++ b/pre_commit_hooks/mixed_line_ending.py @@ -1,8 +1,8 @@ +from __future__ import annotations + import argparse import collections -from typing import Dict -from typing import Optional -from typing import Sequence +from collections.abc import Sequence CRLF = b'\r\n' @@ -25,7 +25,7 @@ def fix_filename(filename: str, fix: str) -> int: with open(filename, 'rb') as f: contents = f.read() - counts: Dict[bytes, int] = collections.defaultdict(int) + counts: dict[bytes, int] = collections.defaultdict(int) for line in contents.splitlines(True): for ending in ALL_ENDINGS: @@ -62,7 +62,7 @@ def fix_filename(filename: str, fix: str) -> int: return other_endings -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( '-f', '--fix', @@ -85,4 +85,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/no_commit_to_branch.py b/pre_commit_hooks/no_commit_to_branch.py index fb1506f9..b0b8b238 100644 --- a/pre_commit_hooks/no_commit_to_branch.py +++ b/pre_commit_hooks/no_commit_to_branch.py @@ -1,8 +1,9 @@ +from __future__ import annotations + import argparse import re +from collections.abc import Sequence from typing import AbstractSet 
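A brief sketch of the ref-range selection that forbid_new_submodules now performs: when pre-commit exports PRE_COMMIT_FROM_REF and PRE_COMMIT_TO_REF (for example during pre-push), the hook diffs that range, otherwise it falls back to the staged changes. The subprocess wiring below stands in for the repository's cmd_output helper, and the gitlink note is my own gloss on the --raw output, not a quote of the hook.

from __future__ import annotations

import os
import subprocess
from collections.abc import Sequence


def added_raw_diff(filenames: Sequence[str]) -> str:
    if (
        'PRE_COMMIT_FROM_REF' in os.environ and
        'PRE_COMMIT_TO_REF' in os.environ
    ):
        # pre-push / from-ref runs: diff the pushed range
        diff_arg = '...'.join((
            os.environ['PRE_COMMIT_FROM_REF'],
            os.environ['PRE_COMMIT_TO_REF'],
        ))
    else:
        # default: only what is currently staged
        diff_arg = '--staged'
    return subprocess.check_output(
        ('git', 'diff', '--diff-filter=A', '--raw', diff_arg, '--', *filenames),
        text=True,
    )

# in `git diff --raw` output, an added submodule appears as a gitlink
# entry with mode 160000, which is what the hook looks for per line.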
-from typing import Optional -from typing import Sequence from pre_commit_hooks.util import CalledProcessError from pre_commit_hooks.util import cmd_output @@ -23,7 +24,7 @@ def is_on_branch( ) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( '-b', '--branch', action='append', @@ -38,10 +39,10 @@ def main(argv: Optional[Sequence[str]] = None) -> int: ) args = parser.parse_args(argv) - protected = frozenset(args.branch or ('master',)) + protected = frozenset(args.branch or ('master', 'main')) patterns = frozenset(args.pattern or ()) return int(is_on_branch(protected, patterns)) if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/pretty_format_json.py b/pre_commit_hooks/pretty_format_json.py index 25827dc4..501f37f7 100644 --- a/pre_commit_hooks/pretty_format_json.py +++ b/pre_commit_hooks/pretty_format_json.py @@ -1,12 +1,11 @@ +from __future__ import annotations + import argparse import json +import sys +from collections.abc import Mapping +from collections.abc import Sequence from difflib import unified_diff -from typing import List -from typing import Mapping -from typing import Optional -from typing import Sequence -from typing import Tuple -from typing import Union def _get_pretty_format( @@ -16,7 +15,7 @@ def _get_pretty_format( sort_keys: bool = True, top_keys: Sequence[str] = (), ) -> str: - def pairs_first(pairs: Sequence[Tuple[str, str]]) -> Mapping[str, str]: + def pairs_first(pairs: Sequence[tuple[str, str]]) -> Mapping[str, str]: before = [pair for pair in pairs if pair[0] in top_keys] before = sorted(before, key=lambda x: top_keys.index(x[0])) after = [pair for pair in pairs if pair[0] not in top_keys] @@ -37,7 +36,7 @@ def _autofix(filename: str, new_contents: str) -> None: f.write(new_contents) -def parse_num_to_int(s: str) -> Union[int, str]: +def parse_num_to_int(s: str) -> int | str: """Convert string numbers to int, leaving strings as is.""" try: return int(s) @@ -45,7 +44,7 @@ def parse_num_to_int(s: str) -> Union[int, str]: return s -def parse_topkeys(s: str) -> List[str]: +def parse_topkeys(s: str) -> list[str]: return s.split(',') @@ -56,7 +55,7 @@ def get_diff(source: str, target: str, file: str) -> str: return ''.join(diff) -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( '--autofix', @@ -111,26 +110,28 @@ def main(argv: Optional[Sequence[str]] = None) -> int: contents, args.indent, ensure_ascii=not args.no_ensure_ascii, sort_keys=not args.no_sort_keys, top_keys=args.top_keys, ) - + except ValueError: + print( + f'Input File {json_file} is not a valid JSON, consider using ' + f'check-json', + ) + status = 1 + else: if contents != pretty_contents: if args.autofix: _autofix(json_file, pretty_contents) else: - print( - get_diff(contents, pretty_contents, json_file), - end='', + diff_output = get_diff( + contents, + pretty_contents, + json_file, ) + sys.stdout.buffer.write(diff_output.encode()) status = 1 - except ValueError: - print( - f'Input File {json_file} is not a valid JSON, consider using ' - f'check-json', - ) - return 1 return status if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/removed.py b/pre_commit_hooks/removed.py index 60df0963..fb2b6d98 100644 --- a/pre_commit_hooks/removed.py +++ b/pre_commit_hooks/removed.py @@ -1,9 +1,10 @@ 
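For the pretty-format-json changes, a small sketch of how an object_pairs_hook in the spirit of pairs_first pins selected keys to the top while sorting the rest. The json.loads/json.dumps wiring is assumed for illustration, and this sketch always sorts the remaining keys, whereas the hook makes that conditional on --no-sort-keys.

from __future__ import annotations

import json
from collections.abc import Mapping
from collections.abc import Sequence


def pretty(contents: str, top_keys: Sequence[str] = (), indent: str = '  ') -> str:
    def pairs_first(pairs: Sequence[tuple[str, str]]) -> Mapping[str, str]:
        before = [pair for pair in pairs if pair[0] in top_keys]
        before = sorted(before, key=lambda x: top_keys.index(x[0]))
        after = sorted(
            (pair for pair in pairs if pair[0] not in top_keys),
            key=lambda x: x[0],
        )
        return dict(before + after)

    obj = json.loads(contents, object_pairs_hook=pairs_first)
    return json.dumps(obj, indent=indent) + '\n'


print(pretty('{"b": 1, "version": 2, "a": 3}', top_keys=('version',)))
# keys come out as: "version" first, then "a", "b" (sorted)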
+from __future__ import annotations + import sys -from typing import Optional -from typing import Sequence +from collections.abc import Sequence -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: argv = argv if argv is not None else sys.argv[1:] hookid, new_hookid, url = argv[:3] raise SystemExit( @@ -12,4 +13,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/requirements_txt_fixer.py b/pre_commit_hooks/requirements_txt_fixer.py index 78103a14..8ce8ec64 100644 --- a/pre_commit_hooks/requirements_txt_fixer.py +++ b/pre_commit_hooks/requirements_txt_fixer.py @@ -1,9 +1,9 @@ +from __future__ import annotations + import argparse import re +from collections.abc import Sequence from typing import IO -from typing import List -from typing import Optional -from typing import Sequence PASS = 0 @@ -15,8 +15,8 @@ class Requirement: UNTIL_SEP = re.compile(rb'[^;\s]+') def __init__(self) -> None: - self.value: Optional[bytes] = None - self.comments: List[bytes] = [] + self.value: bytes | None = None + self.comments: list[bytes] = [] @property def name(self) -> bytes: @@ -36,7 +36,7 @@ def name(self) -> bytes: return name[:m.start()] - def __lt__(self, requirement: 'Requirement') -> int: + def __lt__(self, requirement: Requirement) -> bool: # \n means top of file comment, so always return True, # otherwise just do a string comparison with value. assert self.value is not None, self.value @@ -45,6 +45,11 @@ def __lt__(self, requirement: 'Requirement') -> int: elif requirement.value == b'\n': return False else: + # if 2 requirements have the same name, the one with comments + # needs to go first (so that when removing duplicates, the one + # with comments is kept) + if self.name == requirement.name: + return bool(self.comments) > bool(requirement.comments) return self.name < requirement.name def is_complete(self) -> bool: @@ -61,9 +66,9 @@ def append_value(self, value: bytes) -> None: def fix_requirements(f: IO[bytes]) -> int: - requirements: List[Requirement] = [] + requirements: list[Requirement] = [] before = list(f) - after: List[bytes] = [] + after: list[bytes] = [] before_string = b''.join(before) @@ -95,7 +100,7 @@ def fix_requirements(f: IO[bytes]) -> int: requirement.value = b'\n' else: requirement.comments.append(line) - elif line.startswith(b'#') or line.strip() == b'': + elif line.lstrip().startswith(b'#') or line.strip() == b'': requirement.comments.append(line) else: requirement.append_value(line) @@ -110,13 +115,20 @@ def fix_requirements(f: IO[bytes]) -> int: # which is automatically added by broken pip package under Debian requirements = [ req for req in requirements - if req.value != b'pkg-resources==0.0.0\n' + if req.value not in [ + b'pkg-resources==0.0.0\n', + b'pkg_resources==0.0.0\n', + ] ] + # sort the requirements and remove duplicates + prev = None for requirement in sorted(requirements): after.extend(requirement.comments) assert requirement.value, requirement.value - after.append(requirement.value) + if prev is None or requirement.value != prev.value: + after.append(requirement.value) + prev = requirement after.extend(rest) after_string = b''.join(after) @@ -130,7 +142,7 @@ def fix_requirements(f: IO[bytes]) -> int: return FAIL -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', 
help='Filenames to fix') args = parser.parse_args(argv) @@ -150,4 +162,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/sort_simple_yaml.py b/pre_commit_hooks/sort_simple_yaml.py old mode 100755 new mode 100644 index 8ebc84ff..65e6b7a6 --- a/pre_commit_hooks/sort_simple_yaml.py +++ b/pre_commit_hooks/sort_simple_yaml.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """Sort a simple YAML file, keeping blocks of comments and definitions together. @@ -18,16 +17,16 @@ In other words, we don't sort deeper than the top layer, and might corrupt complicated YAML files. """ +from __future__ import annotations + import argparse -from typing import List -from typing import Optional -from typing import Sequence +from collections.abc import Sequence QUOTES = ["'", '"'] -def sort(lines: List[str]) -> List[str]: +def sort(lines: list[str]) -> list[str]: """Sort a YAML file in alphabetical order, keeping blocks together. :param lines: array of strings (without newlines) @@ -45,7 +44,7 @@ def sort(lines: List[str]) -> List[str]: return new_lines -def parse_block(lines: List[str], header: bool = False) -> List[str]: +def parse_block(lines: list[str], header: bool = False) -> list[str]: """Parse and return a single block, popping off the start of `lines`. If parsing a header block, we stop after we reach a line that is not a @@ -61,7 +60,7 @@ def parse_block(lines: List[str], header: bool = False) -> List[str]: return block_lines -def parse_blocks(lines: List[str]) -> List[List[str]]: +def parse_blocks(lines: list[str]) -> list[list[str]]: """Parse and return all possible blocks, popping off the start of `lines`. :param lines: list of lines @@ -78,7 +77,7 @@ def parse_blocks(lines: List[str]) -> List[List[str]]: return blocks -def first_key(lines: List[str]) -> str: +def first_key(lines: list[str]) -> str: """Returns a string representing the sort key of a block. 
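Circling back to the requirements-txt-fixer change above: a compact, standalone illustration of the new ordering rule, where two entries with the same name compare so that the commented one sorts first and the later duplicate is dropped, keeping its comments intact. This is not the hook's Requirement class, just the same idea on plain tuples.

from __future__ import annotations


def dedupe_sorted(reqs: list[tuple[bytes, list[bytes]]]) -> list[bytes]:
    """reqs: (value, comments) pairs; keep one copy per value, comments first."""
    # for equal values, the entry that has comments sorts first,
    # so the kept (first) copy is the one carrying the comments
    ordered = sorted(reqs, key=lambda r: (r[0], not r[1]))
    out: list[bytes] = []
    prev: bytes | None = None
    for value, comments in ordered:
        if value != prev:
            out.extend(comments)
            out.append(value)
        prev = value
    return out


print(dedupe_sorted([
    (b'pytest\n', []),
    (b'pytest\n', [b'# pinned for CI\n']),
    (b'flake8\n', []),
]))
# [b'flake8\n', b'# pinned for CI\n', b'pytest\n']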
The sort key is the first YAML key we encounter, ignoring comments, and @@ -100,7 +99,7 @@ def first_key(lines: List[str]) -> str: return '' # not actually reached in reality -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to fix') args = parser.parse_args(argv) @@ -123,4 +122,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/string_fixer.py b/pre_commit_hooks/string_fixer.py index 3fdb6e2f..76eb3526 100644 --- a/pre_commit_hooks/string_fixer.py +++ b/pre_commit_hooks/string_fixer.py @@ -1,10 +1,17 @@ +from __future__ import annotations + import argparse import io import re +import sys import tokenize -from typing import List -from typing import Optional -from typing import Sequence +from collections.abc import Sequence + +if sys.version_info >= (3, 12): # pragma: >=3.12 cover + FSTRING_START = tokenize.FSTRING_START + FSTRING_END = tokenize.FSTRING_END +else: # pragma: <3.12 cover + FSTRING_START = FSTRING_END = -1 START_QUOTE_RE = re.compile('^[a-zA-Z]*"') @@ -24,7 +31,7 @@ def handle_match(token_text: str) -> str: return token_text -def get_line_offsets_by_line_no(src: str) -> List[int]: +def get_line_offsets_by_line_no(src: str) -> list[int]: # Padded so we can index with line number offsets = [-1, 0] for line in src.splitlines(True): @@ -40,11 +47,17 @@ def fix_strings(filename: str) -> int: # Basically a mutable string splitcontents = list(contents) + fstring_depth = 0 + # Iterate in reverse so the offsets are always correct tokens_l = list(tokenize.generate_tokens(io.StringIO(contents).readline)) tokens = reversed(tokens_l) for token_type, token_text, (srow, scol), (erow, ecol), _ in tokens: - if token_type == tokenize.STRING: + if token_type == FSTRING_START: # pragma: >=3.12 cover + fstring_depth += 1 + elif token_type == FSTRING_END: # pragma: >=3.12 cover + fstring_depth -= 1 + elif fstring_depth == 0 and token_type == tokenize.STRING: new_text = handle_match(token_text) splitcontents[ line_offsets[srow] + scol: @@ -60,7 +73,7 @@ def fix_strings(filename: str) -> int: return 0 -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to fix') args = parser.parse_args(argv) @@ -77,4 +90,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/tests_should_end_in_test.py b/pre_commit_hooks/tests_should_end_in_test.py index b8cf9152..07af277d 100644 --- a/pre_commit_hooks/tests_should_end_in_test.py +++ b/pre_commit_hooks/tests_should_end_in_test.py @@ -1,33 +1,53 @@ +from __future__ import annotations + import argparse import os.path import re -from typing import Optional -from typing import Sequence +from collections.abc import Sequence -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*') - parser.add_argument( - '--django', default=False, action='store_true', - help='Use Django-style test naming pattern (test*.py)', + mutex = parser.add_mutually_exclusive_group() + mutex.add_argument( + '--pytest', + dest='pattern', + action='store_const', + 
const=r'.*_test\.py', + default=r'.*_test\.py', + help='(the default) ensure tests match %(const)s', + ) + mutex.add_argument( + '--pytest-test-first', + dest='pattern', + action='store_const', + const=r'test_.*\.py', + help='ensure tests match %(const)s', + ) + mutex.add_argument( + '--django', '--unittest', + dest='pattern', + action='store_const', + const=r'test.*\.py', + help='ensure tests match %(const)s', ) args = parser.parse_args(argv) retcode = 0 - test_name_pattern = r'test.*\.py' if args.django else r'.*_test\.py' + reg = re.compile(args.pattern) for filename in args.filenames: base = os.path.basename(filename) if ( - not re.match(test_name_pattern, base) and + not reg.fullmatch(base) and not base == '__init__.py' and not base == 'conftest.py' ): retcode = 1 - print(f'{filename} does not match pattern "{test_name_pattern}"') + print(f'{filename} does not match pattern "{args.pattern}"') return retcode if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/trailing_whitespace_fixer.py b/pre_commit_hooks/trailing_whitespace_fixer.py index 05ed9994..dab8b14a 100644 --- a/pre_commit_hooks/trailing_whitespace_fixer.py +++ b/pre_commit_hooks/trailing_whitespace_fixer.py @@ -1,13 +1,14 @@ +from __future__ import annotations + import argparse import os -from typing import Optional -from typing import Sequence +from collections.abc import Sequence def _fix_file( filename: str, is_markdown: bool, - chars: Optional[bytes], + chars: bytes | None, ) -> bool: with open(filename, mode='rb') as file_processed: lines = file_processed.readlines() @@ -24,7 +25,7 @@ def _fix_file( def _process_line( line: bytes, is_markdown: bool, - chars: Optional[bytes], + chars: bytes | None, ) -> bytes: if line[-2:] == b'\r\n': eol = b'\r\n' @@ -40,7 +41,7 @@ def _process_line( return line.rstrip(chars) + eol -def main(argv: Optional[Sequence[str]] = None) -> int: +def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( '--no-markdown-linebreak-ext', @@ -99,4 +100,4 @@ def main(argv: Optional[Sequence[str]] = None) -> int: if __name__ == '__main__': - exit(main()) + raise SystemExit(main()) diff --git a/pre_commit_hooks/util.py b/pre_commit_hooks/util.py index e04b0150..d6c90ae0 100644 --- a/pre_commit_hooks/util.py +++ b/pre_commit_hooks/util.py @@ -1,19 +1,19 @@ +from __future__ import annotations + import subprocess from typing import Any -from typing import Optional -from typing import Set class CalledProcessError(RuntimeError): pass -def added_files() -> Set[str]: +def added_files() -> set[str]: cmd = ('git', 'diff', '--staged', '--name-only', '--diff-filter=A') return set(cmd_output(*cmd).splitlines()) -def cmd_output(*cmd: str, retcode: Optional[int] = 0, **kwargs: Any) -> str: +def cmd_output(*cmd: str, retcode: int | None = 0, **kwargs: Any) -> str: kwargs.setdefault('stdout', subprocess.PIPE) kwargs.setdefault('stderr', subprocess.PIPE) proc = subprocess.Popen(cmd, **kwargs) @@ -22,3 +22,11 @@ def cmd_output(*cmd: str, retcode: Optional[int] = 0, **kwargs: Any) -> str: if retcode is not None and proc.returncode != retcode: raise CalledProcessError(cmd, retcode, proc.returncode, stdout, stderr) return stdout + + +def zsplit(s: str) -> list[str]: + s = s.strip('\0') + if s: + return s.split('\0') + else: + return [] diff --git a/setup.cfg b/setup.cfg index 47b8bb6d..d91f4399 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = pre_commit_hooks -version = 3.2.0 +version = 6.0.0 description 
= Some out-of-the-box hooks for pre-commit. long_description = file: README.md long_description_content_type = text/markdown @@ -8,14 +8,10 @@ url = https://github.com/pre-commit/pre-commit-hooks author = Anthony Sottile author_email = asottile@umich.edu license = MIT -license_file = LICENSE +license_files = LICENSE classifiers = - License :: OSI Approved :: MIT License Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy @@ -23,32 +19,38 @@ classifiers = packages = find: install_requires = ruamel.yaml>=0.15 - toml -python_requires = >=3.6.1 + tomli>=1.1.0;python_version<"3.11" +python_requires = >=3.10 + +[options.packages.find] +exclude = + tests* + testing* [options.entry_points] console_scripts = check-added-large-files = pre_commit_hooks.check_added_large_files:main check-ast = pre_commit_hooks.check_ast:main check-builtin-literals = pre_commit_hooks.check_builtin_literals:main - check-byte-order-marker = pre_commit_hooks.check_byte_order_marker:main check-case-conflict = pre_commit_hooks.check_case_conflict:main check-docstring-first = pre_commit_hooks.check_docstring_first:main check-executables-have-shebangs = pre_commit_hooks.check_executables_have_shebangs:main check-json = pre_commit_hooks.check_json:main check-merge-conflict = pre_commit_hooks.check_merge_conflict:main + check-shebang-scripts-are-executable = pre_commit_hooks.check_shebang_scripts_are_executable:main check-symlinks = pre_commit_hooks.check_symlinks:main check-toml = pre_commit_hooks.check_toml:main check-vcs-permalinks = pre_commit_hooks.check_vcs_permalinks:main check-xml = pre_commit_hooks.check_xml:main check-yaml = pre_commit_hooks.check_yaml:main debug-statement-hook = pre_commit_hooks.debug_statement_hook:main + destroyed-symlinks = pre_commit_hooks.destroyed_symlinks:main detect-aws-credentials = pre_commit_hooks.detect_aws_credentials:main detect-private-key = pre_commit_hooks.detect_private_key:main double-quote-string-fixer = pre_commit_hooks.string_fixer:main end-of-file-fixer = pre_commit_hooks.end_of_file_fixer:main file-contents-sorter = pre_commit_hooks.file_contents_sorter:main - fix-encoding-pragma = pre_commit_hooks.fix_encoding_pragma:main + fix-byte-order-marker = pre_commit_hooks.fix_byte_order_marker:main forbid-new-submodules = pre_commit_hooks.forbid_new_submodules:main mixed-line-ending = pre_commit_hooks.mixed_line_ending:main name-tests-test = pre_commit_hooks.tests_should_end_in_test:main @@ -59,11 +61,6 @@ console_scripts = sort-simple-yaml = pre_commit_hooks.sort_simple_yaml:main trailing-whitespace-fixer = pre_commit_hooks.trailing_whitespace_fixer:main -[options.packages.find] -exclude = - tests* - testing* - [bdist_wheel] universal = True @@ -75,7 +72,8 @@ check_untyped_defs = true disallow_any_generics = true disallow_incomplete_defs = true disallow_untyped_defs = true -no_implicit_optional = true +warn_redundant_casts = true +warn_unused_ignores = true [mypy-testing.*] disallow_untyped_defs = false diff --git a/setup.py b/setup.py index 8bf1ba93..3d93aefb 100644 --- a/setup.py +++ b/setup.py @@ -1,2 +1,4 @@ +from __future__ import annotations + from setuptools import setup setup() diff --git a/testing/resources/duplicate_key_json.notjson b/testing/resources/duplicate_key_json.notjson new file mode 100644 index 
00000000..8a432623 --- /dev/null +++ b/testing/resources/duplicate_key_json.notjson @@ -0,0 +1,4 @@ +{ + "hello": "world", + "hello": "planet" +} diff --git a/testing/util.py b/testing/util.py index 8e468d60..2bbbe644 100644 --- a/testing/util.py +++ b/testing/util.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import os.path +import subprocess TESTING_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -6,3 +9,8 @@ def get_resource_path(path): return os.path.join(TESTING_DIR, 'resources', path) + + +def git_commit(*args, **kwargs): + cmd = ('git', 'commit', '--no-gpg-sign', '--no-verify', '--no-edit', *args) + subprocess.check_call(cmd, **kwargs) diff --git a/tests/check_added_large_files_test.py b/tests/check_added_large_files_test.py index 40ffd24d..54c4e689 100644 --- a/tests/check_added_large_files_test.py +++ b/tests/check_added_large_files_test.py @@ -1,10 +1,13 @@ -import distutils.spawn +from __future__ import annotations + +import shutil import pytest from pre_commit_hooks.check_added_large_files import find_large_added_files from pre_commit_hooks.check_added_large_files import main from pre_commit_hooks.util import cmd_output +from testing.util import git_commit def test_nothing_added(temp_git_dir): @@ -40,6 +43,17 @@ def test_add_something_giant(temp_git_dir): assert find_large_added_files(['f.py'], 10) == 0 +def test_enforce_all(temp_git_dir): + with temp_git_dir.as_cwd(): + temp_git_dir.join('f.py').write('a' * 10000) + + # Should fail, when not staged with enforce_all + assert find_large_added_files(['f.py'], 0, enforce_all=True) == 1 + + # Should pass, when not staged without enforce_all + assert find_large_added_files(['f.py'], 0, enforce_all=False) == 0 + + def test_added_file_not_in_pre_commits_list(temp_git_dir): with temp_git_dir.as_cwd(): temp_git_dir.join('f.py').write("print('hello world')") @@ -64,7 +78,7 @@ def test_integration(temp_git_dir): def has_gitlfs(): - return distutils.spawn.find_executable('git-lfs') is not None + return shutil.which('git-lfs') is not None xfailif_no_gitlfs = pytest.mark.xfail( @@ -73,10 +87,9 @@ def has_gitlfs(): @xfailif_no_gitlfs -def test_allows_gitlfs(temp_git_dir, monkeypatch): # pragma: no cover +def test_allows_gitlfs(temp_git_dir): # pragma: no cover with temp_git_dir.as_cwd(): - monkeypatch.setenv('HOME', str(temp_git_dir)) - cmd_output('git', 'lfs', 'install') + cmd_output('git', 'lfs', 'install', '--local') temp_git_dir.join('f.py').write('a' * 10000) cmd_output('git', 'lfs', 'track', 'f.py') cmd_output('git', 'add', '--', '.') @@ -85,15 +98,37 @@ def test_allows_gitlfs(temp_git_dir, monkeypatch): # pragma: no cover @xfailif_no_gitlfs -def test_moves_with_gitlfs(temp_git_dir, monkeypatch): # pragma: no cover +def test_moves_with_gitlfs(temp_git_dir): # pragma: no cover with temp_git_dir.as_cwd(): - monkeypatch.setenv('HOME', str(temp_git_dir)) - cmd_output('git', 'lfs', 'install') + cmd_output('git', 'lfs', 'install', '--local') cmd_output('git', 'lfs', 'track', 'a.bin', 'b.bin') # First add the file we're going to move temp_git_dir.join('a.bin').write('a' * 10000) cmd_output('git', 'add', '--', '.') - cmd_output('git', 'commit', '--no-gpg-sign', '-am', 'foo') + git_commit('-am', 'foo') # Now move it and make sure the hook still succeeds cmd_output('git', 'mv', 'a.bin', 'b.bin') assert main(('--maxkb', '9', 'b.bin')) == 0 + + +@xfailif_no_gitlfs +def test_enforce_allows_gitlfs(temp_git_dir): # pragma: no cover + with temp_git_dir.as_cwd(): + cmd_output('git', 'lfs', 'install', '--local') + 
temp_git_dir.join('f.py').write('a' * 10000) + cmd_output('git', 'lfs', 'track', 'f.py') + cmd_output('git', 'add', '--', '.') + # With --enforce-all large files on git lfs should succeed + assert main(('--enforce-all', '--maxkb', '9', 'f.py')) == 0 + + +@xfailif_no_gitlfs +def test_enforce_allows_gitlfs_after_commit(temp_git_dir): # pragma: no cover + with temp_git_dir.as_cwd(): + cmd_output('git', 'lfs', 'install', '--local') + temp_git_dir.join('f.py').write('a' * 10000) + cmd_output('git', 'lfs', 'track', 'f.py') + cmd_output('git', 'add', '--', '.') + git_commit('-am', 'foo') + # With --enforce-all large files on git lfs should succeed + assert main(('--enforce-all', '--maxkb', '9', 'f.py')) == 0 diff --git a/tests/check_ast_test.py b/tests/check_ast_test.py index 686fd116..62439661 100644 --- a/tests/check_ast_test.py +++ b/tests/check_ast_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pre_commit_hooks.check_ast import main from testing.util import get_resource_path diff --git a/tests/check_builtin_literals_test.py b/tests/check_builtin_literals_test.py index e9367989..de29063f 100644 --- a/tests/check_builtin_literals_test.py +++ b/tests/check_builtin_literals_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast import pytest @@ -36,11 +38,6 @@ ''' -@pytest.fixture -def visitor(): - return Visitor() - - @pytest.mark.parametrize( ('expression', 'calls'), [ @@ -83,7 +80,8 @@ def visitor(): ('builtins.tuple()', []), ], ) -def test_non_dict_exprs(visitor, expression, calls): +def test_non_dict_exprs(expression, calls): + visitor = Visitor(ignore=set()) visitor.visit(ast.parse(expression)) assert visitor.builtin_type_calls == calls @@ -100,7 +98,8 @@ def test_non_dict_exprs(visitor, expression, calls): ('builtins.dict()', []), ], ) -def test_dict_allow_kwargs_exprs(visitor, expression, calls): +def test_dict_allow_kwargs_exprs(expression, calls): + visitor = Visitor(ignore=set()) visitor.visit(ast.parse(expression)) assert visitor.builtin_type_calls == calls @@ -112,17 +111,18 @@ def test_dict_allow_kwargs_exprs(visitor, expression, calls): ('dict(a=1, b=2, c=3)', [Call('dict', 1, 0)]), ("dict(**{'a': 1, 'b': 2, 'c': 3})", [Call('dict', 1, 0)]), ('builtins.dict()', []), + pytest.param('f(dict())', [Call('dict', 1, 2)], id='nested'), ], ) def test_dict_no_allow_kwargs_exprs(expression, calls): - visitor = Visitor(allow_dict_kwargs=False) + visitor = Visitor(ignore=set(), allow_dict_kwargs=False) visitor.visit(ast.parse(expression)) assert visitor.builtin_type_calls == calls def test_ignore_constructors(): visitor = Visitor( - ignore=('complex', 'dict', 'float', 'int', 'list', 'str', 'tuple'), + ignore={'complex', 'dict', 'float', 'int', 'list', 'str', 'tuple'}, ) visitor.visit(ast.parse(BUILTIN_CONSTRUCTORS)) assert visitor.builtin_type_calls == [] diff --git a/tests/check_case_conflict_test.py b/tests/check_case_conflict_test.py index 53de852e..a914f452 100644 --- a/tests/check_case_conflict_test.py +++ b/tests/check_case_conflict_test.py @@ -1,6 +1,26 @@ +from __future__ import annotations + +import sys + +import pytest + from pre_commit_hooks.check_case_conflict import find_conflicting_filenames from pre_commit_hooks.check_case_conflict import main +from pre_commit_hooks.check_case_conflict import parents from pre_commit_hooks.util import cmd_output +from testing.util import git_commit + +skip_win32 = pytest.mark.skipif( + sys.platform == 'win32', + reason='case conflicts between directories and files', +) + + +def test_parents(): + assert 
set(parents('a')) == set() + assert set(parents('a/b')) == {'a'} + assert set(parents('a/b/c')) == {'a/b', 'a'} + assert set(parents('a/b/c/d')) == {'a/b/c', 'a/b', 'a'} def test_nothing_added(temp_git_dir): @@ -26,6 +46,36 @@ def test_adding_something_with_conflict(temp_git_dir): assert find_conflicting_filenames(['f.py', 'F.py']) == 1 +@skip_win32 # pragma: win32 no cover +def test_adding_files_with_conflicting_directories(temp_git_dir): + with temp_git_dir.as_cwd(): + temp_git_dir.mkdir('dir').join('x').write('foo') + temp_git_dir.mkdir('DIR').join('y').write('foo') + cmd_output('git', 'add', '-A') + + assert find_conflicting_filenames([]) == 1 + + +@skip_win32 # pragma: win32 no cover +def test_adding_files_with_conflicting_deep_directories(temp_git_dir): + with temp_git_dir.as_cwd(): + temp_git_dir.mkdir('x').mkdir('y').join('z').write('foo') + temp_git_dir.join('X').write('foo') + cmd_output('git', 'add', '-A') + + assert find_conflicting_filenames([]) == 1 + + +@skip_win32 # pragma: win32 no cover +def test_adding_file_with_conflicting_directory(temp_git_dir): + with temp_git_dir.as_cwd(): + temp_git_dir.mkdir('dir').join('x').write('foo') + temp_git_dir.join('DIR').write('foo') + cmd_output('git', 'add', '-A') + + assert find_conflicting_filenames([]) == 1 + + def test_added_file_not_in_pre_commits_list(temp_git_dir): with temp_git_dir.as_cwd(): temp_git_dir.join('f.py').write("print('hello world')") @@ -38,7 +88,7 @@ def test_file_conflicts_with_committed_file(temp_git_dir): with temp_git_dir.as_cwd(): temp_git_dir.join('f.py').write("print('hello world')") cmd_output('git', 'add', 'f.py') - cmd_output('git', 'commit', '--no-gpg-sign', '-n', '-m', 'Add f.py') + git_commit('-m', 'Add f.py') temp_git_dir.join('F.py').write("print('hello world')") cmd_output('git', 'add', 'F.py') @@ -46,6 +96,19 @@ def test_file_conflicts_with_committed_file(temp_git_dir): assert find_conflicting_filenames(['F.py']) == 1 +@skip_win32 # pragma: win32 no cover +def test_file_conflicts_with_committed_dir(temp_git_dir): + with temp_git_dir.as_cwd(): + temp_git_dir.mkdir('dir').join('x').write('foo') + cmd_output('git', 'add', '-A') + git_commit('-m', 'Add f.py') + + temp_git_dir.join('DIR').write('foo') + cmd_output('git', 'add', '-A') + + assert find_conflicting_filenames([]) == 1 + + def test_integration(temp_git_dir): with temp_git_dir.as_cwd(): assert main(argv=[]) == 0 diff --git a/tests/check_docstring_first_test.py b/tests/check_docstring_first_test.py index ed5c08ef..8bafae82 100644 --- a/tests/check_docstring_first_test.py +++ b/tests/check_docstring_first_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.check_docstring_first import check_docstring_first @@ -15,7 +17,7 @@ b'from __future__ import unicode_literals\n' b'"foo"\n', 1, - '{filename}:2 Module docstring appears after code ' + '{filename}:2: Module docstring appears after code ' '(code seen on line 1).\n', ), # Test double docstring @@ -24,7 +26,7 @@ b'from __future__ import absolute_import\n' b'"fake docstring"\n', 1, - '{filename}:3 Multiple module docstrings ' + '{filename}:3: Multiple module docstrings ' '(first docstring on line 1).\n', ), # Test multiple lines of code above @@ -33,7 +35,7 @@ b'import sys\n' b'"docstring"\n', 1, - '{filename}:3 Module docstring appears after code ' + '{filename}:3: Module docstring appears after code ' '(code seen on line 1).\n', ), # String literals in expressions are ok. 
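The new parents() tests above pin down the expected behaviour; one implementation consistent with those assertions (not necessarily the exact code in check_case_conflict) is sketched below. Adding these ancestor directories to the set of names being compared is what lets the hook flag 'dir/x' against 'DIR' even though no two files share a basename.

from __future__ import annotations

from collections.abc import Iterator


def parents(file: str) -> Iterator[str]:
    # yield every ancestor directory of a repo-relative path:
    # 'a/b/c' -> 'a/b', 'a'
    path_parts = file.split('/')
    path_parts.pop()
    while path_parts:
        yield '/'.join(path_parts)
        path_parts.pop()


assert set(parents('a')) == set()
assert set(parents('a/b/c')) == {'a/b', 'a'}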
diff --git a/tests/check_executables_have_shebangs_test.py b/tests/check_executables_have_shebangs_test.py index 7046081f..82d03e3d 100644 --- a/tests/check_executables_have_shebangs_test.py +++ b/tests/check_executables_have_shebangs_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys @@ -102,16 +104,6 @@ def test_check_git_filemode_failing(tmpdir): assert check_executables_have_shebangs._check_git_filemode(files) == 1 -@pytest.mark.parametrize('out', ('\0f1\0f2\0', '\0f1\0f2', 'f1\0f2\0')) -def test_check_zsplits_correctly(out): - assert check_executables_have_shebangs.zsplit(out) == ['f1', 'f2'] - - -@pytest.mark.parametrize('out', ('\0\0', '\0', '')) -def test_check_zsplit_returns_empty(out): - assert check_executables_have_shebangs.zsplit(out) == [] - - @pytest.mark.parametrize( ('content', 'mode', 'expected'), ( diff --git a/tests/check_illegal_windows_names_test.py b/tests/check_illegal_windows_names_test.py new file mode 100644 index 00000000..82d75322 --- /dev/null +++ b/tests/check_illegal_windows_names_test.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import os.path +import re + +import pytest + +from pre_commit_hooks.check_yaml import yaml + + +@pytest.fixture(scope='module') +def hook_re(): + here = os.path.dirname(__file__) + with open(os.path.join(here, '..', '.pre-commit-hooks.yaml')) as f: + hook_defs = yaml.load(f) + hook, = ( + hook + for hook in hook_defs + if hook['id'] == 'check-illegal-windows-names' + ) + yield re.compile(hook['files']) + + +@pytest.mark.parametrize( + 's', + ( + pytest.param('aux.txt', id='with ext'), + pytest.param('aux', id='without ext'), + pytest.param('AuX.tXt', id='capitals'), + pytest.param('com7.dat', id='com with digit'), + pytest.param(':', id='bare colon'), + pytest.param('file:Zone.Identifier', id='mid colon'), + pytest.param('path/COM¹.json', id='com with superscript'), + pytest.param('dir/LPT³.toml', id='lpt with superscript'), + pytest.param('with < less than', id='with less than'), + pytest.param('Fast or Slow?.md', id='with question mark'), + pytest.param('with "double" quotes', id='with double quotes'), + pytest.param('with_null\x00byte', id='with null byte'), + pytest.param('ends_with.', id='ends with period'), + pytest.param('ends_with ', id='ends with space'), + pytest.param('ends_with\t', id='ends with tab'), + pytest.param('dir/ends./with.txt', id='directory ends with period'), + pytest.param('dir/ends /with.txt', id='directory ends with space'), + ), +) +def test_check_illegal_windows_names_matches(hook_re, s): + assert hook_re.search(s) + + +@pytest.mark.parametrize( + 's', + ( + pytest.param('README.md', id='standard file'), + pytest.param('foo.aux', id='as ext'), + pytest.param('com.dat', id='com without digit'), + pytest.param('.python-version', id='starts with period'), + pytest.param(' pseudo nan', id='with spaces'), + pytest.param('!@#$%^&;=≤\'~`¡¿€🤗', id='with allowed characters'), + pytest.param('path.to/file.py', id='standard path'), + ), +) +def test_check_illegal_windows_names_does_not_match(hook_re, s): + assert hook_re.search(s) is None diff --git a/tests/check_json_test.py b/tests/check_json_test.py index c63dc4c8..53e1f52d 100644 --- a/tests/check_json_test.py +++ b/tests/check_json_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.check_json import main @@ -9,6 +11,7 @@ ('bad_json.notjson', 1), ('bad_json_latin1.nonjson', 1), ('ok_json.json', 0), + ('duplicate_key_json.notjson', 1), ), ) def test_main(capsys, 
filename, expected_retval): diff --git a/tests/check_merge_conflict_test.py b/tests/check_merge_conflict_test.py index fccf41ff..64112d79 100644 --- a/tests/check_merge_conflict_test.py +++ b/tests/check_merge_conflict_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import shutil @@ -6,6 +8,7 @@ from pre_commit_hooks.check_merge_conflict import main from pre_commit_hooks.util import cmd_output from testing.util import get_resource_path +from testing.util import git_commit @pytest.fixture @@ -20,19 +23,19 @@ def f1_is_a_conflict_file(tmpdir): with repo1.as_cwd(): repo1_f1.ensure() cmd_output('git', 'add', '.') - cmd_output('git', 'commit', '--no-gpg-sign', '-m', 'commit1') + git_commit('-m', 'commit1') cmd_output('git', 'clone', str(repo1), str(repo2)) - # Commit in master + # Commit in mainline with repo1.as_cwd(): repo1_f1.write('parent\n') - cmd_output('git', 'commit', '--no-gpg-sign', '-am', 'master commit2') + git_commit('-am', 'mainline commit2') # Commit in clone and pull with repo2.as_cwd(): repo2_f1.write('child\n') - cmd_output('git', 'commit', '--no-gpg-sign', '-am', 'clone commit2') + git_commit('-am', 'clone commit2') cmd_output('git', 'pull', '--no-rebase', retcode=None) # We should end up in a merge conflict! f1 = repo2_f1.read() @@ -75,20 +78,20 @@ def repository_pending_merge(tmpdir): with repo1.as_cwd(): repo1_f1.ensure() cmd_output('git', 'add', '.') - cmd_output('git', 'commit', '--no-gpg-sign', '-m', 'commit1') + git_commit('-m', 'commit1') cmd_output('git', 'clone', str(repo1), str(repo2)) - # Commit in master + # Commit in mainline with repo1.as_cwd(): repo1_f1.write('parent\n') - cmd_output('git', 'commit', '--no-gpg-sign', '-am', 'master commit2') + git_commit('-am', 'mainline commit2') # Commit in clone and pull without committing with repo2.as_cwd(): repo2_f2.write('child\n') cmd_output('git', 'add', '.') - cmd_output('git', 'commit', '--no-gpg-sign', '-m', 'clone commit2') + git_commit('-m', 'clone commit2') cmd_output('git', 'pull', '--no-commit', '--no-rebase') # We should end up in a pending merge assert repo2_f1.read() == 'parent\n' @@ -98,12 +101,18 @@ def repository_pending_merge(tmpdir): @pytest.mark.usefixtures('f1_is_a_conflict_file') -def test_merge_conflicts_git(): +def test_merge_conflicts_git(capsys): assert main(['f1']) == 1 + out, _ = capsys.readouterr() + assert out == ( + "f1:1: Merge conflict string '<<<<<<<' found\n" + "f1:3: Merge conflict string '=======' found\n" + "f1:5: Merge conflict string '>>>>>>>' found\n" + ) @pytest.mark.parametrize( - 'contents', (b'<<<<<<< HEAD\n', b'=======\n', b'>>>>>>> master\n'), + 'contents', (b'<<<<<<< HEAD\n', b'=======\n', b'>>>>>>> main\n'), ) def test_merge_conflicts_failing(contents, repository_pending_merge): repository_pending_merge.join('f2').write_binary(contents) @@ -134,3 +143,15 @@ def test_care_when_assumed_merge(tmpdir): f = tmpdir.join('README.md') f.write_binary(b'problem\n=======\n') assert main([str(f.realpath()), '--assume-in-merge']) == 1 + + +def test_worktree_merge_conflicts(f1_is_a_conflict_file, tmpdir, capsys): + worktree = tmpdir.join('worktree') + cmd_output('git', 'worktree', 'add', str(worktree)) + with worktree.as_cwd(): + cmd_output( + 'git', 'pull', '--no-rebase', 'origin', 'HEAD', retcode=None, + ) + msg = f1_is_a_conflict_file.join('.git/worktrees/worktree/MERGE_MSG') + assert msg.exists() + test_merge_conflicts_git(capsys) diff --git a/tests/check_shebang_scripts_are_executable_test.py b/tests/check_shebang_scripts_are_executable_test.py new file mode 
100644 index 00000000..e4bd07ca --- /dev/null +++ b/tests/check_shebang_scripts_are_executable_test.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import os + +import pytest + +from pre_commit_hooks.check_shebang_scripts_are_executable import \ + _check_git_filemode +from pre_commit_hooks.check_shebang_scripts_are_executable import main +from pre_commit_hooks.util import cmd_output + + +def test_check_git_filemode_passing(tmpdir): + with tmpdir.as_cwd(): + cmd_output('git', 'init', '.') + + f = tmpdir.join('f') + f.write('#!/usr/bin/env bash') + f_path = str(f) + cmd_output('chmod', '+x', f_path) + cmd_output('git', 'add', f_path) + cmd_output('git', 'update-index', '--chmod=+x', f_path) + + g = tmpdir.join('g').ensure() + g_path = str(g) + cmd_output('git', 'add', g_path) + + files = [f_path, g_path] + assert _check_git_filemode(files) == 0 + + # this is the one we should trigger on + h = tmpdir.join('h') + h.write('#!/usr/bin/env bash') + h_path = str(h) + cmd_output('git', 'add', h_path) + + files = [h_path] + assert _check_git_filemode(files) == 1 + + +def test_check_git_filemode_passing_unusual_characters(tmpdir): + with tmpdir.as_cwd(): + cmd_output('git', 'init', '.') + + f = tmpdir.join('mañana.txt') + f.write('#!/usr/bin/env bash') + f_path = str(f) + cmd_output('chmod', '+x', f_path) + cmd_output('git', 'add', f_path) + cmd_output('git', 'update-index', '--chmod=+x', f_path) + + files = (f_path,) + assert _check_git_filemode(files) == 0 + + +def test_check_git_filemode_failing(tmpdir): + with tmpdir.as_cwd(): + cmd_output('git', 'init', '.') + + f = tmpdir.join('f').ensure() + f.write('#!/usr/bin/env bash') + f_path = str(f) + cmd_output('git', 'add', f_path) + + files = (f_path,) + assert _check_git_filemode(files) == 1 + + +@pytest.mark.parametrize( + ('content', 'mode', 'expected'), + ( + pytest.param('#!python', '+x', 0, id='shebang with executable'), + pytest.param('#!python', '-x', 1, id='shebang without executable'), + pytest.param('', '+x', 0, id='no shebang with executable'), + pytest.param('', '-x', 0, id='no shebang without executable'), + ), +) +def test_git_executable_shebang(temp_git_dir, content, mode, expected): + with temp_git_dir.as_cwd(): + path = temp_git_dir.join('path') + path.write(content) + cmd_output('git', 'add', str(path)) + cmd_output('chmod', mode, str(path)) + cmd_output('git', 'update-index', f'--chmod={mode}', str(path)) + + # simulate how identify chooses that something is executable + filenames = [path for path in [str(path)] if os.access(path, os.X_OK)] + + assert main(filenames) == expected diff --git a/tests/check_symlinks_test.py b/tests/check_symlinks_test.py index 07c11687..e2c2c78f 100644 --- a/tests/check_symlinks_test.py +++ b/tests/check_symlinks_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import pytest diff --git a/tests/check_toml_test.py b/tests/check_toml_test.py index c7251eb0..d594f815 100644 --- a/tests/check_toml_test.py +++ b/tests/check_toml_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pre_commit_hooks.check_toml import main diff --git a/tests/check_vcs_permalinks_test.py b/tests/check_vcs_permalinks_test.py index 19b1c355..324b70ce 100644 --- a/tests/check_vcs_permalinks_test.py +++ b/tests/check_vcs_permalinks_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pre_commit_hooks.check_vcs_permalinks import main @@ -11,10 +13,12 @@ def test_passing(tmpdir): f.write_binary( # permalinks are ok 
b'https://github.com/asottile/test/blob/649e6/foo%20bar#L1\n' + # tags are ok + b'https://github.com/asottile/test/blob/1.0.0/foo%20bar#L1\n' # links to files but not line numbers are ok - b'https://github.com/asottile/test/blob/master/foo%20bar\n' + b'https://github.com/asottile/test/blob/main/foo%20bar\n' # regression test for overly-greedy regex - b'https://github.com/ yes / no ? /blob/master/foo#L1\n', + b'https://github.com/ yes / no ? /blob/main/foo#L1\n', ) assert not main((str(f),)) @@ -22,13 +26,15 @@ def test_passing(tmpdir): def test_failing(tmpdir, capsys): with tmpdir.as_cwd(): tmpdir.join('f.txt').write_binary( - b'https://github.com/asottile/test/blob/master/foo#L1\n', + b'https://github.com/asottile/test/blob/main/foo#L1\n' + b'https://example.com/asottile/test/blob/main/foo#L1\n', ) - assert main(('f.txt',)) + assert main(('f.txt', '--additional-github-domain', 'example.com')) out, _ = capsys.readouterr() assert out == ( - 'f.txt:1:https://github.com/asottile/test/blob/master/foo#L1\n' + 'f.txt:1:https://github.com/asottile/test/blob/main/foo#L1\n' + 'f.txt:2:https://example.com/asottile/test/blob/main/foo#L1\n' '\n' 'Non-permanent github link detected.\n' 'On any page on github press [y] to load a permalink.\n' diff --git a/tests/check_xml_test.py b/tests/check_xml_test.py index 357bad64..767619f1 100644 --- a/tests/check_xml_test.py +++ b/tests/check_xml_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.check_xml import main diff --git a/tests/check_yaml_test.py b/tests/check_yaml_test.py index 1a017a12..54eb16e8 100644 --- a/tests/check_yaml_test.py +++ b/tests/check_yaml_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.check_yaml import main diff --git a/tests/conftest.py b/tests/conftest.py index f92cfc18..807f15b8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.util import cmd_output diff --git a/tests/debug_statement_hook_test.py b/tests/debug_statement_hook_test.py index 428421a0..5a8e0bb2 100644 --- a/tests/debug_statement_hook_test.py +++ b/tests/debug_statement_hook_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast from pre_commit_hooks.debug_statement_hook import Debug @@ -53,7 +55,9 @@ def test_non_utf8_file(tmpdir): assert main((str(f_py),)) == 0 -def test_py37_breakpoint(tmpdir): +def test_py37_breakpoint(tmpdir, capsys): f_py = tmpdir.join('f.py') f_py.write('def f():\n breakpoint()\n') assert main((str(f_py),)) == 1 + out, _ = capsys.readouterr() + assert out == f'{f_py}:2:4: breakpoint called\n' diff --git a/tests/destroyed_symlinks_test.py b/tests/destroyed_symlinks_test.py new file mode 100644 index 00000000..39c474a1 --- /dev/null +++ b/tests/destroyed_symlinks_test.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +import os +import subprocess + +import pytest + +from pre_commit_hooks.destroyed_symlinks import find_destroyed_symlinks +from pre_commit_hooks.destroyed_symlinks import main +from testing.util import git_commit + +TEST_SYMLINK = 'test_symlink' +TEST_SYMLINK_TARGET = '/doesnt/really/matters' +TEST_FILE = 'test_file' +TEST_FILE_RENAMED = f'{TEST_FILE}_renamed' + + +@pytest.fixture +def repo_with_destroyed_symlink(tmpdir): + source_repo = tmpdir.join('src') + os.makedirs(source_repo, exist_ok=True) + test_repo = tmpdir.join('test') + with source_repo.as_cwd(): + subprocess.check_call(('git', 'init')) + 
os.symlink(TEST_SYMLINK_TARGET, TEST_SYMLINK) + with open(TEST_FILE, 'w') as f: + print('some random content', file=f) + subprocess.check_call(('git', 'add', '.')) + git_commit('-m', 'initial') + assert b'120000 ' in subprocess.check_output( + ('git', 'cat-file', '-p', 'HEAD^{tree}'), + ) + subprocess.check_call( + ('git', '-c', 'core.symlinks=false', 'clone', source_repo, test_repo), + ) + with test_repo.as_cwd(): + subprocess.check_call( + ('git', 'config', '--local', 'core.symlinks', 'true'), + ) + subprocess.check_call(('git', 'mv', TEST_FILE, TEST_FILE_RENAMED)) + assert not os.path.islink(test_repo.join(TEST_SYMLINK)) + yield test_repo + + +def test_find_destroyed_symlinks(repo_with_destroyed_symlink): + with repo_with_destroyed_symlink.as_cwd(): + assert find_destroyed_symlinks([]) == [] + assert main([]) == 0 + + subprocess.check_call(('git', 'add', TEST_SYMLINK)) + assert find_destroyed_symlinks([TEST_SYMLINK]) == [TEST_SYMLINK] + assert find_destroyed_symlinks([]) == [] + assert main([]) == 0 + assert find_destroyed_symlinks([TEST_FILE_RENAMED, TEST_FILE]) == [] + ALL_STAGED = [TEST_SYMLINK, TEST_FILE_RENAMED] + assert find_destroyed_symlinks(ALL_STAGED) == [TEST_SYMLINK] + assert main(ALL_STAGED) != 0 + + with open(TEST_SYMLINK, 'a') as f: + print(file=f) # add trailing newline + subprocess.check_call(['git', 'add', TEST_SYMLINK]) + assert find_destroyed_symlinks(ALL_STAGED) == [TEST_SYMLINK] + assert main(ALL_STAGED) != 0 + + with open(TEST_SYMLINK, 'w') as f: + print('0' * len(TEST_SYMLINK_TARGET), file=f) + subprocess.check_call(('git', 'add', TEST_SYMLINK)) + assert find_destroyed_symlinks(ALL_STAGED) == [] + assert main(ALL_STAGED) == 0 + + with open(TEST_SYMLINK, 'w') as f: + print('0' * (len(TEST_SYMLINK_TARGET) + 3), file=f) + subprocess.check_call(('git', 'add', TEST_SYMLINK)) + assert find_destroyed_symlinks(ALL_STAGED) == [] + assert main(ALL_STAGED) == 0 diff --git a/tests/detect_aws_credentials_test.py b/tests/detect_aws_credentials_test.py index 4f00744e..afda47a9 100644 --- a/tests/detect_aws_credentials_test.py +++ b/tests/detect_aws_credentials_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest.mock import patch import pytest @@ -13,15 +15,15 @@ ('env_vars', 'values'), ( ({}, set()), - ({'AWS_DUMMY_KEY': '/foo'}, set()), + ({'AWS_PLACEHOLDER_KEY': '/foo'}, set()), ({'AWS_CONFIG_FILE': '/foo'}, {'/foo'}), ({'AWS_CREDENTIAL_FILE': '/foo'}, {'/foo'}), ({'AWS_SHARED_CREDENTIALS_FILE': '/foo'}, {'/foo'}), ({'BOTO_CONFIG': '/foo'}, {'/foo'}), - ({'AWS_DUMMY_KEY': '/foo', 'AWS_CONFIG_FILE': '/bar'}, {'/bar'}), + ({'AWS_PLACEHOLDER_KEY': '/foo', 'AWS_CONFIG_FILE': '/bar'}, {'/bar'}), ( { - 'AWS_DUMMY_KEY': '/foo', 'AWS_CONFIG_FILE': '/bar', + 'AWS_PLACEHOLDER_KEY': '/foo', 'AWS_CONFIG_FILE': '/bar', 'AWS_CREDENTIAL_FILE': '/baz', }, {'/bar', '/baz'}, @@ -44,13 +46,16 @@ def test_get_aws_credentials_file_from_env(env_vars, values): ('env_vars', 'values'), ( ({}, set()), - ({'AWS_DUMMY_KEY': 'foo'}, set()), + ({'AWS_PLACEHOLDER_KEY': 'foo'}, set()), ({'AWS_SECRET_ACCESS_KEY': 'foo'}, {'foo'}), ({'AWS_SECURITY_TOKEN': 'foo'}, {'foo'}), ({'AWS_SESSION_TOKEN': 'foo'}, {'foo'}), ({'AWS_SESSION_TOKEN': ''}, set()), ({'AWS_SESSION_TOKEN': 'foo', 'AWS_SECURITY_TOKEN': ''}, {'foo'}), - ({'AWS_DUMMY_KEY': 'foo', 'AWS_SECRET_ACCESS_KEY': 'bar'}, {'bar'}), + ( + {'AWS_PLACEHOLDER_KEY': 'foo', 'AWS_SECRET_ACCESS_KEY': 'bar'}, + {'bar'}, + ), ( {'AWS_SECRET_ACCESS_KEY': 'foo', 'AWS_SECURITY_TOKEN': 'bar'}, {'foo', 'bar'}, diff --git 
a/tests/detect_private_key_test.py b/tests/detect_private_key_test.py index 72810008..41f8bae5 100644 --- a/tests/detect_private_key_test.py +++ b/tests/detect_private_key_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.detect_private_key import main @@ -10,6 +12,8 @@ (b'-----BEGIN OPENSSH PRIVATE KEY-----', 1), (b'PuTTY-User-Key-File-2: ssh-rsa', 1), (b'---- BEGIN SSH2 ENCRYPTED PRIVATE KEY ----', 1), + (b'-----BEGIN ENCRYPTED PRIVATE KEY-----', 1), + (b'-----BEGIN OpenVPN Static key V1-----', 1), (b'ssh-rsa DATA', 0), (b'ssh-dsa DATA', 0), # Some arbitrary binary data diff --git a/tests/end_of_file_fixer_test.py b/tests/end_of_file_fixer_test.py index 60b9e82f..8a5d889e 100644 --- a/tests/end_of_file_fixer_test.py +++ b/tests/end_of_file_fixer_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import pytest diff --git a/tests/file_contents_sorter_test.py b/tests/file_contents_sorter_test.py index 9ebb021a..f178ae6e 100644 --- a/tests/file_contents_sorter_test.py +++ b/tests/file_contents_sorter_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.file_contents_sorter import FAIL @@ -8,7 +10,9 @@ @pytest.mark.parametrize( ('input_s', 'argv', 'expected_retval', 'output'), ( - (b'', [], FAIL, b'\n'), + (b'', [], PASS, b''), + (b'\n', [], FAIL, b''), + (b'\n\n', [], FAIL, b''), (b'lonesome\n', [], PASS, b'lonesome\n'), (b'missing_newline', [], FAIL, b'missing_newline\n'), (b'newline\nmissing', [], FAIL, b'missing\nnewline\n'), @@ -45,6 +49,24 @@ FAIL, b'fee\nFie\nFoe\nfum\n', ), + ( + b'Fie\nFoe\nfee\nfee\nfum\n', + ['--ignore-case'], + FAIL, + b'fee\nfee\nFie\nFoe\nfum\n', + ), + ( + b'Fie\nFoe\nfee\nfum\n', + ['--unique'], + PASS, + b'Fie\nFoe\nfee\nfum\n', + ), + ( + b'Fie\nFie\nFoe\nfee\nfum\n', + ['--unique'], + FAIL, + b'Fie\nFoe\nfee\nfum\n', + ), ), ) def test_integration(input_s, argv, expected_retval, output, tmpdir): @@ -55,3 +77,24 @@ def test_integration(input_s, argv, expected_retval, output, tmpdir): assert path.read_binary() == output assert output_retval == expected_retval + + +@pytest.mark.parametrize( + ('input_s', 'argv'), + ( + ( + b'fee\nFie\nFoe\nfum\n', + ['--unique', '--ignore-case'], + ), + ( + b'fee\nfee\nFie\nFoe\nfum\n', + ['--unique', '--ignore-case'], + ), + ), +) +def test_integration_invalid_args(input_s, argv, tmpdir): + path = tmpdir.join('file.txt') + path.write_binary(input_s) + + with pytest.raises(SystemExit): + main([str(path)] + argv) diff --git a/tests/check_byte_order_marker_test.py b/tests/fix_byte_order_marker_test.py similarity index 51% rename from tests/check_byte_order_marker_test.py rename to tests/fix_byte_order_marker_test.py index 4c402476..d7a65990 100644 --- a/tests/check_byte_order_marker_test.py +++ b/tests/fix_byte_order_marker_test.py @@ -1,13 +1,15 @@ -from pre_commit_hooks import check_byte_order_marker +from __future__ import annotations + +from pre_commit_hooks import fix_byte_order_marker def test_failure(tmpdir): f = tmpdir.join('f.txt') f.write_text('ohai', encoding='utf-8-sig') - assert check_byte_order_marker.main((str(f),)) == 1 + assert fix_byte_order_marker.main((str(f),)) == 1 def test_success(tmpdir): f = tmpdir.join('f.txt') f.write_text('ohai', encoding='utf-8') - assert check_byte_order_marker.main((str(f),)) == 0 + assert fix_byte_order_marker.main((str(f),)) == 0 diff --git a/tests/fix_encoding_pragma_test.py b/tests/fix_encoding_pragma_test.py deleted file mode 100644 index f3afa094..00000000 --- 
a/tests/fix_encoding_pragma_test.py +++ /dev/null @@ -1,159 +0,0 @@ -import io - -import pytest - -from pre_commit_hooks.fix_encoding_pragma import _normalize_pragma -from pre_commit_hooks.fix_encoding_pragma import fix_encoding_pragma -from pre_commit_hooks.fix_encoding_pragma import main - - -def test_integration_inserting_pragma(tmpdir): - path = tmpdir.join('foo.py') - path.write_binary(b'import httplib\n') - - assert main((str(path),)) == 1 - - assert path.read_binary() == ( - b'# -*- coding: utf-8 -*-\n' - b'import httplib\n' - ) - - -def test_integration_ok(tmpdir): - path = tmpdir.join('foo.py') - path.write_binary(b'# -*- coding: utf-8 -*-\nx = 1\n') - assert main((str(path),)) == 0 - - -def test_integration_remove(tmpdir): - path = tmpdir.join('foo.py') - path.write_binary(b'# -*- coding: utf-8 -*-\nx = 1\n') - - assert main((str(path), '--remove')) == 1 - - assert path.read_binary() == b'x = 1\n' - - -def test_integration_remove_ok(tmpdir): - path = tmpdir.join('foo.py') - path.write_binary(b'x = 1\n') - assert main((str(path), '--remove')) == 0 - - -@pytest.mark.parametrize( - 'input_str', - ( - b'', - ( - b'# -*- coding: utf-8 -*-\n' - b'x = 1\n' - ), - ( - b'#!/usr/bin/env python\n' - b'# -*- coding: utf-8 -*-\n' - b'foo = "bar"\n' - ), - ), -) -def test_ok_inputs(input_str): - bytesio = io.BytesIO(input_str) - assert fix_encoding_pragma(bytesio) == 0 - bytesio.seek(0) - assert bytesio.read() == input_str - - -@pytest.mark.parametrize( - ('input_str', 'output'), - ( - ( - b'import httplib\n', - b'# -*- coding: utf-8 -*-\n' - b'import httplib\n', - ), - ( - b'#!/usr/bin/env python\n' - b'x = 1\n', - b'#!/usr/bin/env python\n' - b'# -*- coding: utf-8 -*-\n' - b'x = 1\n', - ), - ( - b'#coding=utf-8\n' - b'x = 1\n', - b'# -*- coding: utf-8 -*-\n' - b'x = 1\n', - ), - ( - b'#!/usr/bin/env python\n' - b'#coding=utf8\n' - b'x = 1\n', - b'#!/usr/bin/env python\n' - b'# -*- coding: utf-8 -*-\n' - b'x = 1\n', - ), - # These should each get truncated - (b'#coding: utf-8\n', b''), - (b'# -*- coding: utf-8 -*-\n', b''), - (b'#!/usr/bin/env python\n', b''), - (b'#!/usr/bin/env python\n#coding: utf8\n', b''), - (b'#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n', b''), - ), -) -def test_not_ok_inputs(input_str, output): - bytesio = io.BytesIO(input_str) - assert fix_encoding_pragma(bytesio) == 1 - bytesio.seek(0) - assert bytesio.read() == output - - -def test_ok_input_alternate_pragma(): - input_s = b'# coding: utf-8\nx = 1\n' - bytesio = io.BytesIO(input_s) - ret = fix_encoding_pragma(bytesio, expected_pragma=b'# coding: utf-8') - assert ret == 0 - bytesio.seek(0) - assert bytesio.read() == input_s - - -def test_not_ok_input_alternate_pragma(): - bytesio = io.BytesIO(b'x = 1\n') - ret = fix_encoding_pragma(bytesio, expected_pragma=b'# coding: utf-8') - assert ret == 1 - bytesio.seek(0) - assert bytesio.read() == b'# coding: utf-8\nx = 1\n' - - -@pytest.mark.parametrize( - ('input_s', 'expected'), - ( - ('# coding: utf-8', b'# coding: utf-8'), - # trailing whitespace - ('# coding: utf-8\n', b'# coding: utf-8'), - ), -) -def test_normalize_pragma(input_s, expected): - assert _normalize_pragma(input_s) == expected - - -def test_integration_alternate_pragma(tmpdir, capsys): - f = tmpdir.join('f.py') - f.write('x = 1\n') - - pragma = '# coding: utf-8' - assert main((str(f), '--pragma', pragma)) == 1 - assert f.read() == '# coding: utf-8\nx = 1\n' - out, _ = capsys.readouterr() - assert out == f'Added `# coding: utf-8` to {str(f)}\n' - - -def test_crlf_ok(tmpdir): - f = tmpdir.join('f.py') - 
f.write_binary(b'# -*- coding: utf-8 -*-\r\nx = 1\r\n') - assert not main((str(f),)) - - -def test_crfl_adds(tmpdir): - f = tmpdir.join('f.py') - f.write_binary(b'x = 1\r\n') - assert main((str(f),)) - assert f.read_binary() == b'# -*- coding: utf-8 -*-\r\nx = 1\r\n' diff --git a/tests/forbid_new_submodules_test.py b/tests/forbid_new_submodules_test.py index 4871ae7f..058a3294 100644 --- a/tests/forbid_new_submodules_test.py +++ b/tests/forbid_new_submodules_test.py @@ -1,22 +1,22 @@ +from __future__ import annotations + +import os import subprocess +from unittest import mock import pytest from pre_commit_hooks.forbid_new_submodules import main +from testing.util import git_commit @pytest.fixture def git_dir_with_git_dir(tmpdir): with tmpdir.as_cwd(): subprocess.check_call(('git', 'init', '.')) - subprocess.check_call(( - 'git', 'commit', '-m', 'init', '--allow-empty', '--no-gpg-sign', - )) + git_commit('--allow-empty', '-m', 'init') subprocess.check_call(('git', 'init', 'foo')) - subprocess.check_call( - ('git', 'commit', '-m', 'init', '--allow-empty', '--no-gpg-sign'), - cwd=str(tmpdir.join('foo')), - ) + git_commit('--allow-empty', '-m', 'init', cwd=str(tmpdir.join('foo'))) yield @@ -31,7 +31,24 @@ def git_dir_with_git_dir(tmpdir): ) def test_main_new_submodule(git_dir_with_git_dir, capsys, cmd): subprocess.check_call(cmd) - assert main() == 1 + assert main(('random_non-related_file',)) == 0 + assert main(('foo',)) == 1 + out, _ = capsys.readouterr() + assert out.startswith('foo: new submodule introduced\n') + + +def test_main_new_submodule_committed(git_dir_with_git_dir, capsys): + rev_parse_cmd = ('git', 'rev-parse', 'HEAD') + from_ref = subprocess.check_output(rev_parse_cmd).decode().strip() + subprocess.check_call(('git', 'submodule', 'add', './foo')) + git_commit('-m', 'new submodule') + to_ref = subprocess.check_output(rev_parse_cmd).decode().strip() + with mock.patch.dict( + os.environ, + {'PRE_COMMIT_FROM_REF': from_ref, 'PRE_COMMIT_TO_REF': to_ref}, + ): + assert main(('random_non-related_file',)) == 0 + assert main(('foo',)) == 1 out, _ = capsys.readouterr() assert out.startswith('foo: new submodule introduced\n') @@ -39,4 +56,4 @@ def test_main_new_submodule(git_dir_with_git_dir, capsys, cmd): def test_main_no_new_submodule(git_dir_with_git_dir): open('test.py', 'a+').close() subprocess.check_call(('git', 'add', 'test.py')) - assert main() == 0 + assert main(('test.py',)) == 0 diff --git a/tests/mixed_line_ending_test.py b/tests/mixed_line_ending_test.py index f1c26418..a7e79719 100644 --- a/tests/mixed_line_ending_test.py +++ b/tests/mixed_line_ending_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.mixed_line_ending import main diff --git a/tests/no_commit_to_branch_test.py b/tests/no_commit_to_branch_test.py index 72b32e64..7d37e490 100644 --- a/tests/no_commit_to_branch_test.py +++ b/tests/no_commit_to_branch_test.py @@ -1,20 +1,23 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.no_commit_to_branch import is_on_branch from pre_commit_hooks.no_commit_to_branch import main from pre_commit_hooks.util import cmd_output +from testing.util import git_commit def test_other_branch(temp_git_dir): with temp_git_dir.as_cwd(): cmd_output('git', 'checkout', '-b', 'anotherbranch') - assert is_on_branch({'master'}) is False + assert is_on_branch({'placeholder'}) is False def test_multi_branch(temp_git_dir): with temp_git_dir.as_cwd(): cmd_output('git', 'checkout', '-b', 'another/branch') - assert 
is_on_branch({'master'}) is False + assert is_on_branch({'placeholder'}) is False def test_multi_branch_fail(temp_git_dir): @@ -23,9 +26,10 @@ def test_multi_branch_fail(temp_git_dir): assert is_on_branch({'another/branch'}) is True -def test_master_branch(temp_git_dir): +def test_exact_branch(temp_git_dir): with temp_git_dir.as_cwd(): - assert is_on_branch({'master'}) is True + cmd_output('git', 'checkout', '-b', 'branchname') + assert is_on_branch({'branchname'}) is True def test_main_branch_call(temp_git_dir): @@ -47,11 +51,11 @@ def test_branch_pattern_fail(temp_git_dir): assert is_on_branch(set(), {'another/.*'}) is True -@pytest.mark.parametrize('branch_name', ('master', 'another/branch')) +@pytest.mark.parametrize('branch_name', ('somebranch', 'another/branch')) def test_branch_pattern_multiple_branches_fail(temp_git_dir, branch_name): with temp_git_dir.as_cwd(): cmd_output('git', 'checkout', '-b', branch_name) - assert main(('--branch', 'master', '--pattern', 'another/.*')) + assert main(('--branch', 'somebranch', '--pattern', 'another/.*')) def test_main_default_call(temp_git_dir): @@ -62,8 +66,15 @@ def test_main_default_call(temp_git_dir): def test_not_on_a_branch(temp_git_dir): with temp_git_dir.as_cwd(): - cmd_output('git', 'commit', '--no-gpg-sign', '--allow-empty', '-m1') + git_commit('--allow-empty', '-m1') head = cmd_output('git', 'rev-parse', 'HEAD').strip() cmd_output('git', 'checkout', head) # we're not on a branch! assert main(()) == 0 + + +@pytest.mark.parametrize('branch_name', ('master', 'main')) +def test_default_branch_names(temp_git_dir, branch_name): + with temp_git_dir.as_cwd(): + cmd_output('git', 'checkout', '-b', branch_name) + assert main(()) == 1 diff --git a/tests/pretty_format_json_test.py b/tests/pretty_format_json_test.py index 7fda23b3..68b6d7a1 100644 --- a/tests/pretty_format_json_test.py +++ b/tests/pretty_format_json_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import shutil @@ -80,6 +82,24 @@ def test_autofix_main(tmpdir): assert ret == 0 +def test_invalid_main(tmpdir): + srcfile1 = tmpdir.join('not_valid_json.json') + srcfile1.write( + '{\n' + ' // not json\n' + ' "a": "b"\n' + '}', + ) + srcfile2 = tmpdir.join('to_be_json_formatted.json') + srcfile2.write('{ "a": "b" }') + + # it should have skipped the first file and formatted the second one + assert main(['--autofix', str(srcfile1), str(srcfile2)]) == 1 + + # confirm second file was formatted (shouldn't trigger linter again) + assert main([str(srcfile2)]) == 0 + + def test_orderfile_get_pretty_format(): ret = main(( '--top-keys=alist', get_resource_path('pretty_formatted_json.json'), diff --git a/tests/readme_test.py b/tests/readme_test.py index 7df7fcfe..038868d8 100644 --- a/tests/readme_test.py +++ b/tests/readme_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pre_commit_hooks.check_yaml import yaml diff --git a/tests/removed_test.py b/tests/removed_test.py index d635eb1e..cd669578 100644 --- a/tests/removed_test.py +++ b/tests/removed_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.removed import main diff --git a/tests/requirements_txt_fixer_test.py b/tests/requirements_txt_fixer_test.py index f4f679da..c0d2c65d 100644 --- a/tests/requirements_txt_fixer_test.py +++ b/tests/requirements_txt_fixer_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.requirements_txt_fixer import FAIL @@ -30,6 +32,16 @@ ), (b'#comment\n\nfoo\nbar\n', FAIL, 
b'#comment\n\nbar\nfoo\n'), (b'#comment\n\nbar\nfoo\n', PASS, b'#comment\n\nbar\nfoo\n'), + ( + b'foo\n\t#comment with indent\nbar\n', + FAIL, + b'\t#comment with indent\nbar\nfoo\n', + ), + ( + b'bar\n\t#comment with indent\nfoo\n', + PASS, + b'bar\n\t#comment with indent\nfoo\n', + ), (b'\nfoo\nbar\n', FAIL, b'bar\n\nfoo\n'), (b'\nbar\nfoo\n', PASS, b'\nbar\nfoo\n'), ( @@ -56,6 +68,12 @@ b'f<=2\n' b'g<2\n', ), + (b'a==1\nb==1\na==1\n', FAIL, b'a==1\nb==1\n'), + ( + b'a==1\nb==1\n#comment about a\na==1\n', + FAIL, + b'#comment about a\na==1\nb==1\n', + ), (b'ocflib\nDjango\nPyMySQL\n', FAIL, b'Django\nocflib\nPyMySQL\n'), ( b'-e git+ssh://git_url@tag#egg=ocflib\nDjango\nPyMySQL\n', @@ -64,6 +82,8 @@ ), (b'bar\npkg-resources==0.0.0\nfoo\n', FAIL, b'bar\nfoo\n'), (b'foo\npkg-resources==0.0.0\nbar\n', FAIL, b'bar\nfoo\n'), + (b'bar\npkg_resources==0.0.0\nfoo\n', FAIL, b'bar\nfoo\n'), + (b'foo\npkg_resources==0.0.0\nbar\n', FAIL, b'bar\nfoo\n'), ( b'git+ssh://git_url@tag#egg=ocflib\nDjango\nijk\n', FAIL, diff --git a/tests/sort_simple_yaml_test.py b/tests/sort_simple_yaml_test.py index a682c158..6cbda857 100644 --- a/tests/sort_simple_yaml_test.py +++ b/tests/sort_simple_yaml_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import pytest diff --git a/tests/string_fixer_test.py b/tests/string_fixer_test.py index 6ddb0ac8..8eb164c5 100644 --- a/tests/string_fixer_test.py +++ b/tests/string_fixer_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import textwrap import pytest @@ -35,6 +37,12 @@ 1, ), ('"foo""bar"', "'foo''bar'", 1), + pytest.param( + "f'hello{\"world\"}'", + "f'hello{\"world\"}'", + 0, + id='ignore nested fstrings', + ), ) diff --git a/tests/tests_should_end_in_test_test.py b/tests/tests_should_end_in_test_test.py index 4df2963f..2b5a0dea 100644 --- a/tests/tests_should_end_in_test_test.py +++ b/tests/tests_should_end_in_test_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pre_commit_hooks.tests_should_end_in_test import main @@ -41,3 +43,8 @@ def test_main_not_django_fails(): def test_main_django_fails(): ret = main(['--django', 'foo_test.py', 'test_bar.py', 'test_baz.py']) assert ret == 1 + + +def test_main_pytest_test_first(): + assert main(['--pytest-test-first', 'test_foo.py']) == 0 + assert main(['--pytest-test-first', 'foo_test.py']) == 1 diff --git a/tests/trailing_whitespace_fixer_test.py b/tests/trailing_whitespace_fixer_test.py index bb3b62d4..c07497a2 100644 --- a/tests/trailing_whitespace_fixer_test.py +++ b/tests/trailing_whitespace_fixer_test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.trailing_whitespace_fixer import main diff --git a/tests/util_test.py b/tests/util_test.py index b42ee6f9..92473e59 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import pytest from pre_commit_hooks.util import CalledProcessError from pre_commit_hooks.util import cmd_output +from pre_commit_hooks.util import zsplit def test_raises_on_error(): @@ -12,3 +15,13 @@ def test_raises_on_error(): def test_output(): ret = cmd_output('sh', '-c', 'echo hi') assert ret == 'hi\n' + + +@pytest.mark.parametrize('out', ('\0f1\0f2\0', '\0f1\0f2', 'f1\0f2\0')) +def test_check_zsplits_str_correctly(out): + assert zsplit(out) == ['f1', 'f2'] + + +@pytest.mark.parametrize('out', ('\0\0', '\0', '')) +def test_check_zsplit_returns_empty(out): + assert zsplit(out) == [] diff --git a/tox.ini b/tox.ini index 965eba92..11340f4d 100644 --- 
a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py36,py37,py38,pypy3,pre-commit +envlist = py,pre-commit [testenv] deps = -rrequirements-dev.txt @@ -11,7 +11,7 @@ setenv = commands = coverage erase coverage run -m pytest {posargs:tests} - coverage report --fail-under 100 + coverage report [testenv:pre-commit] skip_install = true