Compare commits

david/map-...david/gene

143 Commits
| SHA1 |
|---|
| b1e64a0da4 |
| 65982a1e14 |
| 57d1f7132d |
| 7a75702237 |
| 9ca632c84f |
| 64fe7d30a3 |
| beeeb8d5c5 |
| b6fca52855 |
| ac7f882c78 |
| aef0a107a8 |
| 512395f4e6 |
| 5dec37fbaf |
| 4bda9dad68 |
| 9d972d0583 |
| 1bbb553d6f |
| bb4c51afb2 |
| 3dbdd2b883 |
| d8e43bf9f7 |
| ee448eab2d |
| 307b7df027 |
| e139104aba |
| 9bb9b54168 |
| 262f2767ca |
| 1de9dac9d5 |
| 1cf6c2439f |
| 2b51ec6531 |
| b85c995927 |
| fd7eb1e22f |
| 4de7d653bd |
| 9cb37db510 |
| ed06fb5ce2 |
| 54df73c9f7 |
| d7524ea6d4 |
| bf66178959 |
| 9cdac2d6fb |
| 79706a2e26 |
| 25853e2377 |
| 61f906d8e7 |
| 08a561fc05 |
| aa5d665d52 |
| d55edb3d74 |
| ab86ae1760 |
| 916968d0ff |
| deb3d3d150 |
| 982a0a2a7c |
| f893b19930 |
| c96ebe3936 |
| 22ca5dd890 |
| f7995f4aef |
| 480fb278d0 |
| b2f364d9cb |
| aa82137d9f |
| adfe2438e9 |
| 3247991429 |
| 08fcf7e106 |
| 2467c4352e |
| a27c64811e |
| 5d52902e18 |
| eb6154f792 |
| fdfb51b595 |
| 7ee863b6d7 |
| 8ade6c4eaf |
| 9e45bfa9fd |
| 7509d376eb |
| a24a4b55ee |
| 888a22e849 |
| 08c1d3660c |
| 670fffef37 |
| de63f408b9 |
| 555b9f78d6 |
| c6516e9b60 |
| 1aaa0847ab |
| b49aa35074 |
| 1e34f3f20a |
| 77b2cee223 |
| 200349c6e8 |
| 0d4f7dde99 |
| cb1ba0d4c2 |
| cda376afe0 |
| b14fc96141 |
| c452a2cb79 |
| 4c3e1930f6 |
| 5d7c17c20a |
| c402bf8ae2 |
| 6bc33a041f |
| 0a0eaf5a9b |
| 8e52027a88 |
| 78db56e362 |
| 046893c186 |
| 9cea752934 |
| 3b913ce652 |
| aee9350df1 |
| 4e97b97a76 |
| 00214fc60c |
| ec5584219e |
| d5e48a0f80 |
| f40a0b3800 |
| bbfcf6e111 |
| 5518c84ab3 |
| c71ce006c4 |
| 2f3bf03d82 |
| 24a6d247f1 |
| 88f7644551 |
| 6be46033b9 |
| 9e292f1522 |
| 74b60212dc |
| bfe501b802 |
| 6f2b874d6c |
| 17dc2e4d80 |
| 694e7ed52e |
| fe953e5c5c |
| 0bf5d2a204 |
| 8eb8d25565 |
| 9b1b58a451 |
| fa7798ddd9 |
| 8223fea062 |
| 5a608f7366 |
| c2d7c673ca |
| 8a6db4f257 |
| ffcdd4ea42 |
| 0ff0c70302 |
| f77315776c |
| 04dc223710 |
| 0d7ed32494 |
| 4ca38b2974 |
| a8039f80f0 |
| 5c2d4d8d8f |
| 26082e8ec1 |
| b6522cb534 |
| 637a2b1170 |
| 3927b0c931 |
| ca1f66a657 |
| 166b63ad4d |
| 9363eeca26 |
| f4362b95d7 |
| f703536977 |
| 1842cfe333 |
| b3c4005289 |
| c2bc15bc15 |
| e586f6dcc4 |
| 76a6b7e3e2 |
| 1ce65714c0 |
| d9aaacd01f |
42  .github/workflows/ci.yaml  (vendored)

```diff
@@ -250,7 +250,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
         uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
         with:
@@ -259,6 +259,10 @@ jobs:
         uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
         with:
           tool: cargo-insta
+      - name: "Install uv"
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        with:
+          enable-cache: "true"
       - name: ty mdtests (GitHub annotations)
         if: ${{ needs.determine_changes.outputs.ty == 'true' }}
         env:
@@ -308,7 +312,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
         uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
         with:
@@ -317,6 +321,10 @@ jobs:
         uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
         with:
           tool: cargo-insta
+      - name: "Install uv"
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        with:
+          enable-cache: "true"
       - name: "Run tests"
         shell: bash
         env:
@@ -340,6 +348,10 @@ jobs:
         uses: taiki-e/install-action@6064345e6658255e90e9500fdf9a06ab77e6909c # v2.57.6
         with:
           tool: cargo-nextest
+      - name: "Install uv"
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
+        with:
+          enable-cache: "true"
       - name: "Run tests"
         shell: bash
         env:
@@ -393,7 +405,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Build"
         run: cargo build --release --locked

@@ -418,7 +430,7 @@ jobs:
           MSRV: ${{ steps.msrv.outputs.value }}
         run: rustup default "${MSRV}"
       - name: "Install mold"
-        uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Build tests"
         shell: bash
         env:
@@ -441,9 +453,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
-        with:
-          tool: cargo-fuzz@0.11.2
+        uses: cargo-bins/cargo-binstall@837578dfb436769f1e6669b2e23ffea9d9d2da8f # v1.15.4
+      - name: "Install cargo-fuzz"
+        # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
+        run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -463,7 +473,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         name: Download Ruff binary to test
         id: download-cached-binary
@@ -664,7 +674,7 @@ jobs:
           branch: ${{ github.event.pull_request.base.ref }}
           workflow: "ci.yaml"
           check_artifacts: true
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: Fuzz
         env:
           FORCE_COLOR: 1
@@ -694,7 +704,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@0dca8cf8dfb40cb77a29cece06933ce674674523 # v1.15.1
+      - uses: cargo-bins/cargo-binstall@837578dfb436769f1e6669b2e23ffea9d9d2da8f # v1.15.4
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear

@@ -734,7 +744,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
@@ -777,7 +787,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: "Install Insiders dependencies"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         run: uv pip install -r docs/requirements-insiders.txt --system
@@ -909,7 +919,7 @@ jobs:
           persist-credentials: false

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - name: "Install Rust toolchain"
         run: rustup show
@@ -923,7 +933,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+        uses: CodSpeedHQ/action@76578c2a7ddd928664caa737f0e962e3085d4e7c # v3.8.1
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
@@ -942,7 +952,7 @@ jobs:
           persist-credentials: false

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - name: "Install Rust toolchain"
         run: rustup show
@@ -956,7 +966,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
+        uses: CodSpeedHQ/action@76578c2a7ddd928664caa737f0e962e3085d4e7c # v3.8.1
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
```
4  .github/workflows/daily_fuzz.yaml  (vendored)

```diff
@@ -34,11 +34,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: Build ruff
         # A debug build means the script runs slower once it gets started,
```
4  .github/workflows/mypy_primer.yaml  (vendored)

```diff
@@ -39,7 +39,7 @@
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
@@ -82,7 +82,7 @@
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
```
2  .github/workflows/publish-pypi.yml  (vendored)

```diff
@@ -22,7 +22,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels-*
```
6  .github/workflows/sync_typeshed.yaml  (vendored)

```diff
@@ -65,7 +65,7 @@ jobs:
         run: |
           git config --global user.name typeshedbot
           git config --global user.email '<>'
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: Sync typeshed stubs
         run: |
           rm -rf "ruff/${VENDORED_TYPESHED}"
@@ -117,7 +117,7 @@ jobs:
         with:
           persist-credentials: true
           ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: Setup git
         run: |
           git config --global user.name typeshedbot
@@ -155,7 +155,7 @@ jobs:
         with:
           persist-credentials: true
           ref: ${{ env.UPSTREAM_BRANCH}}
-      - uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+      - uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1
       - name: Setup git
         run: |
           git config --global user.name typeshedbot
```
5  .github/workflows/ty-ecosystem-analyzer.yaml  (vendored)

```diff
@@ -33,7 +33,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
@@ -64,11 +64,12 @@ jobs:

           cd ..

-          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@27dd66d9e397d986ef9c631119ee09556eab8af9"
+          uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@1f560d07d672effae250e3d271da53d96c5260ff"

           ecosystem-analyzer \
             --repository ruff \
             diff \
+            --profile=release \
             --projects-old ruff/projects_old.txt \
             --projects-new ruff/projects_new.txt \
             --old old_commit \
```
2  .github/workflows/ty-ecosystem-report.yaml  (vendored)

```diff
@@ -29,7 +29,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b # v6.6.0
+        uses: astral-sh/setup-uv@557e51de59eb14aaaba2ed9621916900a91d50c6 # v6.6.1

       - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
```
55  CHANGELOG.md

@@ -1,5 +1,60 @@

# Changelog

## 0.12.12

### Preview features

- Show fixes by default ([#19919](https://github.com/astral-sh/ruff/pull/19919))
- \[`airflow`\] Convert `DatasetOrTimeSchedule(datasets=...)` to `AssetOrTimeSchedule(assets=...)` (`AIR311`) ([#20202](https://github.com/astral-sh/ruff/pull/20202))
- \[`airflow`\] Improve the `AIR002` error message ([#20173](https://github.com/astral-sh/ruff/pull/20173))
- \[`airflow`\] Move `airflow.operators.postgres_operator.Mapping` from `AIR302` to `AIR301` ([#20172](https://github.com/astral-sh/ruff/pull/20172))
- \[`flake8-async`\] Implement `blocking-input` rule (`ASYNC250`) ([#20122](https://github.com/astral-sh/ruff/pull/20122))
- \[`flake8-use-pathlib`\] Make `PTH119` and `PTH120` fixes unsafe because they can change behavior ([#20118](https://github.com/astral-sh/ruff/pull/20118))
- \[`pylint`\] Add U+061C to `PLE2502` ([#20106](https://github.com/astral-sh/ruff/pull/20106))
- \[`ruff`\] Fix false negative for empty f-strings in `deque` calls (`RUF037`) ([#20109](https://github.com/astral-sh/ruff/pull/20109))
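As context for the `RUF037` entry above (`unnecessary-empty-iterable-within-deque-call`): the rule reports empty iterables passed to `deque`, and the fixed false negative concerned empty f-strings. A minimal illustrative sketch in Python (hypothetical code, not taken from this PR):

```python
from collections import deque

d1 = deque([])   # RUF037: the empty list is redundant; `deque()` is equivalent
d2 = deque(f"")  # an empty f-string is also an empty iterable; this case was
                 # previously missed and is now flagged as well
```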
### Bug fixes

- Less confidently mark f-strings as empty when inferring truthiness ([#20152](https://github.com/astral-sh/ruff/pull/20152))
- \[`fastapi`\] Fix false positive for paths with spaces around parameters (`FAST003`) ([#20077](https://github.com/astral-sh/ruff/pull/20077))
- \[`flake8-comprehensions`\] Skip `C417` when lambda contains `yield`/`yield from` ([#20201](https://github.com/astral-sh/ruff/pull/20201))
- \[`perflint`\] Handle tuples in dictionary comprehensions (`PERF403`) ([#19934](https://github.com/astral-sh/ruff/pull/19934))
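To illustrate the `C417` change above: the rule rewrites `map(lambda ...)` calls into comprehensions, but a lambda containing `yield` is itself a generator function, so the rewrite would not be behavior-preserving and is now skipped. A hypothetical sketch:

```python
# C417 normally suggests a comprehension instead of map() over a lambda:
squares = map(lambda x: x * x, range(10))  # -> (x * x for x in range(10))

# A `yield` turns the lambda into a generator function, so a comprehension
# rewrite would change behavior; C417 now leaves this call alone:
wrapped = map(lambda x: (yield x), range(10))
```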
### Rule changes

- \[`pycodestyle`\] Preserve return type annotation for `ParamSpec` (`E731`) ([#20108](https://github.com/astral-sh/ruff/pull/20108))
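A rough sketch of the kind of case the `E731` change targets; the names and the exact fix output below are illustrative assumptions, not taken from the PR:

```python
from typing import Callable, ParamSpec

P = ParamSpec("P")

# E731 rewrites assigned lambdas into `def` statements. When the annotation
# is a Callable parameterized by a ParamSpec, the declared return type is
# now carried over into the rewritten function:
handler: Callable[P, int] = lambda *args, **kwargs: 0

# fix output (sketch):
def handler(*args, **kwargs) -> int:
    return 0
```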
### Documentation

- Add fix safety sections to docs ([#17490](https://github.com/astral-sh/ruff/pull/17490),[#17499](https://github.com/astral-sh/ruff/pull/17499))

## 0.12.11
### Preview features

- \[`airflow`\] Extend `AIR311` and `AIR312` rules ([#20082](https://github.com/astral-sh/ruff/pull/20082))
- \[`airflow`\] Replace wrong path `airflow.io.storage` with `airflow.io.store` (`AIR311`) ([#20081](https://github.com/astral-sh/ruff/pull/20081))
- \[`flake8-async`\] Implement `blocking-http-call-httpx-in-async-function` (`ASYNC212`) ([#20091](https://github.com/astral-sh/ruff/pull/20091))
- \[`flake8-logging-format`\] Add auto-fix for f-string logging calls (`G004`) ([#19303](https://github.com/astral-sh/ruff/pull/19303))
- \[`flake8-use-pathlib`\] Add autofix for `PTH211` ([#20009](https://github.com/astral-sh/ruff/pull/20009))
- \[`flake8-use-pathlib`\] Make `PTH100` fix unsafe because it can change behavior ([#20100](https://github.com/astral-sh/ruff/pull/20100))
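For the `G004` auto-fix above: the rule targets f-strings in logging calls, and the new fix rewrites them into lazy `%`-style arguments, roughly as follows (illustrative sketch, not taken from the PR):

```python
import logging

logger = logging.getLogger(__name__)
name = "world"

logger.info(f"hello {name}")   # G004: the f-string is formatted eagerly,
                               # even when INFO logging is disabled

logger.info("hello %s", name)  # fixed form: formatting is deferred to the
                               # logging framework
```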
### Bug fixes

- \[`pyflakes`, `pylint`\] Fix false positives caused by `__class__` cell handling (`F841`, `PLE0117`) ([#20048](https://github.com/astral-sh/ruff/pull/20048))
- \[`pyflakes`\] Fix `allowed-unused-imports` matching for top-level modules (`F401`) ([#20115](https://github.com/astral-sh/ruff/pull/20115))
- \[`ruff`\] Fix false positive for t-strings in `default-factory-kwarg` (`RUF026`) ([#20032](https://github.com/astral-sh/ruff/pull/20032))
- \[`ruff`\] Preserve relative whitespace in multi-line expressions (`RUF033`) ([#19647](https://github.com/astral-sh/ruff/pull/19647))

### Rule changes

- \[`ruff`\] Handle empty t-strings in `unnecessary-empty-iterable-within-deque-call` (`RUF037`) ([#20045](https://github.com/astral-sh/ruff/pull/20045))

### Documentation

- Fix incorrect `D413` links in docstrings convention FAQ ([#20089](https://github.com/astral-sh/ruff/pull/20089))
- \[`flake8-use-pathlib`\] Update links to the table showing the correspondence between `os` and `pathlib` ([#20103](https://github.com/astral-sh/ruff/pull/20103))

## 0.12.10

### Preview features
239  Cargo.lock  (generated)

```diff
@@ -257,9 +257,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

 [[package]]
 name = "bitflags"
-version = "2.9.3"
+version = "2.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d"
+checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"

 [[package]]
 name = "bitvec"
@@ -295,7 +295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
 dependencies = [
  "memchr",
- "regex-automata 0.4.10",
+ "regex-automata",
  "serde",
 ]
@@ -322,9 +322,9 @@ dependencies = [

 [[package]]
 name = "camino"
-version = "1.1.11"
+version = "1.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d07aa9a93b00c76f71bc35d598bed923f6d4f3a9ca5c24b7737ae1a292841c0"
+checksum = "dd0b03af37dad7a14518b7691d81acb0f8222604ad3d1b02f6b4bed5188c0cd5"
 dependencies = [
  "serde",
 ]
@@ -408,9 +408,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.45"
+version = "4.5.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318"
+checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -418,9 +418,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.44"
+version = "4.5.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8"
+checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6"
 dependencies = [
  "anstream",
  "anstyle",
@@ -461,9 +461,9 @@ dependencies = [

 [[package]]
 name = "clap_derive"
-version = "4.5.45"
+version = "4.5.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14cb31bb0a7d536caef2639baa7fad459e15c3144efefa6dbd1c84562c4739f6"
+checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -603,7 +603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
 dependencies = [
  "lazy_static",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -612,7 +612,7 @@ version = "3.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -955,7 +955,7 @@ dependencies = [
  "libc",
  "option-ext",
  "redox_users",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
 ]

 [[package]]
@@ -1035,7 +1035,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
 dependencies = [
  "libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -1231,8 +1231,8 @@ dependencies = [
  "aho-corasick",
  "bstr",
  "log",
- "regex-automata 0.4.10",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
 ]

 [[package]]
@@ -1241,7 +1241,7 @@ version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "ignore",
  "walkdir",
 ]
@@ -1459,7 +1459,7 @@ dependencies = [
  "globset",
  "log",
  "memchr",
- "regex-automata 0.4.10",
+ "regex-automata",
  "same-file",
  "walkdir",
  "winapi-util",
@@ -1521,7 +1521,7 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "inotify-sys",
  "libc",
 ]
@@ -1537,9 +1537,9 @@ dependencies = [

 [[package]]
 name = "insta"
-version = "1.43.1"
+version = "1.43.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
+checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
 dependencies = [
  "console 0.15.11",
  "globset",
@@ -1617,7 +1617,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
 dependencies = [
  "hermit-abi",
  "libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -1681,7 +1681,7 @@ dependencies = [
  "portable-atomic",
  "portable-atomic-util",
  "serde",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -1728,9 +1728,9 @@ checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24"

 [[package]]
 name = "js-sys"
-version = "0.3.77"
+version = "0.3.78"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
+checksum = "0c0b063578492ceec17683ef2f8c5e89121fbd0b172cbc280635ab7567db2738"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -1770,9 +1770,9 @@ checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"

 [[package]]
 name = "libcst"
-version = "1.8.2"
+version = "1.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae28ddc5b90c3e3146a21d051ca095cbc8d932ad8714cf65ddf71a9abb35684c"
+checksum = "052ef5d9fc958a51aeebdf3713573b36c6fd6eed0bf0e60e204d2c0f8cf19b9f"
 dependencies = [
  "annotate-snippets",
  "libcst_derive",
@@ -1785,9 +1785,9 @@ dependencies = [

 [[package]]
 name = "libcst_derive"
-version = "1.8.2"
+version = "1.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc2de5c2f62bcf8a4f7290b1854388b262c4b68f1db1a3ee3ef6d4c1319b00a3"
+checksum = "a91a751afee92cbdd59d4bc6754c7672712eec2d30a308f23de4e3287b2929cb"
 dependencies = [
  "quote",
  "syn",
@@ -1795,9 +1795,9 @@ dependencies = [

 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.43"
+version = "0.1.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf88cd67e9de251c1781dbe2f641a1a3ad66eaae831b8a2c38fbdc5ddae16d4d"
+checksum = "667f4fec20f29dfc6bc7357c582d91796c169ad7e2fce709468aefeb2c099870"
 dependencies = [
  "cc",
  "libc",
@@ -1809,7 +1809,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "libc",
  "redox_syscall",
 ]
@@ -1850,9 +1850,9 @@ dependencies = [

 [[package]]
 name = "log"
-version = "0.4.27"
+version = "0.4.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
+checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"

 [[package]]
 name = "lsp-server"
@@ -1913,11 +1913,11 @@ dependencies = [

 [[package]]
 name = "matchers"
-version = "0.1.0"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
 dependencies = [
- "regex-automata 0.1.10",
+ "regex-automata",
 ]

 [[package]]
@@ -1949,9 +1949,9 @@ dependencies = [

 [[package]]
 name = "mimalloc"
-version = "0.1.47"
+version = "0.1.48"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1791cbe101e95af5764f06f20f6760521f7158f69dbf9d6baf941ee1bf6bc40"
+checksum = "e1ee66a4b64c74f4ef288bcbb9192ad9c3feaad75193129ac8509af543894fd8"
 dependencies = [
  "libmimalloc-sys",
 ]
@@ -2014,7 +2014,7 @@ version = "0.29.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "cfg-if",
  "cfg_aliases",
  "libc",
@@ -2026,7 +2026,7 @@ version = "0.30.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "cfg-if",
  "cfg_aliases",
  "libc",
@@ -2054,7 +2054,7 @@ version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "fsevent-sys",
  "inotify",
  "kqueue",
@@ -2074,12 +2074,11 @@ checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"

 [[package]]
 name = "nu-ansi-term"
-version = "0.46.0"
+version = "0.50.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
 dependencies = [
- "overload",
- "winapi",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -2154,12 +2153,6 @@ dependencies = [
  "memchr",
 ]

-[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
 [[package]]
 name = "parking_lot"
 version = "0.12.4"
@@ -2666,7 +2659,7 @@ version = "0.5.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
 ]

 [[package]]
@@ -2688,17 +2681,8 @@ checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata 0.4.10",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax 0.6.29",
+ "regex-automata",
+ "regex-syntax",
 ]

 [[package]]
@@ -2709,7 +2693,7 @@ checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax 0.8.5",
+ "regex-syntax",
 ]

 [[package]]
@@ -2718,12 +2702,6 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"

-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
@@ -2743,13 +2721,13 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.12.10"
+version = "0.12.12"
 dependencies = [
  "anyhow",
  "argfile",
  "assert_fs",
  "bincode 2.0.1",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "cachedir",
  "clap",
  "clap_complete_command",
@@ -2870,6 +2848,7 @@ dependencies = [
  "insta",
  "matchit",
  "path-slash",
+ "pathdiff",
  "quick-junit",
  "ruff_annotate_snippets",
  "ruff_cache",
@@ -2998,11 +2977,11 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.12.10"
+version = "0.12.12"
 dependencies = [
  "aho-corasick",
  "anyhow",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "clap",
  "colored 3.0.0",
  "fern",
@@ -3020,7 +2999,6 @@ dependencies = [
  "memchr",
  "natord",
  "path-absolutize",
- "pathdiff",
  "pep440_rs",
  "pyproject-toml",
  "regex",
@@ -3108,7 +3086,7 @@ name = "ruff_python_ast"
 version = "0.0.0"
 dependencies = [
  "aho-corasick",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "compact_str",
  "get-size2",
  "is-macro",
@@ -3196,7 +3174,7 @@ dependencies = [
 name = "ruff_python_literal"
 version = "0.0.0"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "itertools 0.14.0",
  "ruff_python_ast",
  "unic-ucd-category",
@@ -3207,7 +3185,7 @@ name = "ruff_python_parser"
 version = "0.0.0"
 dependencies = [
  "anyhow",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "bstr",
  "compact_str",
  "get-size2",
@@ -3232,7 +3210,7 @@ dependencies = [
 name = "ruff_python_semantic"
 version = "0.0.0"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "insta",
  "is-macro",
  "ruff_cache",
@@ -3253,7 +3231,7 @@ dependencies = [
 name = "ruff_python_stdlib"
 version = "0.0.0"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "unicode-ident",
 ]

@@ -3337,7 +3315,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.12.10"
+version = "0.12.12"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -3430,11 +3408,11 @@ version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "errno",
  "libc",
  "linux-raw-sys",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -3827,7 +3805,7 @@ dependencies = [
  "getrandom 0.3.3",
  "once_cell",
  "rustix",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -4161,15 +4139,15 @@ dependencies = [

 [[package]]
 name = "tracing-subscriber"
-version = "0.3.19"
+version = "0.3.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
+checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
 dependencies = [
  "chrono",
  "matchers",
  "nu-ansi-term",
  "once_cell",
- "regex",
+ "regex-automata",
  "sharded-slab",
  "smallvec",
  "thread_local",
@@ -4240,7 +4218,8 @@ dependencies = [
 name = "ty_ide"
 version = "0.0.0"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
+ "camino",
  "get-size2",
  "insta",
  "itertools 0.14.0",
@@ -4278,7 +4257,7 @@ dependencies = [
  "pep440_rs",
  "rayon",
  "regex",
- "regex-automata 0.4.10",
+ "regex-automata",
  "ruff_cache",
  "ruff_db",
  "ruff_macros",
@@ -4304,7 +4283,7 @@ name = "ty_python_semantic"
 version = "0.0.0"
 dependencies = [
  "anyhow",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "bitvec",
  "camino",
  "colored 3.0.0",
@@ -4357,7 +4336,7 @@ name = "ty_server"
 version = "0.0.0"
 dependencies = [
  "anyhow",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "crossbeam",
  "dunce",
  "insta",
@@ -4400,7 +4379,7 @@ name = "ty_test"
 version = "0.0.0"
 dependencies = [
  "anyhow",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "camino",
  "colored 3.0.0",
  "insta",
@@ -4759,21 +4738,22 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen"
-version = "0.2.100"
+version = "0.2.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
+checksum = "7e14915cadd45b529bb8d1f343c4ed0ac1de926144b746e2710f9cd05df6603b"
 dependencies = [
  "cfg-if",
  "once_cell",
  "rustversion",
  "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
 ]

 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.100"
+version = "0.2.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
+checksum = "e28d1ba982ca7923fd01448d5c30c6864d0a14109560296a162f80f305fb93bb"
 dependencies = [
  "bumpalo",
  "log",
@@ -4785,9 +4765,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.50"
+version = "0.4.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
+checksum = "0ca85039a9b469b38336411d6d6ced91f3fc87109a2a27b0c197663f5144dffe"
 dependencies = [
  "cfg-if",
  "js-sys",
@@ -4798,9 +4778,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.100"
+version = "0.2.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
+checksum = "7c3d463ae3eff775b0c45df9da45d68837702ac35af998361e2c84e7c5ec1b0d"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -4808,9 +4788,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.100"
+version = "0.2.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
+checksum = "7bb4ce89b08211f923caf51d527662b75bdc9c9c7aab40f86dcb9fb85ac552aa"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4821,18 +4801,18 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.100"
+version = "0.2.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
+checksum = "f143854a3b13752c6950862c906306adb27c7e839f7414cec8fea35beab624c1"
 dependencies = [
  "unicode-ident",
 ]

 [[package]]
 name = "wasm-bindgen-test"
-version = "0.3.50"
+version = "0.3.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66c8d5e33ca3b6d9fa3b4676d774c5778031d27a578c2b007f905acf816152c3"
+checksum = "80cc7f8a4114fdaa0c58383caf973fc126cf004eba25c9dc639bccd3880d55ad"
 dependencies = [
  "js-sys",
  "minicov",
@@ -4843,9 +4823,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-test-macro"
-version = "0.3.50"
+version = "0.3.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b"
+checksum = "c5ada2ab788d46d4bda04c9d567702a79c8ced14f51f221646a16ed39d0e6a5d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4854,9 +4834,9 @@ dependencies = [

 [[package]]
 name = "web-sys"
-version = "0.3.77"
+version = "0.3.78"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
+checksum = "77e4b637749ff0d92b8fad63aa1f7cff3cbe125fd49c175cd6345e7272638b12"
 dependencies = [
  "js-sys",
  "wasm-bindgen",
@@ -4892,37 +4872,15 @@ dependencies = [
  "glob",
 ]

-[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
 [[package]]
 name = "winapi-util"
 version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
 [[package]]
 name = "windows-core"
 version = "0.61.2"
@@ -4982,6 +4940,15 @@ dependencies = [
  "windows-link",
 ]

+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.59.0"
@@ -5150,7 +5117,7 @@ version = "0.39.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
 ]

 [[package]]
```
10  Cargo.toml

```diff
@@ -115,7 +115,7 @@ jiff = { version = "0.2.0" }
 js-sys = { version = "0.3.69" }
 jod-thread = { version = "1.0.0" }
 libc = { version = "0.2.153" }
-libcst = { version = "1.1.0", default-features = false }
+libcst = { version = "1.8.4", default-features = false }
 log = { version = "0.4.17" }
 lsp-server = { version = "0.7.6" }
 lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
@@ -251,6 +251,14 @@ rest_pat_in_fully_bound_structs = "warn"
 redundant_clone = "warn"
 debug_assert_with_mut_call = "warn"
 unused_peekable = "warn"
+# This lint sometimes flags code whose `if` and `else`
+# bodies could be flipped when a `!` operator is removed.
+# While perhaps sometimes a good idea, it is also often
+# not a good idea due to other factors impacting
+# readability. For example, if flipping the bodies results
+# in the `if` being an order of magnitude bigger than the
+# `else`, then some might consider that harder to read.
+if_not_else = "allow"

 # Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
 large_stack_arrays = "allow"
```
README.md

````diff
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.12.10/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.12.10/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.12.12/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.12.12/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.12.10
+  rev: v0.12.12
   hooks:
     # Run the linter.
     - id: ruff-check
````
crates/ruff/Cargo.toml

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.12.10"
+version = "0.12.12"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
```
crates/ruff/src/printer.rs

```diff
@@ -15,8 +15,7 @@ use ruff_db::diagnostic::{
 use ruff_linter::fs::relativize_path;
 use ruff_linter::logging::LogLevel;
 use ruff_linter::message::{
-    Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, SarifEmitter,
-    TextEmitter,
+    Emitter, EmitterContext, GithubEmitter, GroupedEmitter, SarifEmitter, TextEmitter,
 };
 use ruff_linter::notify_user;
 use ruff_linter::settings::flags::{self};
@@ -31,8 +30,6 @@ bitflags! {
         const SHOW_VIOLATIONS = 1 << 0;
         /// Whether to show a summary of the fixed violations when emitting diagnostics.
         const SHOW_FIX_SUMMARY = 1 << 1;
-        /// Whether to show a diff of each fixed violation when emitting diagnostics.
-        const SHOW_FIX_DIFF = 1 << 2;
     }
 }
@@ -261,9 +258,9 @@ impl Printer {
             OutputFormat::Concise | OutputFormat::Full => {
                 TextEmitter::default()
                     .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
-                    .with_show_fix_diff(self.flags.intersects(Flags::SHOW_FIX_DIFF))
+                    .with_show_fix_diff(self.format == OutputFormat::Full && preview)
                     .with_show_source(self.format == OutputFormat::Full)
                     .with_unsafe_fixes(self.unsafe_fixes)
                     .with_fix_applicability(self.unsafe_fixes.required_applicability())
                     .with_preview(preview)
                     .emit(writer, &diagnostics.inner, &context)?;
@@ -296,7 +293,11 @@ impl Printer {
                 GithubEmitter.emit(writer, &diagnostics.inner, &context)?;
             }
             OutputFormat::Gitlab => {
-                GitlabEmitter::default().emit(writer, &diagnostics.inner, &context)?;
+                let config = DisplayDiagnosticConfig::default()
+                    .format(DiagnosticFormat::Gitlab)
+                    .preview(preview);
+                let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
+                write!(writer, "{value}")?;
             }
             OutputFormat::Pylint => {
                 let config = DisplayDiagnosticConfig::default()
@@ -461,7 +462,7 @@ impl Printer {
                 TextEmitter::default()
                     .with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
                     .with_show_source(preview)
                     .with_unsafe_fixes(self.unsafe_fixes)
                     .with_fix_applicability(self.unsafe_fixes.required_applicability())
                     .emit(writer, &diagnostics.inner, &context)?;
         }
         writer.flush()?;
```
```diff
@@ -1489,6 +1489,8 @@ fn deprecated_direct() {

 #[test]
 fn deprecated_multiple_direct() {
+    // Multiple deprecated rules selected by exact code should be included
+    // but a warning should be displayed
     let mut cmd = RuffCheck::default()
         .args(["--select", "RUF920", "--select", "RUF921"])
         .build();
@@ -1516,16 +1518,10 @@ fn deprecated_indirect() {
     // since it is not a "direct" selection
     let mut cmd = RuffCheck::default().args(["--select", "RUF92"]).build();
     assert_cmd_snapshot!(cmd, @r"
-    success: false
-    exit_code: 1
+    success: true
+    exit_code: 0
     ----- stdout -----
-    RUF920 Hey this is a deprecated test rule.
-    --> -:1:1
-
-    RUF921 Hey this is another deprecated test rule.
-    --> -:1:1
-
-    Found 2 errors.
+    All checks passed!

     ----- stderr -----
     ");
@@ -2155,16 +2151,10 @@ extend-safe-fixes = ["RUF9"]
     RUF903 Hey this is a stable test rule with a display only fix.
     --> -:1:1

-    RUF920 Hey this is a deprecated test rule.
-    --> -:1:1
-
-    RUF921 Hey this is another deprecated test rule.
-    --> -:1:1
-
     RUF950 Hey this is a test rule that was redirected from another.
     --> -:1:1

-    Found 7 errors.
+    Found 5 errors.
     [*] 1 fixable with the `--fix` option (1 hidden fix can be enabled with the `--unsafe-fixes` option).

     ----- stderr -----
@@ -5780,28 +5780,6 @@ match 42: # invalid-syntax
     Ok(())
 }

-#[test]
-fn future_annotations_preview_warning() {
-    assert_cmd_snapshot!(
-        Command::new(get_cargo_bin(BIN_NAME))
-            .args(STDIN_BASE_OPTIONS)
-            .args(["--config", "lint.future-annotations = true"])
-            .args(["--select", "F"])
-            .arg("--no-preview")
-            .arg("-")
-            .pass_stdin("1"),
-        @r"
-    success: true
-    exit_code: 0
-    ----- stdout -----
-    All checks passed!
-
-    ----- stderr -----
-    warning: The `lint.future-annotations` setting will have no effect because `preview` is disabled
-    ",
-    );
-}

 #[test]
 fn up045_nested_optional_flatten_all() {
     let contents = "\
@@ -5830,3 +5808,33 @@ nested_optional: Optional[Optional[Optional[str]]] = None
     ",
     );
 }
+
+#[test]
+fn show_fixes_in_full_output_with_preview_enabled() {
+    assert_cmd_snapshot!(
+        Command::new(get_cargo_bin(BIN_NAME))
+            .args(["check", "--no-cache", "--output-format", "full"])
+            .args(["--select", "F401"])
+            .arg("--preview")
+            .arg("-")
+            .pass_stdin("import math"),
+        @r"
+    success: false
+    exit_code: 1
+    ----- stdout -----
+    F401 [*] `math` imported but unused
+    --> -:1:8
+     |
+   1 | import math
+     |        ^^^^
+     |
+    help: Remove unused import: `math`
+    - import math
+
+    Found 1 error.
+    [*] 1 fixable with the `--fix` option.
+
+    ----- stderr -----
+    ",
+    );
+}
```
````diff
@@ -55,6 +55,10 @@ either a redundant alias or, if already present in the file, an `__all__` entry.
 to remove third-party and standard library imports -- the fix is unsafe because the module's
 interface changes.

+See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)
+for more details on how Ruff
+determines whether an import is first or third-party.
+
 ## Example

 ```python
@@ -83,11 +87,6 @@ else:
     print("numpy is not installed")
 ```

-## Preview
-When [preview](https://docs.astral.sh/ruff/preview/) is enabled,
-the criterion for determining whether an import is first-party
-is stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.
-
 ## Options
 - `lint.ignore-init-module-imports`
 - `lint.pyflakes.allowed-unused-imports`
````
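As background for the documentation change above: the two documented ways to mark an intentional re-export, so that `F401` keeps the import, look like this (an illustrative `__init__.py` with hypothetical module names):

```python
# __init__.py

# 1. A "redundant" import alias signals an intentional re-export:
from .submodule import helper as helper

# 2. Alternatively, an `__all__` entry declares the symbol as public API:
from .submodule import other_helper

__all__ = ["other_helper"]
```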
```diff
@@ -20,59 +20,59 @@ exit_code: 1
   {
     "check_name": "F401",
     "description": "F401: `os` imported but unused",
-    "severity": "major",
     "fingerprint": "4dbad37161e65c72",
     "location": {
       "path": "input.py",
       "positions": {
         "begin": {
-          "column": 8,
-          "line": 1
+          "line": 1,
+          "column": 8
         },
         "end": {
-          "column": 10,
-          "line": 1
+          "line": 1,
+          "column": 10
         }
       }
-    }
+    },
+    "severity": "major"
   },
   {
     "check_name": "F821",
     "description": "F821: Undefined name `y`",
-    "severity": "major",
     "fingerprint": "7af59862a085230",
     "location": {
       "path": "input.py",
       "positions": {
         "begin": {
-          "column": 5,
-          "line": 2
+          "line": 2,
+          "column": 5
         },
         "end": {
-          "column": 6,
-          "line": 2
+          "line": 2,
+          "column": 6
         }
       }
-    }
+    },
+    "severity": "major"
   },
   {
     "check_name": "invalid-syntax",
     "description": "invalid-syntax: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)",
-    "severity": "major",
     "fingerprint": "e558cec859bb66e8",
     "location": {
       "path": "input.py",
       "positions": {
         "begin": {
-          "column": 1,
-          "line": 3
+          "line": 3,
+          "column": 1
         },
         "end": {
-          "column": 6,
-          "line": 3
+          "line": 3,
+          "column": 6
         }
       }
-    }
+    },
+    "severity": "major"
   }
 ]

 ----- stderr -----
```
@@ -95,7 +95,7 @@ exit_code: 1
|
||||
"rules": [
|
||||
{
|
||||
"fullDescription": {
|
||||
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"

@@ -450,9 +450,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
                    r#"
class C:
    def f(self: "C"):
        self.a = ""
        self.b = ""

        if isinstance(self.a, str):
            return

@@ -466,6 +463,56 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
            return
        if isinstance(self.b, str):
            return
        if isinstance(self.b, str):
            return
        if isinstance(self.b, str):
            return

        self.a = ""
        self.b = ""
"#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}

fn benchmark_complex_constrained_attributes_3(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[complex_constrained_attributes_3]", |b| {
        b.iter_batched_ref(
            || {
                // This is a regression test for https://github.com/astral-sh/ty/issues/758
                setup_micro_case(
                    r#"
class GridOut:
    def __init__(self: "GridOut") -> None:
        self._buffer = b""

    def _read_size_or_line(self: "GridOut", size: int = -1):
        if size > self._position:
            size = self._position
            pass
        if size == 0:
            return bytes()

        while size > 0:
            if self._buffer:
                buf = self._buffer
                self._buffer = b""
            else:
                buf = b""

            if len(buf) > size:
                self._buffer = buf
            self._position -= len(self._buffer)
"#,
                )
            },
@@ -668,6 +715,7 @@ criterion_group!(
    benchmark_tuple_implicit_instance_attributes,
    benchmark_complex_constrained_attributes_1,
    benchmark_complex_constrained_attributes_2,
+   benchmark_complex_constrained_attributes_3,
    benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);

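All of these micro-benchmarks follow criterion's `iter_batched_ref` shape: build the input in an untimed setup closure, then time only the check itself. A minimal self-contained sketch of that pattern (the benchmark name and workload here are invented for illustration, not taken from this diff):

```rust
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};

fn bench_example(criterion: &mut Criterion) {
    criterion.bench_function("example[sum_bytes]", |b| {
        b.iter_batched_ref(
            // Setup runs outside the timed section, like `setup_micro_case` above.
            || vec![1u8; 1024],
            // Only this closure is timed.
            |data| data.iter().map(|&x| u64::from(x)).sum::<u64>(),
            BatchSize::SmallInput,
        );
    });
}

criterion_group!(micro, bench_example);
criterion_main!(micro);
```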
@@ -34,6 +34,7 @@ glob = { workspace = true }
ignore = { workspace = true, optional = true }
matchit = { workspace = true }
path-slash = { workspace = true }
+pathdiff = { workspace = true }
quick-junit = { workspace = true, optional = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }

@@ -53,7 +54,7 @@ web-time = { version = "1.1.0" }
etcetera = { workspace = true, optional = true }

[dev-dependencies]
-insta = { workspace = true }
+insta = { workspace = true, features = ["filters"] }
tempfile = { workspace = true }

[features]

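The new `filters` feature on the `insta` dev-dependency is what lets the GitLab snapshot tests further down redact unstable values before comparison. A rough sketch of the pattern (the literal input here is invented for illustration):

```rust
// Replace any hex fingerprint with a stable placeholder before snapshotting.
insta::with_settings!({filters => vec![
    (r#""fingerprint": "[a-z0-9]+","#, r#""fingerprint": "<redacted>","#),
]}, {
    insta::assert_snapshot!(r#"{"fingerprint": "e558cec859bb66e8"}"#);
});
```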
@@ -349,6 +349,13 @@ impl Diagnostic {
        self.fix().is_some()
    }

+   /// Returns `true` if the diagnostic is [`fixable`](Diagnostic::fixable) and applies at the
+   /// configured applicability level.
+   pub fn has_applicable_fix(&self, config: &DisplayDiagnosticConfig) -> bool {
+       self.fix()
+           .is_some_and(|fix| fix.applies(config.fix_applicability))
+   }
+
    /// Returns the offset of the parent statement for this diagnostic if it exists.
    ///
    /// This is primarily used for checking noqa/secondary code suppressions.
@@ -447,24 +454,26 @@ impl Diagnostic {

    /// Computes the start source location for the message.
    ///
-   /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
-   /// span has no range.
-   pub fn expect_ruff_start_location(&self) -> LineColumn {
-       self.expect_primary_span()
-           .expect_ruff_file()
-           .to_source_code()
-           .line_column(self.expect_range().start())
+   /// Returns None if the diagnostic has no primary span, if its file is not a `SourceFile`,
+   /// or if the span has no range.
+   pub fn ruff_start_location(&self) -> Option<LineColumn> {
+       Some(
+           self.ruff_source_file()?
+               .to_source_code()
+               .line_column(self.range()?.start()),
+       )
    }

    /// Computes the end source location for the message.
    ///
-   /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
-   /// span has no range.
-   pub fn expect_ruff_end_location(&self) -> LineColumn {
-       self.expect_primary_span()
-           .expect_ruff_file()
-           .to_source_code()
-           .line_column(self.expect_range().end())
+   /// Returns None if the diagnostic has no primary span, if its file is not a `SourceFile`,
+   /// or if the span has no range.
+   pub fn ruff_end_location(&self) -> Option<LineColumn> {
+       Some(
+           self.ruff_source_file()?
+               .to_source_code()
+               .line_column(self.range()?.end()),
+       )
    }

    /// Returns the [`SourceFile`] which the message belongs to.
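Callers migrating off the panicking accessors can simply propagate the `Option`. A small usage sketch (not part of this diff; the `describe` helper is hypothetical):

```rust
fn describe(diag: &Diagnostic) -> String {
    match diag.ruff_start_location() {
        // `LineColumn` carries one-indexed `line` and `column` fields.
        Some(loc) => format!("{}:{}", loc.line, loc.column),
        None => "<no location>".to_string(),
    }
}
```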
@@ -494,13 +503,18 @@ impl Diagnostic {

    /// Returns the ordering of diagnostics based on the start of their ranges, if they have any.
    ///
-   /// Panics if either diagnostic has no primary span, if the span has no range, or if its file is
-   /// not a `SourceFile`.
+   /// Panics if either diagnostic has no primary span, or if its file is not a `SourceFile`.
    pub fn ruff_start_ordering(&self, other: &Self) -> std::cmp::Ordering {
-       (self.expect_ruff_source_file(), self.expect_range().start()).cmp(&(
+       let a = (
+           self.expect_ruff_source_file(),
+           self.range().map(|r| r.start()),
+       );
+       let b = (
            other.expect_ruff_source_file(),
-           other.expect_range().start(),
-       ))
+           other.range().map(|r| r.start()),
+       );

+       a.cmp(&b)
    }
}

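Because `Option<T>` orders `None` before `Some(_)`, diagnostics without a range now sort ahead of located ones in the same file instead of panicking. Sorting a batch stays a one-liner (a sketch, not taken from the diff):

```rust
// Sort diagnostics by file, then by start offset when a range is present.
diagnostics.sort_by(|a, b| a.ruff_start_ordering(b));
```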
@@ -1435,6 +1449,11 @@ pub enum DiagnosticFormat {
    /// Print diagnostics in the format expected by JUnit.
    #[cfg(feature = "junit")]
    Junit,
+   /// Print diagnostics in the JSON format used by GitLab [Code Quality] reports.
+   ///
+   /// [Code Quality]: https://docs.gitlab.com/ci/testing/code_quality/#code-quality-report-format
+   #[cfg(feature = "serde")]
+   Gitlab,
}

/// A representation of the kinds of messages inside a diagnostic.

@@ -31,6 +31,8 @@ mod azure;
mod concise;
mod full;
+#[cfg(feature = "serde")]
+mod gitlab;
#[cfg(feature = "serde")]
mod json;
#[cfg(feature = "serde")]
mod json_lines;
@@ -136,6 +138,10 @@ impl std::fmt::Display for DisplayDiagnostics<'_> {
            DiagnosticFormat::Junit => {
                junit::JunitRenderer::new(self.resolver).render(f, self.diagnostics)?;
            }
+           #[cfg(feature = "serde")]
+           DiagnosticFormat::Gitlab => {
+               gitlab::GitlabRenderer::new(self.resolver).render(f, self.diagnostics)?;
+           }
        }

        Ok(())
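A caller selects the new variant like any other output format. A hedged sketch of mapping a CLI flag onto `DiagnosticFormat` (the `parse_format` helper and the `Full` fallback variant are assumptions, not shown in this diff):

```rust
fn parse_format(name: &str) -> Option<DiagnosticFormat> {
    match name {
        "full" => Some(DiagnosticFormat::Full),
        #[cfg(feature = "junit")]
        "junit" => Some(DiagnosticFormat::Junit),
        // Only available when the `serde` feature is compiled in.
        #[cfg(feature = "serde")]
        "gitlab" => Some(DiagnosticFormat::Gitlab),
        _ => None,
    }
}
```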
@@ -248,9 +254,7 @@ impl<'a> ResolvedDiagnostic<'a> {
            id,
            message: diag.inner.message.as_str().to_string(),
            annotations,
-           is_fixable: diag
-               .fix()
-               .is_some_and(|fix| fix.applies(config.fix_applicability)),
+           is_fixable: diag.has_applicable_fix(config),
        }
    }

@@ -77,11 +77,9 @@ impl<'a> ConciseRenderer<'a> {
                )?;
            }
            if self.config.show_fix_status {
-               if let Some(fix) = diag.fix() {
-                   // Do not display an indicator for inapplicable fixes
-                   if fix.applies(self.config.fix_applicability) {
-                       write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
-                   }
+               // Do not display an indicator for inapplicable fixes
+               if diag.has_applicable_fix(self.config) {
+                   write!(f, "[{fix}] ", fix = fmt_styled("*", stylesheet.separator))?;
+               }
-               }
            } else {

@@ -1,12 +1,11 @@
use std::borrow::Cow;
-use std::num::NonZeroUsize;

use anstyle::Style;
+use ruff_notebook::NotebookIndex;
use similar::{ChangeTag, TextDiff};

use ruff_annotate_snippets::Renderer as AnnotateRenderer;
use ruff_diagnostics::{Applicability, Fix};
-use ruff_notebook::NotebookIndex;
use ruff_source_file::OneIndexed;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

@@ -58,13 +57,14 @@ impl<'a> FullRenderer<'a> {
        for diag in renderable.diagnostics.iter() {
            writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
        }
-       writeln!(f)?;

-       if self.config.show_fix_diff {
+       if self.config.show_fix_diff && diag.has_applicable_fix(self.config) {
            if let Some(diff) = Diff::from_diagnostic(diag, &stylesheet, self.resolver) {
-               writeln!(f, "{diff}")?;
+               write!(f, "{diff}")?;
            }
        }

+       writeln!(f)?;
    }

    Ok(())
|
||||
vec![(None, source_text.text_len())]
|
||||
};
|
||||
|
||||
let message = match self.fix.applicability() {
|
||||
// TODO(zanieb): Adjust this messaging once it's user-facing
|
||||
Applicability::Safe => "Safe fix",
|
||||
Applicability::Unsafe => "Unsafe fix",
|
||||
Applicability::DisplayOnly => "Display-only fix",
|
||||
};
|
||||
|
||||
// TODO(brent) `stylesheet.separator` is cyan rather than blue, as we had before. I think
|
||||
// we're getting rid of this soon anyway, so I didn't think it was worth adding another
|
||||
// style to the stylesheet temporarily. The color doesn't appear at all in the snapshot
|
||||
// tests, which is the only place these are currently used.
|
||||
writeln!(f, "ℹ {}", fmt_styled(message, self.stylesheet.separator))?;
|
||||
|
||||
let mut last_end = TextSize::ZERO;
|
||||
for (cell, offset) in cells {
|
||||
let range = TextRange::new(last_end, offset);
|
||||
@@ -167,64 +154,67 @@ impl std::fmt::Display for Diff<'_> {

        let diff = TextDiff::from_lines(input, &output);

-       let (largest_old, largest_new) = diff
-           .ops()
-           .last()
-           .map(|op| (op.old_range().start, op.new_range().start))
-           .unwrap_or_default();
+       let grouped_ops = diff.grouped_ops(3);

-       let digit_with = OneIndexed::from_zero_indexed(largest_new.max(largest_old)).digits();
+       // Find the new line number with the largest number of digits to align all of the line
+       // number separators.
+       let last_op = grouped_ops.last().and_then(|group| group.last());
+       let largest_new = last_op.map(|op| op.new_range().end).unwrap_or_default();
+
+       let digit_with = OneIndexed::new(largest_new).unwrap_or_default().digits();

        if let Some(cell) = cell {
-           // Room for 2 digits, 2 x 1 space before each digit, 1 space, and 1 `|`. This
-           // centers the three colons on the pipe.
-           writeln!(f, "{:>1$} cell {cell}", ":::", 2 * digit_with.get() + 4)?;
+           // Room for 1 digit, 1 space, 1 `|`, and 1 more following space. This centers the
+           // three colons on the pipe.
+           writeln!(f, "{:>1$} cell {cell}", ":::", digit_with.get() + 3)?;
        }

-       for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
+       for (idx, group) in grouped_ops.iter().enumerate() {
            if idx > 0 {
                writeln!(f, "{:-^1$}", "-", 80)?;
            }
            for op in group {
                for change in diff.iter_inline_changes(op) {
-                   let sign = match change.tag() {
-                       ChangeTag::Delete => "-",
-                       ChangeTag::Insert => "+",
-                       ChangeTag::Equal => " ",
+                   let (sign, style, line_no_style, index) = match change.tag() {
+                       ChangeTag::Delete => (
+                           "-",
+                           self.stylesheet.deletion,
+                           self.stylesheet.deletion_line_no,
+                           None,
+                       ),
+                       ChangeTag::Insert => (
+                           "+",
+                           self.stylesheet.insertion,
+                           self.stylesheet.insertion_line_no,
+                           change.new_index(),
+                       ),
+                       ChangeTag::Equal => (
+                           "|",
+                           self.stylesheet.none,
+                           self.stylesheet.line_no,
+                           change.new_index(),
+                       ),
                    };

-                   let line_style = LineStyle::from(change.tag(), self.stylesheet);
-
-                   let old_index = change.old_index().map(OneIndexed::from_zero_indexed);
-                   let new_index = change.new_index().map(OneIndexed::from_zero_indexed);
+                   let line = Line {
+                       index: index.map(OneIndexed::from_zero_indexed),
+                       width: digit_with,
+                   };

                    write!(
                        f,
-                       "{} {} |{}",
-                       Line {
-                           index: old_index,
-                           width: digit_with,
-                       },
-                       Line {
-                           index: new_index,
-                           width: digit_with,
-                       },
-                       fmt_styled(line_style.apply_to(sign), self.stylesheet.emphasis),
+                       "{line} {sign} ",
+                       line = fmt_styled(line, self.stylesheet.line_no),
+                       sign = fmt_styled(sign, line_no_style),
                    )?;

                    for (emphasized, value) in change.iter_strings_lossy() {
                        let value = show_nonprinting(&value);
+                       let styled = fmt_styled(value, style);
                        if emphasized {
-                           write!(
-                               f,
-                               "{}",
-                               fmt_styled(
-                                   line_style.apply_to(&value),
-                                   self.stylesheet.underline
-                               )
-                           )?;
+                           write!(f, "{}", fmt_styled(styled, self.stylesheet.emphasis))?;
                        } else {
-                           write!(f, "{}", line_style.apply_to(&value))?;
+                           write!(f, "{styled}")?;
                        }
                    }
                    if change.missing_newline() {
@@ -235,31 +225,35 @@ impl std::fmt::Display for Diff<'_> {
                }
            }

-       Ok(())
-   }
-}
-
-struct LineStyle {
-   style: Style,
-}
-
-impl LineStyle {
-   fn apply_to(&self, input: &str) -> impl std::fmt::Display {
-       fmt_styled(input, self.style)
-   }
-
-   fn from(value: ChangeTag, stylesheet: &DiagnosticStylesheet) -> LineStyle {
-       match value {
-           ChangeTag::Equal => LineStyle {
-               style: stylesheet.none,
-           },
-           ChangeTag::Delete => LineStyle {
-               style: stylesheet.deletion,
-           },
-           ChangeTag::Insert => LineStyle {
-               style: stylesheet.insertion,
-           },
+       match self.fix.applicability() {
+           Applicability::Safe => {}
+           Applicability::Unsafe => {
+               writeln!(
+                   f,
+                   "{note}: {msg}",
+                   note = fmt_styled("note", self.stylesheet.warning),
+                   msg = fmt_styled(
+                       "This is an unsafe fix and may change runtime behavior",
+                       self.stylesheet.emphasis
+                   )
+               )?;
+           }
+           Applicability::DisplayOnly => {
+               // Note that this is still only used in tests. There's no `--display-only-fixes`
+               // analog to `--unsafe-fixes` for users to activate this or see the styling.
+               writeln!(
+                   f,
+                   "{note}: {msg}",
+                   note = fmt_styled("note", self.stylesheet.error),
+                   msg = fmt_styled(
+                       "This is a display-only fix and is likely to be incorrect",
+                       self.stylesheet.emphasis
+                   )
+               )?;
+           }
+       }

        Ok(())
    }
}

@@ -297,7 +291,7 @@ fn show_nonprinting(s: &str) -> Cow<'_, str> {

#[cfg(test)]
mod tests {
-   use ruff_diagnostics::{Applicability, Fix};
+   use ruff_diagnostics::{Applicability, Edit, Fix};
    use ruff_text_size::{TextLen, TextRange, TextSize};

    use crate::diagnostic::{
@@ -703,6 +697,8 @@ print()
    fn notebook_output_with_diff() {
        let (mut env, diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
        env.show_fix_diff(true);
+       env.fix_applicability(Applicability::DisplayOnly);

        insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
        error[unused-import][*]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
@@ -712,11 +708,9 @@ print()
          | ^^
          |
        help: Remove unused import: `os`

-       ℹ Safe fix
-       ::: cell 1
-       1 1 | # cell 1
-       2   |-import os
+       ::: cell 1
+       1 | # cell 1
+       - import os

        error[unused-import][*]: `math` imported but unused
         --> notebook.ipynb:cell 2:2:8
@@ -728,15 +722,13 @@ print()
        4 | print('hello world')
          |
        help: Remove unused import: `math`
+       ::: cell 2
+       1 | # cell 2
+       - import math
+       2 |
+       3 | print('hello world')

-       ℹ Safe fix
-       ::: cell 2
-       1 1 | # cell 2
-       2   |-import math
-       3 2 |
-       4 3 | print('hello world')

-       error[unused-variable]: Local variable `x` is assigned to but never used
+       error[unused-variable][*]: Local variable `x` is assigned to but never used
         --> notebook.ipynb:cell 3:4:5
          |
        2 | def foo():
@@ -745,14 +737,13 @@ print()
          | ^
          |
        help: Remove assignment to unused variable `x`

-       ℹ Unsafe fix
-       ::: cell 3
-       1 1 | # cell 3
-       2 2 | def foo():
-       3 3 | print()
-       4   |- x = 1
-       5 4 |
+       ::: cell 3
+       1 | # cell 3
+       2 | def foo():
+       3 | print()
+       - x = 1
+       4 |
+       note: This is an unsafe fix and may change runtime behavior
        ");
    }

@@ -760,6 +751,7 @@ print()
    fn notebook_output_with_diff_spanning_cells() {
        let (mut env, mut diagnostics) = create_notebook_diagnostics(DiagnosticFormat::Full);
        env.show_fix_diff(true);
+       env.fix_applicability(Applicability::DisplayOnly);

        // Move all of the edits from the later diagnostics to the first diagnostic to simulate a
        // single diagnostic with edits in different cells.
@@ -772,7 +764,7 @@ print()
        *fix = Fix::unsafe_edits(edits.remove(0), edits);

        insta::assert_snapshot!(env.render(&diagnostic), @r"
-       error[unused-import]: `os` imported but unused
+       error[unused-import][*]: `os` imported but unused
         --> notebook.ipynb:cell 1:2:8
          |
        1 | # cell 1
@@ -780,22 +772,21 @@ print()
          | ^^
          |
        help: Remove unused import: `os`

-       ℹ Unsafe fix
-       ::: cell 1
-       1 1 | # cell 1
-       2   |-import os
-       ::: cell 2
-       1 1 | # cell 2
-       2   |-import math
-       3 2 |
-       4 3 | print('hello world')
-       ::: cell 3
-       1 1 | # cell 3
-       2 2 | def foo():
-       3 3 | print()
-       4   |- x = 1
-       5 4 |
+       ::: cell 1
+       1 | # cell 1
+       - import os
+       ::: cell 2
+       1 | # cell 2
+       - import math
+       2 |
+       3 | print('hello world')
+       ::: cell 3
+       1 | # cell 3
+       2 | def foo():
+       3 | print()
+       - x = 1
+       4 |
+       note: This is an unsafe fix and may change runtime behavior
        ");
    }

@@ -901,4 +892,74 @@ print()
          |
        ");
    }

    /// Test that we handle the width calculation for the line number correctly even for context
    /// lines at the end of a diff. For example, we want it to render like this:
    ///
    /// ```
    ///  8 |
    ///  9 |
    /// 10 |
    /// ```
    ///
    /// and not like this:
    ///
    /// ```
    /// 8 |
    /// 9 |
    /// 10 |
    /// ```
    #[test]
    fn longer_line_number_end_of_context() {
        let mut env = TestEnvironment::new();
        let contents = "\
line 1
line 2
line 3
line 4
line 5
line 6
line 7
line 8
line 9
line 10
";
        env.add("example.py", contents);
        env.format(DiagnosticFormat::Full);
        env.show_fix_diff(true);
        env.fix_applicability(Applicability::DisplayOnly);

        let mut diagnostic = env.err().primary("example.py", "3", "3", "label").build();
        diagnostic.help("Start of diff:");
        let target = "line 7";
        let line9 = contents.find(target).unwrap();
        let range = TextRange::at(TextSize::try_from(line9).unwrap(), target.text_len());
        diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement(
            format!("fixed {target}"),
            range,
        )));

        insta::assert_snapshot!(env.render(&diagnostic), @r"
        error[test-diagnostic][*]: main diagnostic message
         --> example.py:3:1
          |
        1 | line 1
        2 | line 2
        3 | line 3
          | ^^^^^^ label
        4 | line 4
        5 | line 5
          |
        help: Start of diff:
         4 | line 4
         5 | line 5
         6 | line 6
           - line 7
         7 + fixed line 7
         8 | line 8
         9 | line 9
        10 | line 10
        note: This is an unsafe fix and may change runtime behavior
        ");
    }
}

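The property this test pins down: the line-number gutter width must come from the largest number that can appear on the *new* side of the diff, including trailing context lines. Roughly, under the assumption that `OneIndexed::digits` returns the printed width:

```rust
use ruff_source_file::OneIndexed;

// `largest_new` is taken from the end of the last grouped op, so a trailing
// context line such as `10` is counted when sizing the line-number column.
let largest_new = 10;
let width = OneIndexed::new(largest_new).unwrap_or_default().digits();
assert_eq!(width.get(), 2); // "10" needs two columns, so " 8 |" gets padded
```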
205
crates/ruff_db/src/diagnostic/render/gitlab.rs
Normal file
@@ -0,0 +1,205 @@
use std::{
    collections::HashSet,
    hash::{DefaultHasher, Hash, Hasher},
    path::Path,
};

use ruff_source_file::LineColumn;
use serde::{Serialize, Serializer, ser::SerializeSeq};

use crate::diagnostic::{Diagnostic, Severity};

use super::FileResolver;

pub(super) struct GitlabRenderer<'a> {
    resolver: &'a dyn FileResolver,
}

impl<'a> GitlabRenderer<'a> {
    pub(super) fn new(resolver: &'a dyn FileResolver) -> Self {
        Self { resolver }
    }
}

impl GitlabRenderer<'_> {
    pub(super) fn render(
        &self,
        f: &mut std::fmt::Formatter,
        diagnostics: &[Diagnostic],
    ) -> std::fmt::Result {
        write!(
            f,
            "{}",
            serde_json::to_string_pretty(&SerializedMessages {
                diagnostics,
                resolver: self.resolver,
                #[expect(
                    clippy::disallowed_methods,
                    reason = "We don't have access to a `System` here, \
                        and this is only intended for use by GitLab CI, \
                        which runs on a real `System`."
                )]
                project_dir: std::env::var("CI_PROJECT_DIR").ok().as_deref(),
            })
            .unwrap()
        )
    }
}

struct SerializedMessages<'a> {
    diagnostics: &'a [Diagnostic],
    resolver: &'a dyn FileResolver,
    project_dir: Option<&'a str>,
}

impl Serialize for SerializedMessages<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
        let mut fingerprints = HashSet::<u64>::with_capacity(self.diagnostics.len());

        for diagnostic in self.diagnostics {
            let location = diagnostic
                .primary_span()
                .map(|span| {
                    let file = span.file();
                    let positions = if self.resolver.is_notebook(file) {
                        // We can't give a reasonable location for the structured formats,
                        // so we show one that's clearly a fallback
                        Default::default()
                    } else {
                        let diagnostic_source = file.diagnostic_source(self.resolver);
                        let source_code = diagnostic_source.as_source_code();
                        span.range()
                            .map(|range| Positions {
                                begin: source_code.line_column(range.start()),
                                end: source_code.line_column(range.end()),
                            })
                            .unwrap_or_default()
                    };

                    let path = self.project_dir.as_ref().map_or_else(
                        || file.relative_path(self.resolver).display().to_string(),
                        |project_dir| relativize_path_to(file.path(self.resolver), project_dir),
                    );

                    Location { path, positions }
                })
                .unwrap_or_default();

            let mut message_fingerprint = fingerprint(diagnostic, &location.path, 0);

            // Make sure that we do not get a fingerprint that is already in use
            // by adding in the previously generated one.
            while fingerprints.contains(&message_fingerprint) {
                message_fingerprint = fingerprint(diagnostic, &location.path, message_fingerprint);
            }
            fingerprints.insert(message_fingerprint);

            let description = diagnostic.body();
            let check_name = diagnostic.secondary_code_or_id();
            let severity = match diagnostic.severity() {
                Severity::Info => "info",
                Severity::Warning => "minor",
                Severity::Error => "major",
                // Another option here is `blocker`
                Severity::Fatal => "critical",
            };

            let value = Message {
                check_name,
                // GitLab doesn't display the separate `check_name` field in a Code Quality report,
                // so prepend it to the description too.
                description: format!("{check_name}: {description}"),
                severity,
                fingerprint: format!("{:x}", message_fingerprint),
                location,
            };

            s.serialize_element(&value)?;
        }

        s.end()
    }
}

#[derive(Serialize)]
struct Message<'a> {
    check_name: &'a str,
    description: String,
    severity: &'static str,
    fingerprint: String,
    location: Location,
}

/// The place in the source code where the issue was discovered.
///
/// According to the CodeClimate report format [specification] linked from the GitLab [docs], this
/// field is required, so we fall back on a default `path` and position if the diagnostic doesn't
/// have a primary span.
///
/// [specification]: https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types
/// [docs]: https://docs.gitlab.com/ci/testing/code_quality/#code-quality-report-format
#[derive(Default, Serialize)]
struct Location {
    path: String,
    positions: Positions,
}

#[derive(Default, Serialize)]
struct Positions {
    begin: LineColumn,
    end: LineColumn,
}

/// Generate a unique fingerprint to identify a violation.
fn fingerprint(diagnostic: &Diagnostic, project_path: &str, salt: u64) -> u64 {
    let mut hasher = DefaultHasher::new();

    salt.hash(&mut hasher);
    diagnostic.name().hash(&mut hasher);
    project_path.hash(&mut hasher);

    hasher.finish()
}

/// Convert an absolute path to be relative to the specified project root.
fn relativize_path_to<P: AsRef<Path>, R: AsRef<Path>>(path: P, project_root: R) -> String {
    format!(
        "{}",
        pathdiff::diff_paths(&path, project_root)
            .expect("Could not diff paths")
            .display()
    )
}

#[cfg(test)]
mod tests {
    use crate::diagnostic::{
        DiagnosticFormat,
        render::tests::{create_diagnostics, create_syntax_error_diagnostics},
    };

    const FINGERPRINT_FILTERS: [(&str, &str); 1] = [(
        r#""fingerprint": "[a-z0-9]+","#,
        r#""fingerprint": "<redacted>","#,
    )];

    #[test]
    fn output() {
        let (env, diagnostics) = create_diagnostics(DiagnosticFormat::Gitlab);
        insta::with_settings!({filters => FINGERPRINT_FILTERS}, {
            insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
        });
    }

    #[test]
    fn syntax_errors() {
        let (env, diagnostics) = create_syntax_error_diagnostics(DiagnosticFormat::Gitlab);
        insta::with_settings!({filters => FINGERPRINT_FILTERS}, {
            insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
        });
    }
}
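The salted re-hash loop above guarantees fingerprint uniqueness within one report: on a collision, the previous fingerprint becomes the salt for the next attempt. A standalone sketch of the same idea, using only standard-library types:

```rust
use std::collections::HashSet;
use std::hash::{DefaultHasher, Hash, Hasher};

fn salted_hash(check_name: &str, path: &str, salt: u64) -> u64 {
    let mut hasher = DefaultHasher::new();
    salt.hash(&mut hasher);
    check_name.hash(&mut hasher);
    path.hash(&mut hasher);
    hasher.finish()
}

fn unique_fingerprint(check_name: &str, path: &str, seen: &mut HashSet<u64>) -> u64 {
    let mut fingerprint = salted_hash(check_name, path, 0);
    // `insert` returns `false` while the value is taken; re-salt until free.
    while !seen.insert(fingerprint) {
        fingerprint = salted_hash(check_name, path, fingerprint);
    }
    fingerprint
}
```

Two identical violations in the same file therefore still receive distinct fingerprints, which is what GitLab uses to track an issue across pipeline runs.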
@@ -1,63 +1,63 @@
---
-source: crates/ruff_linter/src/message/gitlab.rs
-expression: redact_fingerprint(&content)
+source: crates/ruff_db/src/diagnostic/render/gitlab.rs
+expression: env.render_diagnostics(&diagnostics)
---
[
  {
    "check_name": "F401",
    "description": "F401: `os` imported but unused",
+    "severity": "major",
    "fingerprint": "<redacted>",
    "location": {
      "path": "fib.py",
      "positions": {
        "begin": {
-          "column": 8,
-          "line": 1
+          "line": 1,
+          "column": 8
        },
        "end": {
-          "column": 10,
-          "line": 1
+          "line": 1,
+          "column": 10
        }
      }
-    },
-    "severity": "major"
+    }
  },
  {
    "check_name": "F841",
    "description": "F841: Local variable `x` is assigned to but never used",
+    "severity": "major",
    "fingerprint": "<redacted>",
    "location": {
      "path": "fib.py",
      "positions": {
        "begin": {
-          "column": 5,
-          "line": 6
+          "line": 6,
+          "column": 5
        },
        "end": {
-          "column": 6,
-          "line": 6
+          "line": 6,
+          "column": 6
        }
      }
-    },
-    "severity": "major"
+    }
  },
  {
    "check_name": "F821",
    "description": "F821: Undefined name `a`",
+    "severity": "major",
    "fingerprint": "<redacted>",
    "location": {
      "path": "undef.py",
      "positions": {
        "begin": {
-          "column": 4,
-          "line": 1
+          "line": 1,
+          "column": 4
        },
        "end": {
-          "column": 5,
-          "line": 1
+          "line": 1,
+          "column": 5
        }
      }
-    },
-    "severity": "major"
+    }
  }
]
@@ -1,44 +1,44 @@
---
-source: crates/ruff_linter/src/message/gitlab.rs
-expression: redact_fingerprint(&content)
+source: crates/ruff_db/src/diagnostic/render/gitlab.rs
+expression: env.render_diagnostics(&diagnostics)
---
[
  {
    "check_name": "invalid-syntax",
    "description": "invalid-syntax: Expected one or more symbol names after import",
+    "severity": "major",
    "fingerprint": "<redacted>",
    "location": {
      "path": "syntax_errors.py",
      "positions": {
        "begin": {
-          "column": 15,
-          "line": 1
+          "line": 1,
+          "column": 15
        },
        "end": {
-          "column": 1,
-          "line": 2
+          "line": 2,
+          "column": 1
        }
      }
-    },
-    "severity": "major"
+    }
  },
  {
    "check_name": "invalid-syntax",
    "description": "invalid-syntax: Expected ')', found newline",
+    "severity": "major",
    "fingerprint": "<redacted>",
    "location": {
      "path": "syntax_errors.py",
      "positions": {
        "begin": {
-          "column": 12,
-          "line": 3
+          "line": 3,
+          "column": 12
        },
        "end": {
-          "column": 1,
-          "line": 4
+          "line": 4,
+          "column": 1
        }
      }
-    },
-    "severity": "major"
+    }
  }
]
@@ -40,12 +40,13 @@ pub struct DiagnosticStylesheet {
    pub(crate) help: Style,
    pub(crate) line_no: Style,
    pub(crate) emphasis: Style,
    pub(crate) underline: Style,
    pub(crate) none: Style,
    pub(crate) separator: Style,
    pub(crate) secondary_code: Style,
    pub(crate) insertion: Style,
    pub(crate) deletion: Style,
+   pub(crate) insertion_line_no: Style,
+   pub(crate) deletion_line_no: Style,
}

impl Default for DiagnosticStylesheet {
@@ -66,12 +67,13 @@ impl DiagnosticStylesheet {
            help: AnsiColor::BrightCyan.on_default().effects(Effects::BOLD),
            line_no: bright_blue.effects(Effects::BOLD),
            emphasis: Style::new().effects(Effects::BOLD),
            underline: Style::new().effects(Effects::UNDERLINE),
            none: Style::new(),
            separator: AnsiColor::Cyan.on_default(),
            secondary_code: AnsiColor::Red.on_default().effects(Effects::BOLD),
            insertion: AnsiColor::Green.on_default(),
            deletion: AnsiColor::Red.on_default(),
+           insertion_line_no: AnsiColor::Green.on_default().effects(Effects::BOLD),
+           deletion_line_no: AnsiColor::Red.on_default().effects(Effects::BOLD),
        }
    }

@@ -84,12 +86,13 @@ impl DiagnosticStylesheet {
            help: Style::new(),
            line_no: Style::new(),
            emphasis: Style::new(),
            underline: Style::new(),
            none: Style::new(),
            separator: Style::new(),
            secondary_code: Style::new(),
            insertion: Style::new(),
            deletion: Style::new(),
+           insertion_line_no: Style::new(),
+           deletion_line_no: Style::new(),
        }
    }
}

@@ -81,14 +81,19 @@ impl IndentStyle {
    pub const fn is_space(&self) -> bool {
        matches!(self, IndentStyle::Space)
    }

+   /// Returns the string representation of the indent style.
+   pub const fn as_str(&self) -> &'static str {
+       match self {
+           IndentStyle::Tab => "tab",
+           IndentStyle::Space => "space",
+       }
+   }
}

impl std::fmt::Display for IndentStyle {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-       match self {
-           IndentStyle::Tab => std::write!(f, "tab"),
-           IndentStyle::Space => std::write!(f, "space"),
-       }
+       f.write_str(self.as_str())
    }
}

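With `Display` now delegating to `as_str`, the two spellings agree by construction. A quick check (not from the diff):

```rust
assert_eq!(IndentStyle::Tab.as_str(), "tab");
assert_eq!(IndentStyle::Tab.to_string(), "tab"); // via the new `Display` impl
```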
@@ -139,4 +139,16 @@ impl LineEnding {
            LineEnding::CarriageReturn => "\r",
        }
    }

+   /// Returns the string used to configure this line ending.
+   ///
+   /// See [`LineEnding::as_str`] for the actual string representation of the line ending.
+   #[inline]
+   pub const fn as_setting_str(&self) -> &'static str {
+       match self {
+           LineEnding::LineFeed => "lf",
+           LineEnding::CarriageReturnLineFeed => "crlf",
+           LineEnding::CarriageReturn => "cr",
+       }
+   }
}

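`as_setting_str` names the line ending the way a config file would, while the pre-existing `as_str` returns the literal bytes. A usage sketch:

```rust
assert_eq!(LineEnding::LineFeed.as_str(), "\n");
assert_eq!(LineEnding::LineFeed.as_setting_str(), "lf");
assert_eq!(LineEnding::CarriageReturnLineFeed.as_setting_str(), "crlf");
```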
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
-version = "0.12.10"
+version = "0.12.12"
publish = false
authors = { workspace = true }
edition = { workspace = true }
@@ -51,7 +51,6 @@ path-absolutize = { workspace = true, features = [
    "once_cell_cache",
    "use_unix_paths_on_wasm",
] }
-pathdiff = { workspace = true }
pep440_rs = { workspace = true }
pyproject-toml = { workspace = true }
regex = { workspace = true }

@@ -12,6 +12,7 @@ from airflow import (
from airflow.api_connexion.security import requires_access
from airflow.contrib.aws_athena_hook import AWSAthenaHook
from airflow.datasets import DatasetAliasEvent
+from airflow.operators.postgres_operator import Mapping
from airflow.operators.subdag import SubDagOperator
from airflow.secrets.cache import SecretCache
from airflow.secrets.local_filesystem import LocalFilesystemBackend
@@ -52,6 +53,8 @@ DatasetAliasEvent()
# airflow.operators.subdag.*
SubDagOperator()

+# airflow.operators.postgres_operator
+Mapping()

# airflow.secrets
# get_connection

@@ -70,7 +70,7 @@ from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context

# airflow.timetables.datasets
-DatasetOrTimeSchedule()
+DatasetOrTimeSchedule(datasets=[])

# airflow.utils.dag_parsing_context
get_parsing_context()

@@ -213,3 +213,17 @@ async def get_id_pydantic_full(
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{my_id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...

@app.get("/things/{ thing_id }")
async def read_thing(query: str):
    return {"query": query}


@app.get("/things/{ thing_id : path }")
async def read_thing(query: str):
    return {"query": query}


@app.get("/things/{ thing_id : str }")
async def read_thing(query: str):
    return {"query": query}

22
crates/ruff_linter/resources/test/fixtures/flake8_async/ASYNC250.py
vendored
Normal file
@@ -0,0 +1,22 @@
def foo():
    k = input() # Ok
    input("hello world") # Ok


async def foo():
    k = input() # ASYNC250
    input("hello world") # ASYNC250


import builtins

import fake


def foo():
    builtins.input("testing") # Ok


async def foo():
    builtins.input("testing") # ASYNC250
    fake.input("whatever") # Ok
@@ -75,3 +75,7 @@ list(map(lambda x, y: x, [(1, 2), (3, 4)]))
_ = t"{set(map(lambda x: x % 2 == 0, nums))}"
_ = t"{dict(map(lambda v: (v, v**2), nums))}"


+# See https://github.com/astral-sh/ruff/issues/20198
+# No error: lambda contains `yield`, so map() should not be rewritten
+map(lambda x: (yield x), [1, 2, 3])

@@ -23,3 +23,11 @@ def test_error():
    assert list([])
    assert set(set())
    assert tuple("")

+# https://github.com/astral-sh/ruff/issues/19935
+def test_all_ok():
+    assert f"{b""}"
+    assert f"{""=}"
+    assert f"{""!a}"
+    assert f"{""!r}"
+    assert f"{"":1}"

@@ -192,3 +192,24 @@ def issue_19005_3():
    c = {}
    for a[0], a[1] in ():
        c[a[0]] = a[1]


def issue_19153_1():
    v = {}
    for o, (x,) in ["ox"]:
        v[x,] = o
    return v


def issue_19153_2():
    v = {}
    for (o, p), x in [("op", "x")]:
        v[x] = o, p
    return v


def issue_19153_3():
    v = {}
    for o, (x,) in ["ox"]:
        v[(x,)] = o
    return v
@@ -190,3 +190,12 @@ foo_tooltip = (
    more

)

+# https://github.com/astral-sh/ruff/issues/20097
+def scope():
+    from collections.abc import Callable
+    from typing import ParamSpec
+
+    P = ParamSpec("P")
+    f1: Callable[P, str] = lambda x: str(x)
+    f2: Callable[..., str] = lambda x: str(x)

16
crates/ruff_linter/resources/test/fixtures/pyflakes/F401_35.py
vendored
Normal file
@@ -0,0 +1,16 @@
"""
Test: allowed-unused-imports-top-level-module
"""

# No errors

def f():
    import hvplot
def f():
    import hvplot.pandas
def f():
    import hvplot.pandas.plots
def f():
    from hvplot.pandas import scatter_matrix
def f():
    from hvplot.pandas.plots import scatter_matrix
@@ -4,6 +4,9 @@ print("שלום")
# E2502
example = "x" * 100 # "x" is assigned

+# E2502
+another = "x" * 50 # "x" is assigned
+
# E2502
if access_level != "none": # Check if admin ' and access_level != 'user
    print("You are an admin.")

@@ -141,3 +141,133 @@ class ExampleWithKeywords:

    def method3(self):
        super(ExampleWithKeywords, self).some_method() # Should be fixed - no keywords

# See: https://github.com/astral-sh/ruff/issues/19357
# Must be detected
class ParentD:
    def f(self):
        print("D")

class ChildD1(ParentD):
    def f(self):
        if False: __class__ # Python injects __class__ into scope
        builtins.super(ChildD1, self).f()

class ChildD2(ParentD):
    def f(self):
        if False: super # Python injects __class__ into scope
        builtins.super(ChildD2, self).f()

class ChildD3(ParentD):
    def f(self):
        builtins.super(ChildD3, self).f()
        super # Python injects __class__ into scope

import builtins as builtins_alias
class ChildD4(ParentD):
    def f(self):
        builtins_alias.super(ChildD4, self).f()
        super # Python injects __class__ into scope

class ChildD5(ParentD):
    def f(self):
        super = 1
        super # Python injects __class__ into scope
        builtins.super(ChildD5, self).f()

class ChildD6(ParentD):
    def f(self):
        super: "Any"
        __class__ # Python injects __class__ into scope
        builtins.super(ChildD6, self).f()

class ChildD7(ParentD):
    def f(self):
        def x():
            __class__ # Python injects __class__ into scope
        builtins.super(ChildD7, self).f()

class ChildD8(ParentD):
    def f(self):
        def x():
            super = 1
            super # Python injects __class__ into scope
        builtins.super(ChildD8, self).f()

class ChildD9(ParentD):
    def f(self):
        def x():
            __class__ = 1
            __class__ # Python injects __class__ into scope
        builtins.super(ChildD9, self).f()

class ChildD10(ParentD):
    def f(self):
        def x():
            __class__ = 1
            super # Python injects __class__ into scope
        builtins.super(ChildD10, self).f()


# Must be ignored
class ParentI:
    def f(self):
        print("I")

class ChildI1(ParentI):
    def f(self):
        builtins.super(ChildI1, self).f() # no __class__ in the local scope


class ChildI2(ParentI):
    def b(self):
        x = __class__
        if False: super

    def f(self):
        self.b()
        builtins.super(ChildI2, self).f() # no __class__ in the local scope

class ChildI3(ParentI):
    def f(self):
        if False: super
        def x(_):
            builtins.super(ChildI3, self).f() # no __class__ in the local scope
        x(None)

class ChildI4(ParentI):
    def f(self):
        super: "str"
        builtins.super(ChildI4, self).f() # no __class__ in the local scope

class ChildI5(ParentI):
    def f(self):
        super = 1
        __class__ = 3
        builtins.super(ChildI5, self).f() # no __class__ in the local scope

class ChildI6(ParentI):
    def f(self):
        __class__ = None
        __class__
        builtins.super(ChildI6, self).f() # no __class__ in the local scope

class ChildI7(ParentI):
    def f(self):
        __class__ = None
        super
        builtins.super(ChildI7, self).f()

class ChildI8(ParentI):
    def f(self):
        __class__: "Any"
        super
        builtins.super(ChildI8, self).f()

class ChildI9(ParentI):
    def f(self):
        class A:
            def foo(self):
                if False: super
                if False: __class__
        builtins.super(ChildI9, self).f()

59
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP043.pyi
vendored
Normal file
@@ -0,0 +1,59 @@
from collections.abc import Generator, AsyncGenerator


def func() -> Generator[int, None, None]:
    yield 42


def func() -> Generator[int, None]:
    yield 42


def func() -> Generator[int]:
    yield 42


def func() -> Generator[int, int, int]:
    foo = yield 42
    return foo


def func() -> Generator[int, int, None]:
    _ = yield 42
    return None


def func() -> Generator[int, None, int]:
    yield 42
    return 42


async def func() -> AsyncGenerator[int, None]:
    yield 42


async def func() -> AsyncGenerator[int]:
    yield 42


async def func() -> AsyncGenerator[int, int]:
    foo = yield 42
    return foo


from typing import Generator, AsyncGenerator


def func() -> Generator[str, None, None]:
    yield "hello"


async def func() -> AsyncGenerator[str, None]:
    yield "hello"


async def func() -> AsyncGenerator[ # type: ignore
    str,
    None
]:
    yield "hello"
@@ -107,3 +107,12 @@ deque(f"{x}" "") # OK
deque(t"")
deque(t"" t"")
deque(t"{""}") # OK

+# https://github.com/astral-sh/ruff/issues/20050
+deque(f"{""}") # RUF037
+
+deque(f"{b""}")
+deque(f"{""=}")
+deque(f"{""!a}")
+deque(f"{""!r}")
+deque(f"{"":1}")

@@ -42,3 +42,7 @@ b"a" in bytes("a", "utf-8")
1 in set(set([1]))
'' in {""}
frozenset() in {frozenset()}

+# https://github.com/astral-sh/ruff/issues/20238
+"b" in f"" "" # Error
+"b" in f"" "x" # OK

1
crates/ruff_linter/resources/test/fixtures/syntax_errors/yield_from_in_async_function.py
vendored
Normal file
@@ -0,0 +1 @@
async def f(): yield from x # error
@@ -8,7 +8,7 @@ use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::preview::{
-   is_assert_raises_exception_call_enabled, is_optional_as_none_in_union_enabled,
+   is_optional_as_none_in_union_enabled, is_unnecessary_default_type_args_stubs_enabled,
};
use crate::registry::Rule;
use crate::rules::{
@@ -142,7 +142,10 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
    }

    if checker.is_rule_enabled(Rule::UnnecessaryDefaultTypeArgs) {
-       if checker.target_version() >= PythonVersion::PY313 {
+       if checker.target_version() >= PythonVersion::PY313
+           || is_unnecessary_default_type_args_stubs_enabled(checker.settings())
+               && checker.semantic().in_stub_file()
+       {
            pyupgrade::rules::unnecessary_default_type_args(checker, expr);
        }
    }
@@ -673,6 +676,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
            ]) {
                flake8_async::rules::blocking_process_invocation(checker, call);
            }
+           if checker.is_rule_enabled(Rule::BlockingInputInAsyncFunction) {
+               flake8_async::rules::blocking_input(checker, call);
+           }
            if checker.is_rule_enabled(Rule::BlockingSleepInAsyncFunction) {
                flake8_async::rules::blocking_sleep(checker, call);
            }
@@ -1289,9 +1295,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
            if checker.is_rule_enabled(Rule::NonOctalPermissions) {
                ruff::rules::non_octal_permissions(checker, call);
            }
-           if checker.is_rule_enabled(Rule::AssertRaisesException)
-               && is_assert_raises_exception_call_enabled(checker.settings())
-           {
+           if checker.is_rule_enabled(Rule::AssertRaisesException) {
                flake8_bugbear::rules::assert_raises_exception_call(checker, call);
            }
@@ -1316,13 +1320,10 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
                pylint::rules::yield_in_init(checker, expr);
            }
        }
-       Expr::YieldFrom(yield_from) => {
+       Expr::YieldFrom(_) => {
            if checker.is_rule_enabled(Rule::YieldInInit) {
                pylint::rules::yield_in_init(checker, expr);
            }
-           if checker.is_rule_enabled(Rule::YieldFromInAsyncFunction) {
-               pylint::rules::yield_from_in_async_function(checker, yield_from);
-           }
        }
        Expr::FString(f_string_expr @ ast::ExprFString { value, .. }) => {
            if checker.is_rule_enabled(Rule::FStringMissingPlaceholders) {

|
||||
use crate::rules::pyflakes::rules::{
|
||||
LateFutureImport, ReturnOutsideFunction, YieldOutsideFunction,
|
||||
};
|
||||
use crate::rules::pylint::rules::{AwaitOutsideAsync, LoadBeforeGlobalDeclaration};
|
||||
use crate::rules::pylint::rules::{
|
||||
AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction,
|
||||
};
|
||||
use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
|
||||
use crate::settings::rule_table::RuleTable;
|
||||
use crate::settings::{LinterSettings, TargetVersion, flags};
|
||||
@@ -668,6 +670,12 @@ impl SemanticSyntaxContext for Checker<'_> {
|
||||
self.report_diagnostic(AwaitOutsideAsync, error.range);
|
||||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::YieldFromInAsyncFunction => {
|
||||
// PLE1700
|
||||
if self.is_rule_enabled(Rule::YieldFromInAsyncFunction) {
|
||||
self.report_diagnostic(YieldFromInAsyncFunction, error.range);
|
||||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::ReboundComprehensionVariable
|
||||
| SemanticSyntaxErrorKind::DuplicateTypeParameter
|
||||
| SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
|
||||
|
||||
@@ -334,13 +334,14 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Async, "109") => (RuleGroup::Stable, rules::flake8_async::rules::AsyncFunctionWithTimeout),
|
||||
(Flake8Async, "110") => (RuleGroup::Stable, rules::flake8_async::rules::AsyncBusyWait),
|
||||
(Flake8Async, "115") => (RuleGroup::Stable, rules::flake8_async::rules::AsyncZeroSleep),
|
||||
(Flake8Async, "116") => (RuleGroup::Preview, rules::flake8_async::rules::LongSleepNotForever),
|
||||
(Flake8Async, "116") => (RuleGroup::Stable, rules::flake8_async::rules::LongSleepNotForever),
|
||||
(Flake8Async, "210") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingHttpCallInAsyncFunction),
|
||||
(Flake8Async, "212") => (RuleGroup::Preview, rules::flake8_async::rules::BlockingHttpCallHttpxInAsyncFunction),
|
||||
(Flake8Async, "220") => (RuleGroup::Stable, rules::flake8_async::rules::CreateSubprocessInAsyncFunction),
|
||||
(Flake8Async, "221") => (RuleGroup::Stable, rules::flake8_async::rules::RunProcessInAsyncFunction),
|
||||
(Flake8Async, "222") => (RuleGroup::Stable, rules::flake8_async::rules::WaitForProcessInAsyncFunction),
|
||||
(Flake8Async, "230") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingOpenCallInAsyncFunction),
|
||||
(Flake8Async, "250") => (RuleGroup::Preview, rules::flake8_async::rules::BlockingInputInAsyncFunction),
|
||||
(Flake8Async, "251") => (RuleGroup::Stable, rules::flake8_async::rules::BlockingSleepInAsyncFunction),
|
||||
|
||||
// flake8-builtins
|
||||
@@ -562,7 +563,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pyupgrade, "035") => (RuleGroup::Stable, rules::pyupgrade::rules::DeprecatedImport),
|
||||
(Pyupgrade, "036") => (RuleGroup::Stable, rules::pyupgrade::rules::OutdatedVersionBlock),
|
||||
(Pyupgrade, "037") => (RuleGroup::Stable, rules::pyupgrade::rules::QuotedAnnotation),
|
||||
(Pyupgrade, "038") => (RuleGroup::Deprecated, rules::pyupgrade::rules::NonPEP604Isinstance),
|
||||
(Pyupgrade, "038") => (RuleGroup::Removed, rules::pyupgrade::rules::NonPEP604Isinstance),
|
||||
(Pyupgrade, "039") => (RuleGroup::Stable, rules::pyupgrade::rules::UnnecessaryClassParentheses),
|
||||
(Pyupgrade, "040") => (RuleGroup::Stable, rules::pyupgrade::rules::NonPEP695TypeAlias),
|
||||
(Pyupgrade, "041") => (RuleGroup::Stable, rules::pyupgrade::rules::TimeoutErrorAlias),
|
||||
@@ -573,7 +574,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pyupgrade, "046") => (RuleGroup::Stable, rules::pyupgrade::rules::NonPEP695GenericClass),
|
||||
(Pyupgrade, "047") => (RuleGroup::Stable, rules::pyupgrade::rules::NonPEP695GenericFunction),
|
||||
(Pyupgrade, "049") => (RuleGroup::Stable, rules::pyupgrade::rules::PrivateTypeParameter),
|
||||
(Pyupgrade, "050") => (RuleGroup::Preview, rules::pyupgrade::rules::UselessClassMetaclassType),
|
||||
(Pyupgrade, "050") => (RuleGroup::Stable, rules::pyupgrade::rules::UselessClassMetaclassType),
|
||||
|
||||
// pydocstyle
|
||||
(Pydocstyle, "100") => (RuleGroup::Stable, rules::pydocstyle::rules::UndocumentedPublicModule),
|
||||
@@ -772,7 +773,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(PandasVet, "013") => (RuleGroup::Stable, rules::pandas_vet::rules::PandasUseOfDotStack),
|
||||
(PandasVet, "015") => (RuleGroup::Stable, rules::pandas_vet::rules::PandasUseOfPdMerge),
|
||||
(PandasVet, "101") => (RuleGroup::Stable, rules::pandas_vet::rules::PandasNuniqueConstantSeriesCheck),
|
||||
(PandasVet, "901") => (RuleGroup::Deprecated, rules::pandas_vet::rules::PandasDfVariableName),
|
||||
(PandasVet, "901") => (RuleGroup::Removed, rules::pandas_vet::rules::PandasDfVariableName),
|
||||
|
||||
// flake8-errmsg
|
||||
(Flake8ErrMsg, "101") => (RuleGroup::Stable, rules::flake8_errmsg::rules::RawStringInException),
|
||||
@@ -829,8 +830,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Pyi, "056") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnsupportedMethodCallOnAll),
|
||||
(Flake8Pyi, "058") => (RuleGroup::Stable, rules::flake8_pyi::rules::GeneratorReturnFromIterMethod),
|
||||
(Flake8Pyi, "057") => (RuleGroup::Stable, rules::flake8_pyi::rules::ByteStringUsage),
|
||||
(Flake8Pyi, "059") => (RuleGroup::Preview, rules::flake8_pyi::rules::GenericNotLastBaseClass),
|
||||
(Flake8Pyi, "061") => (RuleGroup::Preview, rules::flake8_pyi::rules::RedundantNoneLiteral),
|
||||
(Flake8Pyi, "059") => (RuleGroup::Stable, rules::flake8_pyi::rules::GenericNotLastBaseClass),
|
||||
(Flake8Pyi, "061") => (RuleGroup::Stable, rules::flake8_pyi::rules::RedundantNoneLiteral),
|
||||
(Flake8Pyi, "062") => (RuleGroup::Stable, rules::flake8_pyi::rules::DuplicateLiteralMember),
|
||||
(Flake8Pyi, "063") => (RuleGroup::Stable, rules::flake8_pyi::rules::Pep484StylePositionalOnlyParameter),
|
||||
(Flake8Pyi, "064") => (RuleGroup::Stable, rules::flake8_pyi::rules::RedundantFinalLiteral),
|
||||
@@ -955,7 +956,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8UsePathlib, "207") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::Glob),
|
||||
(Flake8UsePathlib, "208") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsListdir),
|
||||
(Flake8UsePathlib, "210") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::InvalidPathlibWithSuffix),
|
||||
(Flake8UsePathlib, "211") => (RuleGroup::Preview, rules::flake8_use_pathlib::rules::OsSymlink),
|
||||
(Flake8UsePathlib, "211") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::OsSymlink),
|
||||
|
||||
// flake8-logging-format
|
||||
(Flake8LoggingFormat, "001") => (RuleGroup::Stable, rules::flake8_logging_format::violations::LoggingStringFormat),
|
||||
@@ -1031,7 +1032,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "039") => (RuleGroup::Preview, rules::ruff::rules::UnrawRePattern),
|
||||
(Ruff, "040") => (RuleGroup::Stable, rules::ruff::rules::InvalidAssertMessageLiteralArgument),
|
||||
(Ruff, "041") => (RuleGroup::Stable, rules::ruff::rules::UnnecessaryNestedLiteral),
|
||||
(Ruff, "043") => (RuleGroup::Preview, rules::ruff::rules::PytestRaisesAmbiguousPattern),
|
||||
(Ruff, "043") => (RuleGroup::Stable, rules::ruff::rules::PytestRaisesAmbiguousPattern),
|
||||
(Ruff, "045") => (RuleGroup::Preview, rules::ruff::rules::ImplicitClassVarInDataclass),
|
||||
(Ruff, "046") => (RuleGroup::Stable, rules::ruff::rules::UnnecessaryCastToInt),
|
||||
(Ruff, "047") => (RuleGroup::Preview, rules::ruff::rules::NeedlessElse),
|
||||
@@ -1045,7 +1046,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "056") => (RuleGroup::Preview, rules::ruff::rules::FalsyDictGetFallback),
|
||||
(Ruff, "057") => (RuleGroup::Stable, rules::ruff::rules::UnnecessaryRound),
|
||||
(Ruff, "058") => (RuleGroup::Stable, rules::ruff::rules::StarmapZip),
|
||||
(Ruff, "059") => (RuleGroup::Preview, rules::ruff::rules::UnusedUnpackedVariable),
|
||||
(Ruff, "059") => (RuleGroup::Stable, rules::ruff::rules::UnusedUnpackedVariable),
|
||||
(Ruff, "060") => (RuleGroup::Preview, rules::ruff::rules::InEmptyCollection),
|
||||
(Ruff, "061") => (RuleGroup::Preview, rules::ruff::rules::LegacyFormPytestRaises),
|
||||
(Ruff, "063") => (RuleGroup::Preview, rules::ruff::rules::AccessAnnotationsFromClassDict),
|
||||
@@ -1105,11 +1106,11 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
|
||||
// airflow
|
||||
(Airflow, "001") => (RuleGroup::Stable, rules::airflow::rules::AirflowVariableNameTaskIdMismatch),
|
||||
(Airflow, "002") => (RuleGroup::Preview, rules::airflow::rules::AirflowDagNoScheduleArgument),
|
||||
(Airflow, "301") => (RuleGroup::Preview, rules::airflow::rules::Airflow3Removal),
|
||||
(Airflow, "302") => (RuleGroup::Preview, rules::airflow::rules::Airflow3MovedToProvider),
|
||||
(Airflow, "311") => (RuleGroup::Preview, rules::airflow::rules::Airflow3SuggestedUpdate),
|
||||
(Airflow, "312") => (RuleGroup::Preview, rules::airflow::rules::Airflow3SuggestedToMoveToProvider),
|
||||
(Airflow, "002") => (RuleGroup::Stable, rules::airflow::rules::AirflowDagNoScheduleArgument),
|
||||
(Airflow, "301") => (RuleGroup::Stable, rules::airflow::rules::Airflow3Removal),
|
||||
(Airflow, "302") => (RuleGroup::Stable, rules::airflow::rules::Airflow3MovedToProvider),
|
||||
(Airflow, "311") => (RuleGroup::Stable, rules::airflow::rules::Airflow3SuggestedUpdate),
|
||||
(Airflow, "312") => (RuleGroup::Stable, rules::airflow::rules::Airflow3SuggestedToMoveToProvider),
|
||||
|
||||
// perflint
|
||||
(Perflint, "101") => (RuleGroup::Stable, rules::perflint::rules::UnnecessaryListCast),
|
||||
@@ -1136,7 +1137,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Refurb, "105") => (RuleGroup::Stable, rules::refurb::rules::PrintEmptyString),
|
||||
(Refurb, "110") => (RuleGroup::Preview, rules::refurb::rules::IfExpInsteadOfOrOperator),
|
||||
(Refurb, "113") => (RuleGroup::Preview, rules::refurb::rules::RepeatedAppend),
|
||||
(Refurb, "116") => (RuleGroup::Preview, rules::refurb::rules::FStringNumberFormat),
|
||||
(Refurb, "116") => (RuleGroup::Stable, rules::refurb::rules::FStringNumberFormat),
|
||||
(Refurb, "118") => (RuleGroup::Preview, rules::refurb::rules::ReimplementedOperator),
|
||||
(Refurb, "122") => (RuleGroup::Stable, rules::refurb::rules::ForLoopWrites),
|
||||
(Refurb, "129") => (RuleGroup::Stable, rules::refurb::rules::ReadlinesInFor),
|
||||
|
||||
@@ -58,13 +58,3 @@ pub fn relativize_path<P: AsRef<Path>>(path: P) -> String {
}
format!("{}", path.display())
}

/// Convert an absolute path to be relative to the specified project root.
pub fn relativize_path_to<P: AsRef<Path>, R: AsRef<Path>>(path: P, project_root: R) -> String {
format!(
"{}",
pathdiff::diff_paths(&path, project_root)
.expect("Could not diff paths")
.display()
)
}

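For context on the helper deleted above: `relativize_path_to` was a thin wrapper over `pathdiff::diff_paths`, which returns `None` when the two paths cannot be related. A minimal runnable sketch of the same behavior (the `relative_to` wrapper name is ours, not the crate's; assumes a `pathdiff` dependency):

```rust
use std::path::{Path, PathBuf};

// `pathdiff::diff_paths(path, base)` computes `path` relative to `base`,
// returning `None` when no relative path exists (e.g. different Windows
// drive prefixes).
fn relative_to(path: &Path, project_root: &Path) -> Option<PathBuf> {
    pathdiff::diff_paths(path, project_root)
}

fn main() {
    let rel = relative_to(Path::new("/repo/src/lib.rs"), Path::new("/repo"));
    assert_eq!(rel, Some(PathBuf::from("src/lib.rs")));
}
```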
@@ -1231,6 +1231,10 @@ mod tests {
)]
#[test_case(Rule::AwaitOutsideAsync, Path::new("await_outside_async_function.py"))]
#[test_case(Rule::AwaitOutsideAsync, Path::new("async_comprehension.py"))]
#[test_case(
Rule::YieldFromInAsyncFunction,
Path::new("yield_from_in_async_function.py")
)]
fn test_syntax_errors(rule: Rule, path: &Path) -> Result<()> {
let snapshot = path.to_string_lossy().to_string();
let path = Path::new("resources/test/fixtures/syntax_errors").join(path);

@@ -19,7 +19,7 @@ impl Emitter for GithubEmitter {
context: &EmitterContext,
) -> anyhow::Result<()> {
for diagnostic in diagnostics {
let source_location = diagnostic.expect_ruff_start_location();
let source_location = diagnostic.ruff_start_location().unwrap_or_default();
let filename = diagnostic.expect_ruff_filename();
let location = if context.is_notebook(&filename) {
// We can't give a reasonable location for the structured formats,
@@ -29,7 +29,7 @@ impl Emitter for GithubEmitter {
source_location
};

let end_location = diagnostic.expect_ruff_end_location();
let end_location = diagnostic.ruff_end_location().unwrap_or_default();

write!(
writer,

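The substitution above (repeated in the grouped and SARIF emitters further down) swaps a panicking `expect_*` accessor for an `Option`-returning one plus `unwrap_or_default()`. A self-contained sketch of the pattern, using a stand-in location type rather than Ruff's real one:

```rust
// Stand-in for a source location; `Default` plays the role of the
// "clearly a fallback" value the emitters print when a diagnostic has
// no resolvable location.
#[derive(Debug, Default, PartialEq)]
struct Location {
    line: usize,
    column: usize,
}

fn resolve(loc: Option<Location>) -> Location {
    // No panic: a missing location degrades to the Default value.
    loc.unwrap_or_default()
}

fn main() {
    assert_eq!(resolve(None), Location::default());
    assert_eq!(resolve(Some(Location { line: 3, column: 7 })).line, 3);
}
```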
@@ -1,174 +0,0 @@
use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::io::Write;

use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::json;

use ruff_db::diagnostic::Diagnostic;

use crate::fs::{relativize_path, relativize_path_to};
use crate::message::{Emitter, EmitterContext};

/// Generate JSON with violations in GitLab CI format
// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
pub struct GitlabEmitter {
project_dir: Option<String>,
}

impl Default for GitlabEmitter {
fn default() -> Self {
Self {
project_dir: std::env::var("CI_PROJECT_DIR").ok(),
}
}
}

impl Emitter for GitlabEmitter {
fn emit(
&mut self,
writer: &mut dyn Write,
diagnostics: &[Diagnostic],
context: &EmitterContext,
) -> anyhow::Result<()> {
serde_json::to_writer_pretty(
writer,
&SerializedMessages {
diagnostics,
context,
project_dir: self.project_dir.as_deref(),
},
)?;

Ok(())
}
}

struct SerializedMessages<'a> {
diagnostics: &'a [Diagnostic],
context: &'a EmitterContext<'a>,
project_dir: Option<&'a str>,
}

impl Serialize for SerializedMessages<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
let mut fingerprints = HashSet::<u64>::with_capacity(self.diagnostics.len());

for diagnostic in self.diagnostics {
let filename = diagnostic.expect_ruff_filename();

let (start_location, end_location) = if self.context.is_notebook(&filename) {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
Default::default()
} else {
(
diagnostic.expect_ruff_start_location(),
diagnostic.expect_ruff_end_location(),
)
};

let path = self.project_dir.as_ref().map_or_else(
|| relativize_path(&filename),
|project_dir| relativize_path_to(&filename, project_dir),
);

let mut message_fingerprint = fingerprint(diagnostic, &path, 0);

// Make sure that we do not get a fingerprint that is already in use
// by adding in the previously generated one.
while fingerprints.contains(&message_fingerprint) {
message_fingerprint = fingerprint(diagnostic, &path, message_fingerprint);
}
fingerprints.insert(message_fingerprint);

let description = diagnostic.body();
let check_name = diagnostic.secondary_code_or_id();

let value = json!({
"check_name": check_name,
// GitLab doesn't display the separate `check_name` field in a Code Quality report,
// so prepend it to the description too.
"description": format!("{check_name}: {description}"),
"severity": "major",
"fingerprint": format!("{:x}", message_fingerprint),
"location": {
"path": path,
"positions": {
"begin": start_location,
"end": end_location,
},
},
});

s.serialize_element(&value)?;
}

s.end()
}
}

/// Generate a unique fingerprint to identify a violation.
fn fingerprint(message: &Diagnostic, project_path: &str, salt: u64) -> u64 {
let mut hasher = DefaultHasher::new();

salt.hash(&mut hasher);
message.name().hash(&mut hasher);
project_path.hash(&mut hasher);

hasher.finish()
}

#[cfg(test)]
mod tests {
use insta::assert_snapshot;

use crate::message::GitlabEmitter;
use crate::message::tests::{
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
};

#[test]
fn output() {
let mut emitter = GitlabEmitter::default();
let content = capture_emitter_output(&mut emitter, &create_diagnostics());

assert_snapshot!(redact_fingerprint(&content));
}

#[test]
fn syntax_errors() {
let mut emitter = GitlabEmitter::default();
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());

assert_snapshot!(redact_fingerprint(&content));
}

// Redact the fingerprint because the default hasher isn't stable across platforms.
fn redact_fingerprint(content: &str) -> String {
static FINGERPRINT_HAY_KEY: &str = r#""fingerprint": ""#;

let mut output = String::with_capacity(content.len());
let mut last = 0;

for (start, _) in content.match_indices(FINGERPRINT_HAY_KEY) {
let fingerprint_hash_start = start + FINGERPRINT_HAY_KEY.len();
output.push_str(&content[last..fingerprint_hash_start]);
output.push_str("<redacted>");
last = fingerprint_hash_start
+ content[fingerprint_hash_start..]
.find('"')
.expect("Expected terminating quote");
}

output.push_str(&content[last..]);

output
}
}
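The subtlest piece of the deleted emitter is the fingerprint loop: GitLab deduplicates Code Quality findings by fingerprint, so two identical violations on the same path must still hash to distinct values, which the code achieves by re-hashing with the previous fingerprint as the salt. A runnable distillation of that scheme (the `fingerprint` helper here takes plain strings instead of a `Diagnostic`):

```rust
use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Mirrors the deleted `fingerprint` function: hash the salt, the rule
// name, and the project-relative path.
fn fingerprint(rule: &str, path: &str, salt: u64) -> u64 {
    let mut hasher = DefaultHasher::new();
    salt.hash(&mut hasher);
    rule.hash(&mut hasher);
    path.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut seen = HashSet::new();
    // Two identical violations on the same path would collide without
    // the salting loop.
    for _ in 0..2 {
        let mut fp = fingerprint("F401", "src/app.py", 0);
        while seen.contains(&fp) {
            fp = fingerprint("F401", "src/app.py", fp);
        }
        seen.insert(fp);
    }
    assert_eq!(seen.len(), 2);
}
```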
@@ -105,7 +105,7 @@ fn group_diagnostics_by_filename(
.or_insert_with(Vec::new)
.push(MessageWithLocation {
message: diagnostic,
start_location: diagnostic.expect_ruff_start_location(),
start_location: diagnostic.ruff_start_location().unwrap_or_default(),
});
}
grouped_messages

@@ -10,7 +10,6 @@ use ruff_db::diagnostic::{
use ruff_db::files::File;

pub use github::GithubEmitter;
pub use gitlab::GitlabEmitter;
pub use grouped::GroupedEmitter;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFile;
@@ -22,7 +21,6 @@ use crate::Fix;
use crate::registry::Rule;

mod github;
mod gitlab;
mod grouped;
mod sarif;
mod text;

@@ -158,8 +158,8 @@ struct SarifResult<'a> {
impl<'a> SarifResult<'a> {
#[cfg(not(target_arch = "wasm32"))]
fn from_message(message: &'a Diagnostic) -> Result<Self> {
let start_location = message.expect_ruff_start_location();
let end_location = message.expect_ruff_end_location();
let start_location = message.ruff_start_location().unwrap_or_default();
let end_location = message.ruff_end_location().unwrap_or_default();
let path = normalize_path(&*message.expect_ruff_filename());
Ok(Self {
code: RuleCode::from(message),
@@ -178,8 +178,8 @@ impl<'a> SarifResult<'a> {
#[cfg(target_arch = "wasm32")]
#[expect(clippy::unnecessary_wraps)]
fn from_message(message: &'a Diagnostic) -> Result<Self> {
let start_location = message.expect_ruff_start_location();
let end_location = message.expect_ruff_end_location();
let start_location = message.ruff_start_location().unwrap_or_default();
let end_location = message.ruff_end_location().unwrap_or_default();
let path = normalize_path(&*message.expect_ruff_filename());
Ok(Self {
code: RuleCode::from(message),

@@ -81,7 +81,7 @@ expression: value
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\n## Example\n\n```python\nimport numpy as np  # unused import\n\n\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n    print(\"numpy is installed\")\nelse:\n    print(\"numpy is not installed\")\n```\n\n## Preview\nWhen [preview](https://docs.astral.sh/ruff/preview/) is enabled,\nthe criterion for determining whether an import is first-party\nis stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np  # unused import\n\n\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n    return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n    print(\"numpy is installed\")\nelse:\n    print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
@@ -119,7 +119,7 @@ expression: value
},
{
"fullDescription": {
"text": "## What it does\nChecks for the presence of unused variables in function scopes.\n\n## Why is this bad?\nA variable that is defined but not used is likely a mistake, and should\nbe removed to avoid confusion.\n\nIf a variable is intentionally defined-but-not-used, it should be\nprefixed with an underscore, or some other value that adheres to the\n[`lint.dummy-variable-rgx`] pattern.\n\n## Example\n```python\ndef foo():\n    x = 1\n    y = 2\n    return x\n```\n\nUse instead:\n```python\ndef foo():\n    x = 1\n    return x\n```\n\n## Fix safety\n\nThis rule's fix is marked as unsafe because removing an unused variable assignment may\ndelete comments that are attached to the assignment.\n\n## Options\n- `lint.dummy-variable-rgx`\n"
"text": "## What it does\nChecks for the presence of unused variables in function scopes.\n\n## Why is this bad?\nA variable that is defined but not used is likely a mistake, and should\nbe removed to avoid confusion.\n\nIf a variable is intentionally defined-but-not-used, it should be\nprefixed with an underscore, or some other value that adheres to the\n[`lint.dummy-variable-rgx`] pattern.\n\n## Example\n```python\ndef foo():\n    x = 1\n    y = 2\n    return x\n```\n\nUse instead:\n```python\ndef foo():\n    x = 1\n    return x\n```\n\n## Fix safety\n\nThis rule's fix is marked as unsafe because removing an unused variable assignment may\ndelete comments that are attached to the assignment.\n\n## See also\n\nThis rule does not apply to bindings in unpacked assignments (e.g. `x, y = 1, 2`). See\n[`unused-unpacked-variable`][RUF059] for this case.\n\n## Options\n- `lint.dummy-variable-rgx`\n\n[RUF059]: https://docs.astral.sh/ruff/rules/unused-unpacked-variable/\n"
},
"help": {
"text": "Local variable `{name}` is assigned to but never used"

@@ -3,9 +3,9 @@ use std::io::Write;
use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
};
use ruff_diagnostics::Applicability;

use crate::message::{Emitter, EmitterContext};
use crate::settings::types::UnsafeFixes;

pub struct TextEmitter {
config: DisplayDiagnosticConfig,
@@ -46,10 +46,8 @@ impl TextEmitter {
}

#[must_use]
pub fn with_unsafe_fixes(mut self, unsafe_fixes: UnsafeFixes) -> Self {
self.config = self
.config
.fix_applicability(unsafe_fixes.required_applicability());
pub fn with_fix_applicability(mut self, applicability: Applicability) -> Self {
self.config = self.config.fix_applicability(applicability);
self
}

@@ -86,13 +84,13 @@ impl Emitter for TextEmitter {
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use ruff_diagnostics::Applicability;

use crate::message::TextEmitter;
use crate::message::tests::{
capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
create_notebook_diagnostics, create_syntax_error_diagnostics,
};
use crate::settings::types::UnsafeFixes;

#[test]
fn default() {
@@ -117,7 +115,7 @@ mod tests {
let mut emitter = TextEmitter::default()
.with_show_fix_status(true)
.with_show_source(true)
.with_unsafe_fixes(UnsafeFixes::Enabled);
.with_fix_applicability(Applicability::Unsafe);
let content = capture_emitter_output(&mut emitter, &create_diagnostics());

assert_snapshot!(content);
@@ -128,7 +126,7 @@ mod tests {
let mut emitter = TextEmitter::default()
.with_show_fix_status(true)
.with_show_source(true)
.with_unsafe_fixes(UnsafeFixes::Enabled);
.with_fix_applicability(Applicability::Unsafe);
let (messages, notebook_indexes) = create_notebook_diagnostics();
let content = capture_emitter_notebook_output(&mut emitter, &messages, &notebook_indexes);


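The `TextEmitter` change replaces the `UnsafeFixes`-flavored setter with one that takes the `Applicability` threshold directly, dropping the internal conversion. A sketch of the builder shape using stand-in types (not the real crate API):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Applicability {
    Safe,
    Unsafe,
}

struct TextEmitterSketch {
    fix_applicability: Applicability,
}

impl TextEmitterSketch {
    fn new() -> Self {
        Self { fix_applicability: Applicability::Safe }
    }

    // Callers now hand over the threshold directly, as in the tests above.
    #[must_use]
    fn with_fix_applicability(mut self, applicability: Applicability) -> Self {
        self.fix_applicability = applicability;
        self
    }
}

fn main() {
    let emitter = TextEmitterSketch::new().with_fix_applicability(Applicability::Unsafe);
    assert_eq!(emitter.fix_applicability, Applicability::Unsafe);
}
```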
@@ -11,11 +11,6 @@ pub(crate) const fn is_py314_support_enabled(settings: &LinterSettings) -> bool
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/16565
pub(crate) const fn is_full_path_match_source_strategy_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// Rule-specific behavior

// https://github.com/astral-sh/ruff/pull/15541
@@ -200,35 +195,11 @@ pub(crate) const fn is_allow_nested_roots_enabled(settings: &LinterSettings) ->
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18208
pub(crate) const fn is_multiple_with_statements_fix_safe_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18400
pub(crate) const fn is_ignore_init_files_in_useless_alias_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18572
pub(crate) const fn is_optional_as_none_in_union_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18547
pub(crate) const fn is_invalid_async_mock_access_check_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18867
pub(crate) const fn is_raise_exception_byte_string_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/18683
pub(crate) const fn is_safe_super_call_with_parameters_fix_enabled(
settings: &LinterSettings,
@@ -236,22 +207,14 @@ pub(crate) const fn is_safe_super_call_with_parameters_fix_enabled(
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19063
pub(crate) const fn is_assert_raises_exception_call_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19100
pub(crate) const fn is_add_future_annotations_imports_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19390
pub(crate) const fn is_trailing_comma_type_params_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/19851
pub(crate) const fn is_maxsplit_without_separator_fix_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/20027
pub(crate) const fn is_unnecessary_default_type_args_stubs_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}

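Every function deleted here is a gate of the same shape: a `const fn` forwarding to `settings.preview.is_enabled()`, removed once the behavior it guards is stabilized. A self-contained sketch of the pattern, with stand-ins for `LinterSettings` (the gate name here is hypothetical):

```rust
struct PreviewMode(bool);

impl PreviewMode {
    const fn is_enabled(&self) -> bool {
        self.0
    }
}

struct Settings {
    preview: PreviewMode,
}

// Shape of every gate in this file: preview on => gated behavior on.
const fn is_hypothetical_behavior_enabled(settings: &Settings) -> bool {
    settings.preview.is_enabled()
}

fn main() {
    let settings = Settings { preview: PreviewMode(true) };
    assert!(is_hypothetical_behavior_enabled(&settings));
}
```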
@@ -214,10 +214,8 @@ impl RuleSelector {
RuleGroup::Preview => {
preview_enabled && (self.is_exact() || !preview_require_explicit)
}
// Deprecated rules are excluded in preview mode and with 'All' option unless explicitly selected
RuleGroup::Deprecated => {
(!preview_enabled || self.is_exact()) && !matches!(self, RuleSelector::All)
}
// Deprecated rules are excluded by default unless explicitly selected
RuleGroup::Deprecated => !preview_enabled && self.is_exact(),
// Removed rules are included if explicitly selected but will error downstream
RuleGroup::Removed => self.is_exact(),
}

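The new `Deprecated` arm is strictly tighter than the old one: previously an exact selection kept a deprecated rule even under preview, with `!matches!(self, RuleSelector::All)` only excluding the `All` selector. Now a deprecated rule is selected only by an exact selector with preview off. A truth-table sketch of the new predicate:

```rust
// New gating for RuleGroup::Deprecated, extracted verbatim from the arm above.
fn deprecated_selected(preview_enabled: bool, is_exact: bool) -> bool {
    !preview_enabled && is_exact
}

fn main() {
    assert!(deprecated_selected(false, true)); // e.g. an exact `--select PD901`
    assert!(!deprecated_selected(true, true)); // exact select, but preview enabled
    assert!(!deprecated_selected(false, false)); // prefix or `ALL` selection
}
```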
@@ -37,7 +37,6 @@ pub(crate) enum Replacement {

#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) enum ProviderReplacement {
None,
AutoImport {
module: &'static str,
name: &'static str,

@@ -46,7 +46,7 @@ pub(crate) struct AirflowDagNoScheduleArgument;
impl Violation for AirflowDagNoScheduleArgument {
#[derive_message_formats]
fn message(&self) -> String {
"DAG should have an explicit `schedule` argument".to_string()
"`DAG` or `@dag` should have an explicit `schedule` argument".to_string()
}
}


@@ -13,13 +13,13 @@ use ruff_text_size::TextRange;
use crate::{FixAvailability, Violation};

/// ## What it does
/// Checks for uses of Airflow functions and values that have been moved to it providers.
/// (e.g., apache-airflow-providers-fab)
/// Checks for uses of Airflow functions and values that have been moved to its providers
/// (e.g., `apache-airflow-providers-fab`).
///
/// ## Why is this bad?
/// Airflow 3.0 moved various deprecated functions, members, and other
/// values to its providers. The user needs to install the corresponding provider and replace
/// the original usage with the one in the provider
/// the original usage with the one in the provider.
///
/// ## Example
/// ```python
@@ -50,9 +50,6 @@ impl Violation for Airflow3MovedToProvider<'_> {
replacement,
} = self;
match replacement {
ProviderReplacement::None => {
format!("`{deprecated}` is removed in Airflow 3.0")
}
ProviderReplacement::AutoImport {
name: _,
module: _,
@@ -85,7 +82,6 @@ impl Violation for Airflow3MovedToProvider<'_> {
provider,
version,
} => Some((module, name.as_str(), provider, version)),
ProviderReplacement::None => None,
} {
Some(format!(
"Install `apache-airflow-providers-{provider}>={version}` and use `{name}` from `{module}` instead."
@@ -1020,7 +1016,6 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
provider: "postgres",
version: "1.0.0",
},
["airflow", "operators", "postgres_operator", "Mapping"] => ProviderReplacement::None,

// apache-airflow-providers-presto
["airflow", "hooks", "presto_hook", "PrestoHook"] => ProviderReplacement::AutoImport {
@@ -1209,16 +1204,6 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
ProviderReplacement::SourceModuleMovedToProvider { module, name, .. } => {
(module, name.as_str())
}
ProviderReplacement::None => {
checker.report_diagnostic(
Airflow3MovedToProvider {
deprecated: qualified_name,
replacement,
},
ranged,
);
return;
}
};

if is_guarded_by_try_except(expr, module, name, checker.semantic()) {

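To make the `ProviderReplacement` plumbing concrete, here is a reduced sketch of how an `AutoImport` variant turns into the fix title shown to users. The enum is abridged to two variants and detached from the checker; only the message format mirrors the code above:

```rust
enum ProviderReplacementSketch {
    None,
    AutoImport {
        module: &'static str,
        name: &'static str,
        provider: &'static str,
        version: &'static str,
    },
}

// Mirrors the shape of `fix_title`: no replacement means no fix title.
fn fix_title(replacement: &ProviderReplacementSketch) -> Option<String> {
    match replacement {
        ProviderReplacementSketch::None => None,
        ProviderReplacementSketch::AutoImport { module, name, provider, version } => Some(format!(
            "Install `apache-airflow-providers-{provider}>={version}` and use `{name}` from `{module}` instead."
        )),
    }
}

fn main() {
    let replacement = ProviderReplacementSketch::AutoImport {
        module: "airflow.providers.postgres.hooks.postgres",
        name: "PostgresHook",
        provider: "postgres",
        version: "1.0.0",
    };
    assert!(fix_title(&replacement).unwrap().contains("apache-airflow-providers-postgres"));
}
```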
@@ -23,7 +23,7 @@ use ruff_text_size::TextRange;
/// ## Why is this bad?
/// Airflow 3.0 removed various deprecated functions, members, and other
/// values. Some have more modern replacements. Others are considered too niche
/// and not worth to be maintained in Airflow.
/// and not worth continued maintenance in Airflow.
///
/// ## Example
/// ```python
@@ -704,6 +704,7 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
["airflow", "operators", "subdag", ..] => {
Replacement::Message("The whole `airflow.subdag` module has been removed.")
}
["airflow", "operators", "postgres_operator", "Mapping"] => Replacement::None,
["airflow", "operators", "python", "get_current_context"] => Replacement::AutoImport {
module: "airflow.sdk",
name: "get_current_context",

@@ -65,9 +65,6 @@ impl Violation for Airflow3SuggestedToMoveToProvider<'_> {
replacement,
} = self;
match replacement {
ProviderReplacement::None => {
format!("`{deprecated}` is removed in Airflow 3.0")
}
ProviderReplacement::AutoImport {
name: _,
module: _,
@@ -91,7 +88,6 @@ impl Violation for Airflow3SuggestedToMoveToProvider<'_> {
fn fix_title(&self) -> Option<String> {
let Airflow3SuggestedToMoveToProvider { replacement, .. } = self;
match replacement {
ProviderReplacement::None => None,
ProviderReplacement::AutoImport {
module,
name,
@@ -319,16 +315,6 @@ fn check_names_moved_to_provider(checker: &Checker, expr: &Expr, ranged: TextRan
ProviderReplacement::SourceModuleMovedToProvider { module, name, .. } => {
(module, name.as_str())
}
ProviderReplacement::None => {
checker.report_diagnostic(
Airflow3SuggestedToMoveToProvider {
deprecated: qualified_name,
replacement: replacement.clone(),
},
ranged.range(),
);
return;
}
};

if is_guarded_by_try_except(expr, module, name, checker.semantic()) {

@@ -17,9 +17,9 @@ use ruff_text_size::TextRange;
/// ## Why is this bad?
/// Airflow 3.0 removed various deprecated functions, members, and other
/// values. Some have more modern replacements. Others are considered too niche
/// and not worth to be maintained in Airflow.
/// and not worth continued maintenance in Airflow.
/// Even though these symbols still work fine on Airflow 3.0, they are expected to be removed in a future version.
/// The user is suggested to replace the original usage with the new ones.
/// Where available, users should replace the removed functionality with the new alternatives.
///
/// ## Example
/// ```python
@@ -157,6 +157,9 @@ fn check_call_arguments(checker: &Checker, qualified_name: &QualifiedName, argum
["airflow", .., "DAG" | "dag"] => {
diagnostic_for_argument(checker, arguments, "sla_miss_callback", None);
}
["airflow", "timetables", "datasets", "DatasetOrTimeSchedule"] => {
diagnostic_for_argument(checker, arguments, "datasets", Some("assets"));
}
segments => {
if is_airflow_builtin_or_provider(segments, "operators", "Operator") {
diagnostic_for_argument(checker, arguments, "sla", None);

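The `DatasetOrTimeSchedule` arm added above follows the file's `diagnostic_for_argument` pattern: flag a removed keyword argument and, when one exists, name its replacement. A simplified, self-contained sketch (the signature here is ours; the real helper takes the checker and the call's AST arguments, and emits a diagnostic rather than returning a string):

```rust
fn diagnostic_for_argument(
    keywords: &[(&str, &str)],
    removed: &str,
    replacement: Option<&str>,
) -> Option<String> {
    // Only fire when the removed keyword is actually present in the call.
    keywords.iter().find(|(name, _)| *name == removed)?;
    Some(match replacement {
        Some(new) => format!("`{removed}` is removed in Airflow 3.0; use `{new}` instead"),
        None => format!("`{removed}` is removed in Airflow 3.0"),
    })
}

fn main() {
    let call = [("datasets", "[asset]")];
    assert!(diagnostic_for_argument(&call, "datasets", Some("assets")).is_some());
    assert!(diagnostic_for_argument(&call, "sla", None).is_none());
}
```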
@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/airflow/mod.rs
---
AIR002 DAG should have an explicit `schedule` argument
AIR002 `DAG` or `@dag` should have an explicit `schedule` argument
--> AIR002.py:4:1
|
2 | from airflow.timetables.simple import NullTimetable
@@ -12,7 +12,7 @@ AIR002 DAG should have an explicit `schedule` argument
6 | DAG(dag_id="class_schedule", schedule="@hourly")
|

AIR002 DAG should have an explicit `schedule` argument
AIR002 `DAG` or `@dag` should have an explicit `schedule` argument
--> AIR002.py:13:2
|
13 | @dag()

@@ -12,16 +12,14 @@ AIR301 [*] `schedule_interval` is removed in Airflow 3.0
|
||||
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
|
|
||||
help: Use `schedule` instead
|
||||
|
||||
ℹ Safe fix
|
||||
18 18 |
|
||||
19 19 | DAG(dag_id="class_schedule", schedule="@hourly")
|
||||
20 20 |
|
||||
21 |-DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
|
||||
21 |+DAG(dag_id="class_schedule_interval", schedule="@hourly")
|
||||
22 22 |
|
||||
23 23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
24 24 |
|
||||
18 |
|
||||
19 | DAG(dag_id="class_schedule", schedule="@hourly")
|
||||
20 |
|
||||
- DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
|
||||
21 + DAG(dag_id="class_schedule_interval", schedule="@hourly")
|
||||
22 |
|
||||
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
24 |
|
||||
|
||||
AIR301 [*] `timetable` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:23:31
|
||||
@@ -32,16 +30,14 @@ AIR301 [*] `timetable` is removed in Airflow 3.0
|
||||
| ^^^^^^^^^
|
||||
|
|
||||
help: Use `schedule` instead
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 |
|
||||
21 21 | DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
|
||||
22 22 |
|
||||
23 |-DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
23 |+DAG(dag_id="class_timetable", schedule=NullTimetable())
|
||||
24 24 |
|
||||
25 25 |
|
||||
26 26 | DAG(dag_id="class_fail_stop", fail_stop=True)
|
||||
20 |
|
||||
21 | DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
|
||||
22 |
|
||||
- DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
23 + DAG(dag_id="class_timetable", schedule=NullTimetable())
|
||||
24 |
|
||||
25 |
|
||||
26 | DAG(dag_id="class_fail_stop", fail_stop=True)
|
||||
|
||||
AIR301 [*] `fail_stop` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:26:31
|
||||
@@ -52,16 +48,14 @@ AIR301 [*] `fail_stop` is removed in Airflow 3.0
|
||||
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
|
||||
|
|
||||
help: Use `fail_fast` instead
|
||||
|
||||
ℹ Safe fix
|
||||
23 23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
24 24 |
|
||||
25 25 |
|
||||
26 |-DAG(dag_id="class_fail_stop", fail_stop=True)
|
||||
26 |+DAG(dag_id="class_fail_stop", fail_fast=True)
|
||||
27 27 |
|
||||
28 28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
|
||||
29 29 |
|
||||
23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
|
||||
24 |
|
||||
25 |
|
||||
- DAG(dag_id="class_fail_stop", fail_stop=True)
|
||||
26 + DAG(dag_id="class_fail_stop", fail_fast=True)
|
||||
27 |
|
||||
28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
|
||||
29 |
|
||||
|
||||
AIR301 `default_view` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:28:34
|
||||
@@ -94,16 +88,14 @@ AIR301 [*] `schedule_interval` is removed in Airflow 3.0
|
||||
43 | pass
|
||||
|
|
||||
help: Use `schedule` instead
|
||||
|
||||
ℹ Safe fix
|
||||
38 38 | pass
|
||||
39 39 |
|
||||
40 40 |
|
||||
41 |-@dag(schedule_interval="0 * * * *")
|
||||
41 |+@dag(schedule="0 * * * *")
|
||||
42 42 | def decorator_schedule_interval():
|
||||
43 43 | pass
|
||||
44 44 |
|
||||
38 | pass
|
||||
39 |
|
||||
40 |
|
||||
- @dag(schedule_interval="0 * * * *")
|
||||
41 + @dag(schedule="0 * * * *")
|
||||
42 | def decorator_schedule_interval():
|
||||
43 | pass
|
||||
44 |
|
||||
|
||||
AIR301 [*] `timetable` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:46:6
|
||||
@@ -114,16 +106,14 @@ AIR301 [*] `timetable` is removed in Airflow 3.0
|
||||
48 | pass
|
||||
|
|
||||
help: Use `schedule` instead
|
||||
|
||||
ℹ Safe fix
|
||||
43 43 | pass
|
||||
44 44 |
|
||||
45 45 |
|
||||
46 |-@dag(timetable=NullTimetable())
|
||||
46 |+@dag(schedule=NullTimetable())
|
||||
47 47 | def decorator_timetable():
|
||||
48 48 | pass
|
||||
49 49 |
|
||||
43 | pass
|
||||
44 |
|
||||
45 |
|
||||
- @dag(timetable=NullTimetable())
|
||||
46 + @dag(schedule=NullTimetable())
|
||||
47 | def decorator_timetable():
|
||||
48 | pass
|
||||
49 |
|
||||
|
||||
AIR301 [*] `execution_date` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:54:62
|
||||
@@ -136,16 +126,14 @@ AIR301 [*] `execution_date` is removed in Airflow 3.0
|
||||
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
||||
|
|
||||
help: Use `logical_date` instead
|
||||
|
||||
ℹ Safe fix
|
||||
51 51 | @dag()
|
||||
52 52 | def decorator_deprecated_operator_args():
|
||||
53 53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
|
||||
54 |- task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
54 |+ task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
|
||||
55 55 | )
|
||||
56 56 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
||||
57 57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
51 | @dag()
|
||||
52 | def decorator_deprecated_operator_args():
|
||||
53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
|
||||
- task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
54 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
|
||||
55 | )
|
||||
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
||||
57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
|
||||
AIR301 [*] `execution_date` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:57:62
|
||||
@@ -157,16 +145,14 @@ AIR301 [*] `execution_date` is removed in Airflow 3.0
|
||||
58 | )
|
||||
|
|
||||
help: Use `logical_date` instead
|
||||
|
||||
ℹ Safe fix
|
||||
54 54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
55 55 | )
|
||||
56 56 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
||||
57 |- task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
57 |+ task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
|
||||
58 58 | )
|
||||
59 59 |
|
||||
60 60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
55 | )
|
||||
56 | trigger_dagrun_op2 = TriggerDagRunOperator(
|
||||
- task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
|
||||
57 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
|
||||
58 | )
|
||||
59 |
|
||||
60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
|
||||
AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:61:33
|
||||
@@ -178,16 +164,14 @@ AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
|
|
||||
help: Use `use_task_logical_date` instead
|
||||
|
||||
ℹ Safe fix
|
||||
58 58 | )
|
||||
59 59 |
|
||||
60 60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
61 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
|
||||
61 |+ task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
|
||||
62 62 | )
|
||||
63 63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 64 | task_id="branch_dt_op2",
|
||||
58 | )
|
||||
59 |
|
||||
60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
|
||||
61 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
|
||||
62 | )
|
||||
63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 | task_id="branch_dt_op2",
|
||||
|
||||
AIR301 [*] `task_concurrency` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:61:62
|
||||
@@ -199,16 +183,14 @@ AIR301 [*] `task_concurrency` is removed in Airflow 3.0
|
||||
63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
|
|
||||
help: Use `max_active_tis_per_dag` instead
|
||||
|
||||
ℹ Safe fix
|
||||
58 58 | )
|
||||
59 59 |
|
||||
60 60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
61 |- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
|
||||
61 |+ task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
|
||||
62 62 | )
|
||||
63 63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 64 | task_id="branch_dt_op2",
|
||||
58 | )
|
||||
59 |
|
||||
60 | branch_dt_op = datetime.BranchDateTimeOperator(
|
||||
- task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
|
||||
61 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
|
||||
62 | )
|
||||
63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 | task_id="branch_dt_op2",
|
||||
|
||||
AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:65:9
|
||||
@@ -221,16 +203,14 @@ AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
67 | )
|
||||
|
|
||||
help: Use `use_task_logical_date` instead
|
||||
|
||||
ℹ Safe fix
|
||||
62 62 | )
|
||||
63 63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 64 | task_id="branch_dt_op2",
|
||||
65 |- use_task_execution_day=True,
|
||||
65 |+ use_task_logical_date=True,
|
||||
66 66 | sla=timedelta(seconds=10),
|
||||
67 67 | )
|
||||
68 68 |
|
||||
62 | )
|
||||
63 | branch_dt_op2 = BranchDateTimeOperator(
|
||||
64 | task_id="branch_dt_op2",
|
||||
- use_task_execution_day=True,
|
||||
65 + use_task_logical_date=True,
|
||||
66 | sla=timedelta(seconds=10),
|
||||
67 | )
|
||||
68 |
|
||||
|
||||
AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:92:9
|
||||
@@ -242,16 +222,14 @@ AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
|
||||
93 | )
|
||||
|
|
||||
help: Use `use_task_logical_date` instead
|
||||
|
||||
ℹ Safe fix
|
||||
89 89 | follow_task_ids_if_false=None,
|
||||
90 90 | follow_task_ids_if_true=None,
|
||||
91 91 | week_day=1,
|
||||
92 |- use_task_execution_day=True,
|
||||
92 |+ use_task_logical_date=True,
|
||||
93 93 | )
|
||||
94 94 |
|
||||
95 95 | trigger_dagrun_op >> trigger_dagrun_op2
|
||||
89 | follow_task_ids_if_false=None,
|
||||
90 | follow_task_ids_if_true=None,
|
||||
91 | week_day=1,
|
||||
- use_task_execution_day=True,
|
||||
92 + use_task_logical_date=True,
|
||||
93 | )
|
||||
94 |
|
||||
95 | trigger_dagrun_op >> trigger_dagrun_op2
|
||||
|
||||
AIR301 `filename_template` is removed in Airflow 3.0
|
||||
--> AIR301_args.py:102:15
|
||||
|
||||
@@ -11,16 +11,14 @@ AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
26 | dataset_from_root.iter_dataset_aliases()
|
||||
|
|
||||
help: Use `iter_assets` instead
|
||||
|
||||
ℹ Safe fix
|
||||
22 22 |
|
||||
23 23 | # airflow.Dataset
|
||||
24 24 | dataset_from_root = DatasetFromRoot()
|
||||
25 |-dataset_from_root.iter_datasets()
|
||||
25 |+dataset_from_root.iter_assets()
|
||||
26 26 | dataset_from_root.iter_dataset_aliases()
|
||||
27 27 |
|
||||
28 28 | # airflow.datasets
|
||||
22 |
|
||||
23 | # airflow.Dataset
|
||||
24 | dataset_from_root = DatasetFromRoot()
|
||||
- dataset_from_root.iter_datasets()
|
||||
25 + dataset_from_root.iter_assets()
|
||||
26 | dataset_from_root.iter_dataset_aliases()
|
||||
27 |
|
||||
28 | # airflow.datasets
|
||||
|
||||
AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:26:19
|
||||
@@ -33,16 +31,14 @@ AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
28 | # airflow.datasets
|
||||
|
|
||||
help: Use `iter_asset_aliases` instead
|
||||
|
||||
ℹ Safe fix
|
||||
23 23 | # airflow.Dataset
|
||||
24 24 | dataset_from_root = DatasetFromRoot()
|
||||
25 25 | dataset_from_root.iter_datasets()
|
||||
26 |-dataset_from_root.iter_dataset_aliases()
|
||||
26 |+dataset_from_root.iter_asset_aliases()
|
||||
27 27 |
|
||||
28 28 | # airflow.datasets
|
||||
29 29 | dataset_to_test_method_call = Dataset()
|
||||
23 | # airflow.Dataset
|
||||
24 | dataset_from_root = DatasetFromRoot()
|
||||
25 | dataset_from_root.iter_datasets()
|
||||
- dataset_from_root.iter_dataset_aliases()
|
||||
26 + dataset_from_root.iter_asset_aliases()
|
||||
27 |
|
||||
28 | # airflow.datasets
|
||||
29 | dataset_to_test_method_call = Dataset()
|
||||
|
||||
AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:30:29
|
||||
@@ -54,16 +50,14 @@ AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
31 | dataset_to_test_method_call.iter_dataset_aliases()
|
||||
|
|
||||
help: Use `iter_assets` instead
|
||||
|
||||
ℹ Safe fix
|
||||
27 27 |
|
||||
28 28 | # airflow.datasets
|
||||
29 29 | dataset_to_test_method_call = Dataset()
|
||||
30 |-dataset_to_test_method_call.iter_datasets()
|
||||
30 |+dataset_to_test_method_call.iter_assets()
|
||||
31 31 | dataset_to_test_method_call.iter_dataset_aliases()
|
||||
32 32 |
|
||||
33 33 | alias_to_test_method_call = DatasetAlias()
|
||||
27 |
|
||||
28 | # airflow.datasets
|
||||
29 | dataset_to_test_method_call = Dataset()
|
||||
- dataset_to_test_method_call.iter_datasets()
|
||||
30 + dataset_to_test_method_call.iter_assets()
|
||||
31 | dataset_to_test_method_call.iter_dataset_aliases()
|
||||
32 |
|
||||
33 | alias_to_test_method_call = DatasetAlias()
|
||||
|
||||
AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:31:29
|
||||
@@ -76,16 +70,14 @@ AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
33 | alias_to_test_method_call = DatasetAlias()
|
||||
|
|
||||
help: Use `iter_asset_aliases` instead
|
||||
|
||||
ℹ Safe fix
|
||||
28 28 | # airflow.datasets
|
||||
29 29 | dataset_to_test_method_call = Dataset()
|
||||
30 30 | dataset_to_test_method_call.iter_datasets()
|
||||
31 |-dataset_to_test_method_call.iter_dataset_aliases()
|
||||
31 |+dataset_to_test_method_call.iter_asset_aliases()
|
||||
32 32 |
|
||||
33 33 | alias_to_test_method_call = DatasetAlias()
|
||||
34 34 | alias_to_test_method_call.iter_datasets()
|
||||
28 | # airflow.datasets
|
||||
29 | dataset_to_test_method_call = Dataset()
|
||||
30 | dataset_to_test_method_call.iter_datasets()
|
||||
- dataset_to_test_method_call.iter_dataset_aliases()
|
||||
31 + dataset_to_test_method_call.iter_asset_aliases()
|
||||
32 |
|
||||
33 | alias_to_test_method_call = DatasetAlias()
|
||||
34 | alias_to_test_method_call.iter_datasets()
|
||||
|
||||
AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:34:27
|
||||
@@ -96,16 +88,14 @@ AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
35 | alias_to_test_method_call.iter_dataset_aliases()
|
||||
|
|
||||
help: Use `iter_assets` instead
|
||||
|
||||
ℹ Safe fix
|
||||
31 31 | dataset_to_test_method_call.iter_dataset_aliases()
|
||||
32 32 |
|
||||
33 33 | alias_to_test_method_call = DatasetAlias()
|
||||
34 |-alias_to_test_method_call.iter_datasets()
|
||||
34 |+alias_to_test_method_call.iter_assets()
|
||||
35 35 | alias_to_test_method_call.iter_dataset_aliases()
|
||||
36 36 |
|
||||
37 37 | any_to_test_method_call = DatasetAny()
|
||||
31 | dataset_to_test_method_call.iter_dataset_aliases()
|
||||
32 |
|
||||
33 | alias_to_test_method_call = DatasetAlias()
|
||||
- alias_to_test_method_call.iter_datasets()
|
||||
34 + alias_to_test_method_call.iter_assets()
|
||||
35 | alias_to_test_method_call.iter_dataset_aliases()
|
||||
36 |
|
||||
37 | any_to_test_method_call = DatasetAny()
|
||||
|
||||
AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:35:27
|
||||
@@ -118,16 +108,14 @@ AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
37 | any_to_test_method_call = DatasetAny()
|
||||
|
|
||||
help: Use `iter_asset_aliases` instead
|
||||
|
||||
ℹ Safe fix
|
||||
32 32 |
|
||||
33 33 | alias_to_test_method_call = DatasetAlias()
|
||||
34 34 | alias_to_test_method_call.iter_datasets()
|
||||
35 |-alias_to_test_method_call.iter_dataset_aliases()
|
||||
35 |+alias_to_test_method_call.iter_asset_aliases()
|
||||
36 36 |
|
||||
37 37 | any_to_test_method_call = DatasetAny()
|
||||
38 38 | any_to_test_method_call.iter_datasets()
|
||||
32 |
|
||||
33 | alias_to_test_method_call = DatasetAlias()
|
||||
34 | alias_to_test_method_call.iter_datasets()
|
||||
- alias_to_test_method_call.iter_dataset_aliases()
|
||||
35 + alias_to_test_method_call.iter_asset_aliases()
|
||||
36 |
|
||||
37 | any_to_test_method_call = DatasetAny()
|
||||
38 | any_to_test_method_call.iter_datasets()
|
||||
|
||||
AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:38:25
|
||||
@@ -138,16 +126,14 @@ AIR301 [*] `iter_datasets` is removed in Airflow 3.0
|
||||
39 | any_to_test_method_call.iter_dataset_aliases()
|
||||
|
|
||||
help: Use `iter_assets` instead
|
||||
|
||||
ℹ Safe fix
|
||||
35 35 | alias_to_test_method_call.iter_dataset_aliases()
|
||||
36 36 |
|
||||
37 37 | any_to_test_method_call = DatasetAny()
|
||||
38 |-any_to_test_method_call.iter_datasets()
|
||||
38 |+any_to_test_method_call.iter_assets()
|
||||
39 39 | any_to_test_method_call.iter_dataset_aliases()
|
||||
40 40 |
|
||||
41 41 | # airflow.datasets.manager
|
||||
35 | alias_to_test_method_call.iter_dataset_aliases()
|
||||
36 |
|
||||
37 | any_to_test_method_call = DatasetAny()
|
||||
- any_to_test_method_call.iter_datasets()
|
||||
38 + any_to_test_method_call.iter_assets()
|
||||
39 | any_to_test_method_call.iter_dataset_aliases()
|
||||
40 |
|
||||
41 | # airflow.datasets.manager
|
||||
|
||||
AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:39:25
|
||||
@@ -160,16 +146,14 @@ AIR301 [*] `iter_dataset_aliases` is removed in Airflow 3.0
|
||||
41 | # airflow.datasets.manager
|
||||
|
|
||||
help: Use `iter_asset_aliases` instead
|
||||
|
||||
ℹ Safe fix
|
||||
36 36 |
|
||||
37 37 | any_to_test_method_call = DatasetAny()
|
||||
38 38 | any_to_test_method_call.iter_datasets()
|
||||
39 |-any_to_test_method_call.iter_dataset_aliases()
|
||||
39 |+any_to_test_method_call.iter_asset_aliases()
|
||||
40 40 |
|
||||
41 41 | # airflow.datasets.manager
|
||||
42 42 | dm = DatasetManager()
|
||||
36 |
|
||||
37 | any_to_test_method_call = DatasetAny()
|
||||
38 | any_to_test_method_call.iter_datasets()
|
||||
- any_to_test_method_call.iter_dataset_aliases()
|
||||
39 + any_to_test_method_call.iter_asset_aliases()
|
||||
40 |
|
||||
41 | # airflow.datasets.manager
|
||||
42 | dm = DatasetManager()
|
||||
|
||||
AIR301 [*] `airflow.datasets.manager.DatasetManager` is removed in Airflow 3.0
|
||||
--> AIR301_class_attribute.py:42:6
|
||||
@@ -181,24 +165,22 @@ AIR301 [*] `airflow.datasets.manager.DatasetManager` is removed in Airflow 3.0
|
||||
44 | dm.create_datasets()
|
||||
|
|
||||
help: Use `AssetManager` from `airflow.assets.manager` instead.
|
||||
|
||||
ℹ Safe fix
|
||||
19 19 | from airflow.providers_manager import ProvidersManager
|
||||
20 20 | from airflow.secrets.base_secrets import BaseSecretsBackend
|
||||
21 21 | from airflow.secrets.local_filesystem import LocalFilesystemBackend
|
||||
22 |+from airflow.assets.manager import AssetManager
|
||||
22 23 |
|
||||
23 24 | # airflow.Dataset
|
||||
24 25 | dataset_from_root = DatasetFromRoot()
|
||||
19 | from airflow.providers_manager import ProvidersManager
|
||||
20 | from airflow.secrets.base_secrets import BaseSecretsBackend
|
||||
21 | from airflow.secrets.local_filesystem import LocalFilesystemBackend
|
||||
22 + from airflow.assets.manager import AssetManager
|
||||
23 |
|
||||
24 | # airflow.Dataset
|
||||
25 | dataset_from_root = DatasetFromRoot()
|
||||
--------------------------------------------------------------------------------
|
||||
39 40 | any_to_test_method_call.iter_dataset_aliases()
|
||||
40 41 |
|
||||
41 42 | # airflow.datasets.manager
|
||||
42 |-dm = DatasetManager()
|
||||
43 |+dm = AssetManager()
|
||||
43 44 | dm.register_dataset_change()
|
||||
44 45 | dm.create_datasets()
|
||||
45 46 | dm.notify_dataset_created()
|
||||
40 | any_to_test_method_call.iter_dataset_aliases()
|
||||
41 |
|
||||
42 | # airflow.datasets.manager
|
||||
- dm = DatasetManager()
|
||||
43 + dm = AssetManager()
|
||||
44 | dm.register_dataset_change()
|
||||
45 | dm.create_datasets()
|
||||
46 | dm.notify_dataset_created()
|
||||
|
||||
AIR301 [*] `register_dataset_change` is removed in Airflow 3.0
--> AIR301_class_attribute.py:43:4
@@ -211,16 +193,14 @@ AIR301 [*] `register_dataset_change` is removed in Airflow 3.0
45 | dm.notify_dataset_created()
|
help: Use `register_asset_change` instead

ℹ Safe fix
40 40 |
41 41 | # airflow.datasets.manager
42 42 | dm = DatasetManager()
43 |-dm.register_dataset_change()
43 |+dm.register_asset_change()
44 44 | dm.create_datasets()
45 45 | dm.notify_dataset_created()
46 46 | dm.notify_dataset_changed()
40 |
41 | # airflow.datasets.manager
42 | dm = DatasetManager()
- dm.register_dataset_change()
43 + dm.register_asset_change()
44 | dm.create_datasets()
45 | dm.notify_dataset_created()
46 | dm.notify_dataset_changed()

AIR301 [*] `create_datasets` is removed in Airflow 3.0
--> AIR301_class_attribute.py:44:4
@@ -233,16 +213,14 @@ AIR301 [*] `create_datasets` is removed in Airflow 3.0
46 | dm.notify_dataset_changed()
|
help: Use `create_assets` instead

ℹ Safe fix
41 41 | # airflow.datasets.manager
42 42 | dm = DatasetManager()
43 43 | dm.register_dataset_change()
44 |-dm.create_datasets()
44 |+dm.create_assets()
45 45 | dm.notify_dataset_created()
46 46 | dm.notify_dataset_changed()
47 47 | dm.notify_dataset_alias_created()
41 | # airflow.datasets.manager
42 | dm = DatasetManager()
43 | dm.register_dataset_change()
- dm.create_datasets()
44 + dm.create_assets()
45 | dm.notify_dataset_created()
46 | dm.notify_dataset_changed()
47 | dm.notify_dataset_alias_created()

AIR301 [*] `notify_dataset_created` is removed in Airflow 3.0
--> AIR301_class_attribute.py:45:4
@@ -255,16 +233,14 @@ AIR301 [*] `notify_dataset_created` is removed in Airflow 3.0
47 | dm.notify_dataset_alias_created()
|
help: Use `notify_asset_created` instead

ℹ Safe fix
42 42 | dm = DatasetManager()
43 43 | dm.register_dataset_change()
44 44 | dm.create_datasets()
45 |-dm.notify_dataset_created()
45 |+dm.notify_asset_created()
46 46 | dm.notify_dataset_changed()
47 47 | dm.notify_dataset_alias_created()
48 48 |
42 | dm = DatasetManager()
43 | dm.register_dataset_change()
44 | dm.create_datasets()
- dm.notify_dataset_created()
45 + dm.notify_asset_created()
46 | dm.notify_dataset_changed()
47 | dm.notify_dataset_alias_created()
48 |

AIR301 [*] `notify_dataset_changed` is removed in Airflow 3.0
--> AIR301_class_attribute.py:46:4
@@ -276,16 +252,14 @@ AIR301 [*] `notify_dataset_changed` is removed in Airflow 3.0
47 | dm.notify_dataset_alias_created()
|
help: Use `notify_asset_changed` instead

ℹ Safe fix
43 43 | dm.register_dataset_change()
44 44 | dm.create_datasets()
45 45 | dm.notify_dataset_created()
46 |-dm.notify_dataset_changed()
46 |+dm.notify_asset_changed()
47 47 | dm.notify_dataset_alias_created()
48 48 |
49 49 | # airflow.lineage.hook
43 | dm.register_dataset_change()
44 | dm.create_datasets()
45 | dm.notify_dataset_created()
- dm.notify_dataset_changed()
46 + dm.notify_asset_changed()
47 | dm.notify_dataset_alias_created()
48 |
49 | # airflow.lineage.hook
AIR301 [*] `notify_dataset_alias_created` is removed in Airflow 3.0
--> AIR301_class_attribute.py:47:4
@@ -298,16 +272,14 @@ AIR301 [*] `notify_dataset_alias_created` is removed in Airflow 3.0
49 | # airflow.lineage.hook
|
help: Use `notify_asset_alias_created` instead

ℹ Safe fix
44 44 | dm.create_datasets()
45 45 | dm.notify_dataset_created()
46 46 | dm.notify_dataset_changed()
47 |-dm.notify_dataset_alias_created()
47 |+dm.notify_asset_alias_created()
48 48 |
49 49 | # airflow.lineage.hook
50 50 | dl_info = DatasetLineageInfo()
44 | dm.create_datasets()
45 | dm.notify_dataset_created()
46 | dm.notify_dataset_changed()
- dm.notify_dataset_alias_created()
47 + dm.notify_asset_alias_created()
48 |
49 | # airflow.lineage.hook
50 | dl_info = DatasetLineageInfo()

AIR301 [*] `airflow.lineage.hook.DatasetLineageInfo` is removed in Airflow 3.0
--> AIR301_class_attribute.py:50:11
@@ -318,25 +290,23 @@ AIR301 [*] `airflow.lineage.hook.DatasetLineageInfo` is removed in Airflow 3.0
51 | dl_info.dataset
|
help: Use `AssetLineageInfo` from `airflow.lineage.hook` instead.

ℹ Safe fix
9 9 | DatasetAny,
10 10 | )
11 11 | from airflow.datasets.manager import DatasetManager
12 |-from airflow.lineage.hook import DatasetLineageInfo, HookLineageCollector
12 |+from airflow.lineage.hook import DatasetLineageInfo, HookLineageCollector, AssetLineageInfo
13 13 | from airflow.providers.amazon.aws.auth_manager.aws_auth_manager import AwsAuthManager
14 14 | from airflow.providers.apache.beam.hooks import BeamHook, NotAir302HookError
15 15 | from airflow.providers.google.cloud.secrets.secret_manager import (
9 | DatasetAny,
10 | )
11 | from airflow.datasets.manager import DatasetManager
- from airflow.lineage.hook import DatasetLineageInfo, HookLineageCollector
12 + from airflow.lineage.hook import DatasetLineageInfo, HookLineageCollector, AssetLineageInfo
13 | from airflow.providers.amazon.aws.auth_manager.aws_auth_manager import AwsAuthManager
14 | from airflow.providers.apache.beam.hooks import BeamHook, NotAir302HookError
15 | from airflow.providers.google.cloud.secrets.secret_manager import (
--------------------------------------------------------------------------------
47 47 | dm.notify_dataset_alias_created()
48 48 |
49 49 | # airflow.lineage.hook
50 |-dl_info = DatasetLineageInfo()
50 |+dl_info = AssetLineageInfo()
51 51 | dl_info.dataset
52 52 |
53 53 | hlc = HookLineageCollector()
47 | dm.notify_dataset_alias_created()
48 |
49 | # airflow.lineage.hook
- dl_info = DatasetLineageInfo()
50 + dl_info = AssetLineageInfo()
51 | dl_info.dataset
52 |
53 | hlc = HookLineageCollector()
AIR301 [*] `dataset` is removed in Airflow 3.0
--> AIR301_class_attribute.py:51:9
@@ -349,16 +319,14 @@ AIR301 [*] `dataset` is removed in Airflow 3.0
53 | hlc = HookLineageCollector()
|
help: Use `asset` instead

ℹ Safe fix
48 48 |
49 49 | # airflow.lineage.hook
50 50 | dl_info = DatasetLineageInfo()
51 |-dl_info.dataset
51 |+dl_info.asset
52 52 |
53 53 | hlc = HookLineageCollector()
54 54 | hlc.create_dataset()
48 |
49 | # airflow.lineage.hook
50 | dl_info = DatasetLineageInfo()
- dl_info.dataset
51 + dl_info.asset
52 |
53 | hlc = HookLineageCollector()
54 | hlc.create_dataset()

AIR301 [*] `create_dataset` is removed in Airflow 3.0
--> AIR301_class_attribute.py:54:5
@@ -370,16 +338,14 @@ AIR301 [*] `create_dataset` is removed in Airflow 3.0
56 | hlc.add_output_dataset()
|
help: Use `create_asset` instead

ℹ Safe fix
51 51 | dl_info.dataset
52 52 |
53 53 | hlc = HookLineageCollector()
54 |-hlc.create_dataset()
54 |+hlc.create_asset()
55 55 | hlc.add_input_dataset()
56 56 | hlc.add_output_dataset()
57 57 | hlc.collected_datasets()
51 | dl_info.dataset
52 |
53 | hlc = HookLineageCollector()
- hlc.create_dataset()
54 + hlc.create_asset()
55 | hlc.add_input_dataset()
56 | hlc.add_output_dataset()
57 | hlc.collected_datasets()

AIR301 [*] `add_input_dataset` is removed in Airflow 3.0
--> AIR301_class_attribute.py:55:5
@@ -392,16 +358,14 @@ AIR301 [*] `add_input_dataset` is removed in Airflow 3.0
57 | hlc.collected_datasets()
|
help: Use `add_input_asset` instead

ℹ Safe fix
52 52 |
53 53 | hlc = HookLineageCollector()
54 54 | hlc.create_dataset()
55 |-hlc.add_input_dataset()
55 |+hlc.add_input_asset()
56 56 | hlc.add_output_dataset()
57 57 | hlc.collected_datasets()
58 58 |
52 |
53 | hlc = HookLineageCollector()
54 | hlc.create_dataset()
- hlc.add_input_dataset()
55 + hlc.add_input_asset()
56 | hlc.add_output_dataset()
57 | hlc.collected_datasets()
58 |
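The lineage-collector renames above map old names to new one-for-one. A hedged sketch of the mapping in user code (the argument-less calls mirror the fixture; real calls take lineage arguments):

```python
from airflow.lineage.hook import HookLineageCollector

hlc = HookLineageCollector()
# Airflow 2.x name         -> Airflow 3.x name (per the AIR301 fixes above)
# hlc.create_dataset()     -> hlc.create_asset()
# hlc.add_input_dataset()  -> hlc.add_input_asset()
# hlc.add_output_dataset() -> hlc.add_output_asset()
# hlc.collected_datasets() -> hlc.collected_assets()
```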
AIR301 [*] `add_output_dataset` is removed in Airflow 3.0
--> AIR301_class_attribute.py:56:5
@@ -413,16 +377,14 @@ AIR301 [*] `add_output_dataset` is removed in Airflow 3.0
57 | hlc.collected_datasets()
|
help: Use `add_output_asset` instead

ℹ Safe fix
53 53 | hlc = HookLineageCollector()
54 54 | hlc.create_dataset()
55 55 | hlc.add_input_dataset()
56 |-hlc.add_output_dataset()
56 |+hlc.add_output_asset()
57 57 | hlc.collected_datasets()
58 58 |
59 59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
53 | hlc = HookLineageCollector()
54 | hlc.create_dataset()
55 | hlc.add_input_dataset()
- hlc.add_output_dataset()
56 + hlc.add_output_asset()
57 | hlc.collected_datasets()
58 |
59 | # airflow.providers.amazon.auth_manager.aws_auth_manager

AIR301 [*] `collected_datasets` is removed in Airflow 3.0
--> AIR301_class_attribute.py:57:5
@@ -435,16 +397,14 @@ AIR301 [*] `collected_datasets` is removed in Airflow 3.0
59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
|
help: Use `collected_assets` instead

ℹ Safe fix
54 54 | hlc.create_dataset()
55 55 | hlc.add_input_dataset()
56 56 | hlc.add_output_dataset()
57 |-hlc.collected_datasets()
57 |+hlc.collected_assets()
58 58 |
59 59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
60 60 | aam = AwsAuthManager()
54 | hlc.create_dataset()
55 | hlc.add_input_dataset()
56 | hlc.add_output_dataset()
- hlc.collected_datasets()
57 + hlc.collected_assets()
58 |
59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
60 | aam = AwsAuthManager()

AIR301 [*] `is_authorized_dataset` is removed in Airflow 3.0
--> AIR301_class_attribute.py:61:5
@@ -457,16 +417,14 @@ AIR301 [*] `is_authorized_dataset` is removed in Airflow 3.0
63 | # airflow.providers.apache.beam.hooks
|
help: Use `is_authorized_asset` instead

ℹ Safe fix
58 58 |
59 59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
60 60 | aam = AwsAuthManager()
61 |-aam.is_authorized_dataset()
61 |+aam.is_authorized_asset()
62 62 |
63 63 | # airflow.providers.apache.beam.hooks
64 64 | # check get_conn_uri is caught if the class inherits from an airflow hook
58 |
59 | # airflow.providers.amazon.auth_manager.aws_auth_manager
60 | aam = AwsAuthManager()
- aam.is_authorized_dataset()
61 + aam.is_authorized_asset()
62 |
63 | # airflow.providers.apache.beam.hooks
64 | # check get_conn_uri is caught if the class inherits from an airflow hook
AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
--> AIR301_class_attribute.py:73:13
@@ -478,16 +436,14 @@ AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
74 | csm_backend.get_connections()
|
help: Use `get_conn_value` instead

ℹ Safe fix
70 70 |
71 71 | # airflow.providers.google.cloud.secrets.secret_manager
72 72 | csm_backend = CloudSecretManagerBackend()
73 |-csm_backend.get_conn_uri()
73 |+csm_backend.get_conn_value()
74 74 | csm_backend.get_connections()
75 75 |
76 76 | # airflow.providers.hashicorp.secrets.vault
70 |
71 | # airflow.providers.google.cloud.secrets.secret_manager
72 | csm_backend = CloudSecretManagerBackend()
- csm_backend.get_conn_uri()
73 + csm_backend.get_conn_value()
74 | csm_backend.get_connections()
75 |
76 | # airflow.providers.hashicorp.secrets.vault

AIR301 [*] `get_connections` is removed in Airflow 3.0
--> AIR301_class_attribute.py:74:13
@@ -500,16 +456,14 @@ AIR301 [*] `get_connections` is removed in Airflow 3.0
76 | # airflow.providers.hashicorp.secrets.vault
|
help: Use `get_connection` instead

ℹ Safe fix
71 71 | # airflow.providers.google.cloud.secrets.secret_manager
72 72 | csm_backend = CloudSecretManagerBackend()
73 73 | csm_backend.get_conn_uri()
74 |-csm_backend.get_connections()
74 |+csm_backend.get_connection()
75 75 |
76 76 | # airflow.providers.hashicorp.secrets.vault
77 77 | vault_backend = VaultBackend()
71 | # airflow.providers.google.cloud.secrets.secret_manager
72 | csm_backend = CloudSecretManagerBackend()
73 | csm_backend.get_conn_uri()
- csm_backend.get_connections()
74 + csm_backend.get_connection()
75 |
76 | # airflow.providers.hashicorp.secrets.vault
77 | vault_backend = VaultBackend()

AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
--> AIR301_class_attribute.py:78:15
@@ -521,16 +475,14 @@ AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
79 | vault_backend.get_connections()
|
help: Use `get_conn_value` instead

ℹ Safe fix
75 75 |
76 76 | # airflow.providers.hashicorp.secrets.vault
77 77 | vault_backend = VaultBackend()
78 |-vault_backend.get_conn_uri()
78 |+vault_backend.get_conn_value()
79 79 | vault_backend.get_connections()
80 80 |
81 81 | not_an_error = NotAir302SecretError()
75 |
76 | # airflow.providers.hashicorp.secrets.vault
77 | vault_backend = VaultBackend()
- vault_backend.get_conn_uri()
78 + vault_backend.get_conn_value()
79 | vault_backend.get_connections()
80 |
81 | not_an_error = NotAir302SecretError()

AIR301 [*] `get_connections` is removed in Airflow 3.0
--> AIR301_class_attribute.py:79:15
@@ -543,16 +495,14 @@ AIR301 [*] `get_connections` is removed in Airflow 3.0
81 | not_an_error = NotAir302SecretError()
|
help: Use `get_connection` instead

ℹ Safe fix
76 76 | # airflow.providers.hashicorp.secrets.vault
77 77 | vault_backend = VaultBackend()
78 78 | vault_backend.get_conn_uri()
79 |-vault_backend.get_connections()
79 |+vault_backend.get_connection()
80 80 |
81 81 | not_an_error = NotAir302SecretError()
82 82 | not_an_error.get_conn_uri()
76 | # airflow.providers.hashicorp.secrets.vault
77 | vault_backend = VaultBackend()
78 | vault_backend.get_conn_uri()
- vault_backend.get_connections()
79 + vault_backend.get_connection()
80 |
81 | not_an_error = NotAir302SecretError()
82 | not_an_error.get_conn_uri()
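All secrets backends share the same two renames, so one hedged sketch covers the group (the `conn_id` value is illustrative, not from the fixtures):

```python
from airflow.secrets.base_secrets import BaseSecretsBackend

backend = BaseSecretsBackend()
value = backend.get_conn_value(conn_id="my_conn")  # replaces get_conn_uri()
conn = backend.get_connection(conn_id="my_conn")   # replaces get_connections()
```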
AIR301 [*] `initialize_providers_dataset_uri_resources` is removed in Airflow 3.0
--> AIR301_class_attribute.py:86:4
@@ -565,16 +515,14 @@ AIR301 [*] `initialize_providers_dataset_uri_resources` is removed in Airflow 3.
88 | pm.dataset_uri_handlers
|
help: Use `initialize_providers_asset_uri_resources` instead

ℹ Safe fix
83 83 |
84 84 | # airflow.providers_manager
85 85 | pm = ProvidersManager()
86 |-pm.initialize_providers_dataset_uri_resources()
86 |+pm.initialize_providers_asset_uri_resources()
87 87 | pm.dataset_factories
88 88 | pm.dataset_uri_handlers
89 89 | pm.dataset_to_openlineage_converters
83 |
84 | # airflow.providers_manager
85 | pm = ProvidersManager()
- pm.initialize_providers_dataset_uri_resources()
86 + pm.initialize_providers_asset_uri_resources()
87 | pm.dataset_factories
88 | pm.dataset_uri_handlers
89 | pm.dataset_to_openlineage_converters

AIR301 [*] `dataset_factories` is removed in Airflow 3.0
--> AIR301_class_attribute.py:87:4
@@ -587,16 +535,14 @@ AIR301 [*] `dataset_factories` is removed in Airflow 3.0
89 | pm.dataset_to_openlineage_converters
|
help: Use `asset_factories` instead

ℹ Safe fix
84 84 | # airflow.providers_manager
85 85 | pm = ProvidersManager()
86 86 | pm.initialize_providers_dataset_uri_resources()
87 |-pm.dataset_factories
87 |+pm.asset_factories
88 88 | pm.dataset_uri_handlers
89 89 | pm.dataset_to_openlineage_converters
90 90 |
84 | # airflow.providers_manager
85 | pm = ProvidersManager()
86 | pm.initialize_providers_dataset_uri_resources()
- pm.dataset_factories
87 + pm.asset_factories
88 | pm.dataset_uri_handlers
89 | pm.dataset_to_openlineage_converters
90 |

AIR301 [*] `dataset_uri_handlers` is removed in Airflow 3.0
--> AIR301_class_attribute.py:88:4
@@ -608,16 +554,14 @@ AIR301 [*] `dataset_uri_handlers` is removed in Airflow 3.0
89 | pm.dataset_to_openlineage_converters
|
help: Use `asset_uri_handlers` instead

ℹ Safe fix
85 85 | pm = ProvidersManager()
86 86 | pm.initialize_providers_dataset_uri_resources()
87 87 | pm.dataset_factories
88 |-pm.dataset_uri_handlers
88 |+pm.asset_uri_handlers
89 89 | pm.dataset_to_openlineage_converters
90 90 |
91 91 | # airflow.secrets.base_secrets
85 | pm = ProvidersManager()
86 | pm.initialize_providers_dataset_uri_resources()
87 | pm.dataset_factories
- pm.dataset_uri_handlers
88 + pm.asset_uri_handlers
89 | pm.dataset_to_openlineage_converters
90 |
91 | # airflow.secrets.base_secrets

AIR301 [*] `dataset_to_openlineage_converters` is removed in Airflow 3.0
--> AIR301_class_attribute.py:89:4
@@ -630,16 +574,14 @@ AIR301 [*] `dataset_to_openlineage_converters` is removed in Airflow 3.0
91 | # airflow.secrets.base_secrets
|
help: Use `asset_to_openlineage_converters` instead

ℹ Safe fix
86 86 | pm.initialize_providers_dataset_uri_resources()
87 87 | pm.dataset_factories
88 88 | pm.dataset_uri_handlers
89 |-pm.dataset_to_openlineage_converters
89 |+pm.asset_to_openlineage_converters
90 90 |
91 91 | # airflow.secrets.base_secrets
92 92 | base_secret_backend = BaseSecretsBackend()
86 | pm.initialize_providers_dataset_uri_resources()
87 | pm.dataset_factories
88 | pm.dataset_uri_handlers
- pm.dataset_to_openlineage_converters
89 + pm.asset_to_openlineage_converters
90 |
91 | # airflow.secrets.base_secrets
92 | base_secret_backend = BaseSecretsBackend()
AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
--> AIR301_class_attribute.py:93:21
@@ -651,16 +593,14 @@ AIR301 [*] `get_conn_uri` is removed in Airflow 3.0
94 | base_secret_backend.get_connections()
|
help: Use `get_conn_value` instead

ℹ Safe fix
90 90 |
91 91 | # airflow.secrets.base_secrets
92 92 | base_secret_backend = BaseSecretsBackend()
93 |-base_secret_backend.get_conn_uri()
93 |+base_secret_backend.get_conn_value()
94 94 | base_secret_backend.get_connections()
95 95 |
96 96 | # airflow.secrets.local_filesystem
90 |
91 | # airflow.secrets.base_secrets
92 | base_secret_backend = BaseSecretsBackend()
- base_secret_backend.get_conn_uri()
93 + base_secret_backend.get_conn_value()
94 | base_secret_backend.get_connections()
95 |
96 | # airflow.secrets.local_filesystem

AIR301 [*] `get_connections` is removed in Airflow 3.0
--> AIR301_class_attribute.py:94:21
@@ -673,16 +613,14 @@ AIR301 [*] `get_connections` is removed in Airflow 3.0
96 | # airflow.secrets.local_filesystem
|
help: Use `get_connection` instead

ℹ Safe fix
91 91 | # airflow.secrets.base_secrets
92 92 | base_secret_backend = BaseSecretsBackend()
93 93 | base_secret_backend.get_conn_uri()
94 |-base_secret_backend.get_connections()
94 |+base_secret_backend.get_connection()
95 95 |
96 96 | # airflow.secrets.local_filesystem
97 97 | lfb = LocalFilesystemBackend()
91 | # airflow.secrets.base_secrets
92 | base_secret_backend = BaseSecretsBackend()
93 | base_secret_backend.get_conn_uri()
- base_secret_backend.get_connections()
94 + base_secret_backend.get_connection()
95 |
96 | # airflow.secrets.local_filesystem
97 | lfb = LocalFilesystemBackend()

AIR301 [*] `get_connections` is removed in Airflow 3.0
--> AIR301_class_attribute.py:98:5
@@ -693,10 +631,8 @@ AIR301 [*] `get_connections` is removed in Airflow 3.0
| ^^^^^^^^^^^^^^^
|
help: Use `get_connection` instead

ℹ Safe fix
95 95 |
96 96 | # airflow.secrets.local_filesystem
97 97 | lfb = LocalFilesystemBackend()
98 |-lfb.get_connections()
98 |+lfb.get_connection()
95 |
96 | # airflow.secrets.local_filesystem
97 | lfb = LocalFilesystemBackend()
- lfb.get_connections()
98 + lfb.get_connection()
@@ -337,16 +337,14 @@ AIR301 [*] `schedule_interval` is removed in Airflow 3.0
113 | template_searchpath=["/templates"],
|
help: Use `schedule` instead

ℹ Safe fix
108 108 |
109 109 | with DAG(
110 110 | dag_id="example_dag",
111 |- schedule_interval="@daily",
111 |+ schedule="@daily",
112 112 | start_date=datetime(2023, 1, 1),
113 113 | template_searchpath=["/templates"],
114 114 | ) as dag:
108 |
109 | with DAG(
110 | dag_id="example_dag",
- schedule_interval="@daily",
111 + schedule="@daily",
112 | start_date=datetime(2023, 1, 1),
113 | template_searchpath=["/templates"],
114 | ) as dag:

AIR301 `next_ds` is removed in Airflow 3.0
--> AIR301_context.py:135:23
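The `schedule_interval` fix is a straight keyword rename; a minimal runnable sketch mirroring the fixture:

```python
from datetime import datetime

from airflow import DAG

# Airflow 3.0 drops `schedule_interval`; `schedule` takes the same cron/preset values.
with DAG(
    dag_id="example_dag",
    schedule="@daily",  # was: schedule_interval="@daily"
    start_date=datetime(2023, 1, 1),
) as dag:
    ...
```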
@@ -2,351 +2,362 @@
source: crates/ruff_linter/src/rules/airflow/mod.rs
---
AIR301 `airflow.PY36` is removed in Airflow 3.0
--> AIR301_names.py:39:1
--> AIR301_names.py:40:1
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead
AIR301 `airflow.PY37` is removed in Airflow 3.0
--> AIR301_names.py:39:7
--> AIR301_names.py:40:7
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead

AIR301 `airflow.PY38` is removed in Airflow 3.0
--> AIR301_names.py:39:13
--> AIR301_names.py:40:13
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead

AIR301 `airflow.PY39` is removed in Airflow 3.0
--> AIR301_names.py:39:19
--> AIR301_names.py:40:19
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead

AIR301 `airflow.PY310` is removed in Airflow 3.0
--> AIR301_names.py:39:25
--> AIR301_names.py:40:25
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead

AIR301 `airflow.PY311` is removed in Airflow 3.0
--> AIR301_names.py:39:32
--> AIR301_names.py:40:32
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead

AIR301 `airflow.PY312` is removed in Airflow 3.0
--> AIR301_names.py:39:39
--> AIR301_names.py:40:39
|
38 | # airflow root
39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
| ^^^^^
40 |
41 | # airflow.api_connexion.security
41 |
42 | # airflow.api_connexion.security
|
help: Use `sys.version_info` instead
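The suggested replacement for all the `PY3x` constants is the standard-library version check; a minimal sketch:

```python
import sys

# Airflow 3.0 removes the airflow.PY36..PY312 constants; test the interpreter directly.
if sys.version_info >= (3, 12):  # was: if PY312:
    ...
```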
AIR301 `airflow.api_connexion.security.requires_access` is removed in Airflow 3.0
--> AIR301_names.py:42:1
--> AIR301_names.py:43:1
|
41 | # airflow.api_connexion.security
42 | requires_access
42 | # airflow.api_connexion.security
43 | requires_access
| ^^^^^^^^^^^^^^^
43 |
44 | # airflow.contrib.*
44 |
45 | # airflow.contrib.*
|
help: Use `airflow.api_fastapi.core_api.security.requires_access_*` instead

AIR301 `airflow.contrib.aws_athena_hook.AWSAthenaHook` is removed in Airflow 3.0
--> AIR301_names.py:45:1
--> AIR301_names.py:46:1
|
44 | # airflow.contrib.*
45 | AWSAthenaHook()
45 | # airflow.contrib.*
46 | AWSAthenaHook()
| ^^^^^^^^^^^^^
|
help: The whole `airflow.contrib` module has been removed.

AIR301 `airflow.datasets.DatasetAliasEvent` is removed in Airflow 3.0
--> AIR301_names.py:49:1
--> AIR301_names.py:50:1
|
48 | # airflow.datasets
49 | DatasetAliasEvent()
49 | # airflow.datasets
50 | DatasetAliasEvent()
| ^^^^^^^^^^^^^^^^^
|

AIR301 `airflow.operators.subdag.SubDagOperator` is removed in Airflow 3.0
--> AIR301_names.py:53:1
--> AIR301_names.py:54:1
|
52 | # airflow.operators.subdag.*
53 | SubDagOperator()
53 | # airflow.operators.subdag.*
54 | SubDagOperator()
| ^^^^^^^^^^^^^^
55 |
56 | # airflow.operators.postgres_operator
|
help: The whole `airflow.subdag` module has been removed.
AIR301 [*] `airflow.secrets.cache.SecretCache` is removed in Airflow 3.0
--> AIR301_names.py:61:1
AIR301 `airflow.operators.postgres_operator.Mapping` is removed in Airflow 3.0
--> AIR301_names.py:57:1
|
60 | # airflow.secrets.cache
61 | SecretCache()
56 | # airflow.operators.postgres_operator
57 | Mapping()
| ^^^^^^^
58 |
59 | # airflow.secrets
|

AIR301 [*] `airflow.secrets.cache.SecretCache` is removed in Airflow 3.0
--> AIR301_names.py:64:1
|
63 | # airflow.secrets.cache
64 | SecretCache()
| ^^^^^^^^^^^
|
help: Use `SecretCache` from `airflow.sdk` instead.

ℹ Unsafe fix
13 13 | from airflow.contrib.aws_athena_hook import AWSAthenaHook
14 14 | from airflow.datasets import DatasetAliasEvent
15 15 | from airflow.operators.subdag import SubDagOperator
16 |-from airflow.secrets.cache import SecretCache
17 16 | from airflow.secrets.local_filesystem import LocalFilesystemBackend
18 17 | from airflow.triggers.external_task import TaskStateTrigger
19 18 | from airflow.utils import dates
14 | from airflow.datasets import DatasetAliasEvent
15 | from airflow.operators.postgres_operator import Mapping
16 | from airflow.operators.subdag import SubDagOperator
- from airflow.secrets.cache import SecretCache
17 | from airflow.secrets.local_filesystem import LocalFilesystemBackend
18 | from airflow.triggers.external_task import TaskStateTrigger
19 | from airflow.utils import dates
--------------------------------------------------------------------------------
34 33 | from airflow.utils.trigger_rule import TriggerRule
35 34 | from airflow.www.auth import has_access, has_access_dataset
36 35 | from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key
36 |+from airflow.sdk import SecretCache
37 37 |
38 38 | # airflow root
39 39 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
34 | from airflow.utils.trigger_rule import TriggerRule
35 | from airflow.www.auth import has_access, has_access_dataset
36 | from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key
37 + from airflow.sdk import SecretCache
38 |
39 | # airflow root
40 | PY36, PY37, PY38, PY39, PY310, PY311, PY312
note: This is an unsafe fix and may change runtime behavior
AIR301 `airflow.triggers.external_task.TaskStateTrigger` is removed in Airflow 3.0
--> AIR301_names.py:65:1
|
64 | # airflow.triggers.external_task
65 | TaskStateTrigger()
| ^^^^^^^^^^^^^^^^
66 |
67 | # airflow.utils.date
|

AIR301 `airflow.utils.dates.date_range` is removed in Airflow 3.0
--> AIR301_names.py:68:1
|
67 | # airflow.utils.date
68 | dates.date_range
67 | # airflow.triggers.external_task
68 | TaskStateTrigger()
| ^^^^^^^^^^^^^^^^
69 | dates.days_ago
69 |
70 | # airflow.utils.date
|

AIR301 `airflow.utils.dates.days_ago` is removed in Airflow 3.0
--> AIR301_names.py:69:1
|
67 | # airflow.utils.date
68 | dates.date_range
69 | dates.days_ago
| ^^^^^^^^^^^^^^
70 |
71 | date_range
|
help: Use `pendulum.today('UTC').add(days=-N, ...)` instead

AIR301 `airflow.utils.dates.date_range` is removed in Airflow 3.0
--> AIR301_names.py:71:1
|
69 | dates.days_ago
70 |
71 | date_range
| ^^^^^^^^^^
72 | days_ago
73 | infer_time_unit
70 | # airflow.utils.date
71 | dates.date_range
| ^^^^^^^^^^^^^^^^
72 | dates.days_ago
|

AIR301 `airflow.utils.dates.days_ago` is removed in Airflow 3.0
--> AIR301_names.py:72:1
|
71 | date_range
72 | days_ago
70 | # airflow.utils.date
71 | dates.date_range
72 | dates.days_ago
| ^^^^^^^^^^^^^^
73 |
74 | date_range
|
help: Use `pendulum.today('UTC').add(days=-N, ...)` instead

AIR301 `airflow.utils.dates.date_range` is removed in Airflow 3.0
--> AIR301_names.py:74:1
|
72 | dates.days_ago
73 |
74 | date_range
| ^^^^^^^^^^
75 | days_ago
76 | infer_time_unit
|

AIR301 `airflow.utils.dates.days_ago` is removed in Airflow 3.0
--> AIR301_names.py:75:1
|
74 | date_range
75 | days_ago
| ^^^^^^^^
73 | infer_time_unit
74 | parse_execution_date
76 | infer_time_unit
77 | parse_execution_date
|
help: Use `pendulum.today('UTC').add(days=-N, ...)` instead
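The `days_ago` replacement the diagnostics suggest looks like this in practice (a minimal sketch; `-7` is an illustrative offset):

```python
import pendulum

# Replacement for airflow.utils.dates.days_ago, per the AIR301 help text:
start = pendulum.today("UTC").add(days=-7)  # was: days_ago(7)
```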
AIR301 `airflow.utils.dates.infer_time_unit` is removed in Airflow 3.0
--> AIR301_names.py:73:1
--> AIR301_names.py:76:1
|
71 | date_range
72 | days_ago
73 | infer_time_unit
74 | date_range
75 | days_ago
76 | infer_time_unit
| ^^^^^^^^^^^^^^^
74 | parse_execution_date
75 | round_time
77 | parse_execution_date
78 | round_time
|

AIR301 `airflow.utils.dates.parse_execution_date` is removed in Airflow 3.0
--> AIR301_names.py:74:1
--> AIR301_names.py:77:1
|
72 | days_ago
73 | infer_time_unit
74 | parse_execution_date
75 | days_ago
76 | infer_time_unit
77 | parse_execution_date
| ^^^^^^^^^^^^^^^^^^^^
75 | round_time
76 | scale_time_units
78 | round_time
79 | scale_time_units
|

AIR301 `airflow.utils.dates.round_time` is removed in Airflow 3.0
--> AIR301_names.py:75:1
--> AIR301_names.py:78:1
|
73 | infer_time_unit
74 | parse_execution_date
75 | round_time
76 | infer_time_unit
77 | parse_execution_date
78 | round_time
| ^^^^^^^^^^
76 | scale_time_units
79 | scale_time_units
|

AIR301 `airflow.utils.dates.scale_time_units` is removed in Airflow 3.0
--> AIR301_names.py:76:1
--> AIR301_names.py:79:1
|
74 | parse_execution_date
75 | round_time
76 | scale_time_units
77 | parse_execution_date
78 | round_time
79 | scale_time_units
| ^^^^^^^^^^^^^^^^
77 |
78 | # This one was not deprecated.
80 |
81 | # This one was not deprecated.
|

AIR301 `airflow.utils.dag_cycle_tester.test_cycle` is removed in Airflow 3.0
--> AIR301_names.py:83:1
--> AIR301_names.py:86:1
|
82 | # airflow.utils.dag_cycle_tester
83 | test_cycle
85 | # airflow.utils.dag_cycle_tester
86 | test_cycle
| ^^^^^^^^^^
|
AIR301 `airflow.utils.db.create_session` is removed in Airflow 3.0
--> AIR301_names.py:87:1
--> AIR301_names.py:90:1
|
86 | # airflow.utils.db
87 | create_session
89 | # airflow.utils.db
90 | create_session
| ^^^^^^^^^^^^^^
88 |
89 | # airflow.utils.decorators
91 |
92 | # airflow.utils.decorators
|

AIR301 `airflow.utils.decorators.apply_defaults` is removed in Airflow 3.0
--> AIR301_names.py:90:1
--> AIR301_names.py:93:1
|
89 | # airflow.utils.decorators
90 | apply_defaults
92 | # airflow.utils.decorators
93 | apply_defaults
| ^^^^^^^^^^^^^^
91 |
92 | # airflow.utils.file
94 |
95 | # airflow.utils.file
|
help: `apply_defaults` is now unconditionally done and can be safely removed.

AIR301 `airflow.utils.file.mkdirs` is removed in Airflow 3.0
--> AIR301_names.py:93:1
--> AIR301_names.py:96:1
|
92 | # airflow.utils.file
93 | mkdirs
95 | # airflow.utils.file
96 | mkdirs
| ^^^^^^
|
help: Use `pathlib.Path({path}).mkdir` instead

AIR301 `airflow.utils.state.SHUTDOWN` is removed in Airflow 3.0
--> AIR301_names.py:97:1
|
96 | # airflow.utils.state
97 | SHUTDOWN
| ^^^^^^^^
98 | terminating_states
|
--> AIR301_names.py:100:1
|
99 | # airflow.utils.state
100 | SHUTDOWN
| ^^^^^^^^
101 | terminating_states
|

AIR301 `airflow.utils.state.terminating_states` is removed in Airflow 3.0
--> AIR301_names.py:98:1
--> AIR301_names.py:101:1
|
96 | # airflow.utils.state
97 | SHUTDOWN
98 | terminating_states
99 | # airflow.utils.state
100 | SHUTDOWN
101 | terminating_states
| ^^^^^^^^^^^^^^^^^^
99 |
100 | # airflow.utils.trigger_rule
102 |
103 | # airflow.utils.trigger_rule
|

AIR301 `airflow.utils.trigger_rule.TriggerRule.DUMMY` is removed in Airflow 3.0
--> AIR301_names.py:101:1
--> AIR301_names.py:104:1
|
100 | # airflow.utils.trigger_rule
101 | TriggerRule.DUMMY
103 | # airflow.utils.trigger_rule
104 | TriggerRule.DUMMY
| ^^^^^^^^^^^^^^^^^
102 | TriggerRule.NONE_FAILED_OR_SKIPPED
105 | TriggerRule.NONE_FAILED_OR_SKIPPED
|

AIR301 `airflow.utils.trigger_rule.TriggerRule.NONE_FAILED_OR_SKIPPED` is removed in Airflow 3.0
--> AIR301_names.py:102:1
--> AIR301_names.py:105:1
|
100 | # airflow.utils.trigger_rule
101 | TriggerRule.DUMMY
102 | TriggerRule.NONE_FAILED_OR_SKIPPED
103 | # airflow.utils.trigger_rule
104 | TriggerRule.DUMMY
105 | TriggerRule.NONE_FAILED_OR_SKIPPED
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
AIR301 `airflow.www.auth.has_access` is removed in Airflow 3.0
--> AIR301_names.py:106:1
--> AIR301_names.py:109:1
|
105 | # airflow.www.auth
106 | has_access
108 | # airflow.www.auth
109 | has_access
| ^^^^^^^^^^
107 | has_access_dataset
110 | has_access_dataset
|

AIR301 `airflow.www.auth.has_access_dataset` is removed in Airflow 3.0
--> AIR301_names.py:107:1
--> AIR301_names.py:110:1
|
105 | # airflow.www.auth
106 | has_access
107 | has_access_dataset
108 | # airflow.www.auth
109 | has_access
110 | has_access_dataset
| ^^^^^^^^^^^^^^^^^^
108 |
109 | # airflow.www.utils
111 |
112 | # airflow.www.utils
|

AIR301 `airflow.www.utils.get_sensitive_variables_fields` is removed in Airflow 3.0
--> AIR301_names.py:110:1
--> AIR301_names.py:113:1
|
109 | # airflow.www.utils
110 | get_sensitive_variables_fields
112 | # airflow.www.utils
113 | get_sensitive_variables_fields
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
111 | should_hide_value_for_key
114 | should_hide_value_for_key
|

AIR301 `airflow.www.utils.should_hide_value_for_key` is removed in Airflow 3.0
--> AIR301_names.py:111:1
--> AIR301_names.py:114:1
|
109 | # airflow.www.utils
110 | get_sensitive_variables_fields
111 | should_hide_value_for_key
112 | # airflow.www.utils
113 | get_sensitive_variables_fields
114 | should_hide_value_for_key
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
File diff suppressed because it is too large
@@ -12,16 +12,14 @@ AIR301 [*] `airflow.providers.amazon.aws.auth_manager.avp.entities.AvpEntities.D
13 | # airflow.providers.openlineage.utils.utils
|
help: Use `AvpEntities.ASSET` from `airflow.providers.amazon.aws.auth_manager.avp.entities` instead.

ℹ Safe fix
8 8 | from airflow.secrets.local_filesystem import load_connections
9 9 | from airflow.security.permissions import RESOURCE_DATASET
10 10 |
11 |-AvpEntities.DATASET
11 |+AvpEntities
12 12 |
13 13 | # airflow.providers.openlineage.utils.utils
14 14 | DatasetInfo()
8 | from airflow.secrets.local_filesystem import load_connections
9 | from airflow.security.permissions import RESOURCE_DATASET
10 |
- AvpEntities.DATASET
11 + AvpEntities
12 |
13 | # airflow.providers.openlineage.utils.utils
14 | DatasetInfo()
AIR301 [*] `airflow.providers.openlineage.utils.utils.DatasetInfo` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:14:1
@@ -32,24 +30,22 @@ AIR301 [*] `airflow.providers.openlineage.utils.utils.DatasetInfo` is removed in
15 | translate_airflow_dataset()
|
help: Use `AssetInfo` from `airflow.providers.openlineage.utils.utils` instead.

ℹ Safe fix
4 4 | from airflow.providers.openlineage.utils.utils import (
5 5 | DatasetInfo,
6 6 | translate_airflow_dataset,
7 |+AssetInfo,
7 8 | )
8 9 | from airflow.secrets.local_filesystem import load_connections
9 10 | from airflow.security.permissions import RESOURCE_DATASET
4 | from airflow.providers.openlineage.utils.utils import (
5 | DatasetInfo,
6 | translate_airflow_dataset,
7 + AssetInfo,
8 | )
9 | from airflow.secrets.local_filesystem import load_connections
10 | from airflow.security.permissions import RESOURCE_DATASET
--------------------------------------------------------------------------------
11 12 | AvpEntities.DATASET
12 13 |
13 14 | # airflow.providers.openlineage.utils.utils
14 |-DatasetInfo()
15 |+AssetInfo()
15 16 | translate_airflow_dataset()
16 17 |
17 18 | # airflow.secrets.local_filesystem
12 | AvpEntities.DATASET
13 |
14 | # airflow.providers.openlineage.utils.utils
- DatasetInfo()
15 + AssetInfo()
16 | translate_airflow_dataset()
17 |
18 | # airflow.secrets.local_filesystem

AIR301 [*] `airflow.providers.openlineage.utils.utils.translate_airflow_dataset` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:15:1
@@ -62,24 +58,22 @@ AIR301 [*] `airflow.providers.openlineage.utils.utils.translate_airflow_dataset`
17 | # airflow.secrets.local_filesystem
|
help: Use `translate_airflow_asset` from `airflow.providers.openlineage.utils.utils` instead.

ℹ Safe fix
4 4 | from airflow.providers.openlineage.utils.utils import (
5 5 | DatasetInfo,
6 6 | translate_airflow_dataset,
7 |+translate_airflow_asset,
7 8 | )
8 9 | from airflow.secrets.local_filesystem import load_connections
9 10 | from airflow.security.permissions import RESOURCE_DATASET
4 | from airflow.providers.openlineage.utils.utils import (
5 | DatasetInfo,
6 | translate_airflow_dataset,
7 + translate_airflow_asset,
8 | )
9 | from airflow.secrets.local_filesystem import load_connections
10 | from airflow.security.permissions import RESOURCE_DATASET
--------------------------------------------------------------------------------
12 13 |
13 14 | # airflow.providers.openlineage.utils.utils
14 15 | DatasetInfo()
15 |-translate_airflow_dataset()
16 |+translate_airflow_asset()
16 17 |
17 18 | # airflow.secrets.local_filesystem
18 19 | load_connections()
13 |
14 | # airflow.providers.openlineage.utils.utils
15 | DatasetInfo()
- translate_airflow_dataset()
16 + translate_airflow_asset()
17 |
18 | # airflow.secrets.local_filesystem
19 | load_connections()
AIR301 [*] `airflow.secrets.local_filesystem.load_connections` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:18:1
@@ -91,25 +85,23 @@ AIR301 [*] `airflow.secrets.local_filesystem.load_connections` is removed in Air
20 | # airflow.security.permissions
|
help: Use `load_connections_dict` from `airflow.secrets.local_filesystem` instead.

ℹ Safe fix
5 5 | DatasetInfo,
6 6 | translate_airflow_dataset,
7 7 | )
8 |-from airflow.secrets.local_filesystem import load_connections
8 |+from airflow.secrets.local_filesystem import load_connections, load_connections_dict
9 9 | from airflow.security.permissions import RESOURCE_DATASET
10 10 |
11 11 | AvpEntities.DATASET
5 | DatasetInfo,
6 | translate_airflow_dataset,
7 | )
- from airflow.secrets.local_filesystem import load_connections
8 + from airflow.secrets.local_filesystem import load_connections, load_connections_dict
9 | from airflow.security.permissions import RESOURCE_DATASET
10 |
11 | AvpEntities.DATASET
--------------------------------------------------------------------------------
15 15 | translate_airflow_dataset()
16 16 |
17 17 | # airflow.secrets.local_filesystem
18 |-load_connections()
18 |+load_connections_dict()
19 19 |
20 20 | # airflow.security.permissions
21 21 | RESOURCE_DATASET
15 | translate_airflow_dataset()
16 |
17 | # airflow.secrets.local_filesystem
- load_connections()
18 + load_connections_dict()
19 |
20 | # airflow.security.permissions
21 | RESOURCE_DATASET

AIR301 [*] `airflow.security.permissions.RESOURCE_DATASET` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:21:1
@@ -121,25 +113,23 @@ AIR301 [*] `airflow.security.permissions.RESOURCE_DATASET` is removed in Airflow
23 | from airflow.providers.amazon.aws.datasets.s3 import (
|
help: Use `RESOURCE_ASSET` from `airflow.security.permissions` instead.

ℹ Safe fix
6 6 | translate_airflow_dataset,
7 7 | )
8 8 | from airflow.secrets.local_filesystem import load_connections
9 |-from airflow.security.permissions import RESOURCE_DATASET
9 |+from airflow.security.permissions import RESOURCE_DATASET, RESOURCE_ASSET
10 10 |
11 11 | AvpEntities.DATASET
12 12 |
6 | translate_airflow_dataset,
7 | )
8 | from airflow.secrets.local_filesystem import load_connections
- from airflow.security.permissions import RESOURCE_DATASET
9 + from airflow.security.permissions import RESOURCE_DATASET, RESOURCE_ASSET
10 |
11 | AvpEntities.DATASET
12 |
--------------------------------------------------------------------------------
18 18 | load_connections()
19 19 |
20 20 | # airflow.security.permissions
21 |-RESOURCE_DATASET
21 |+RESOURCE_ASSET
22 22 |
23 23 | from airflow.providers.amazon.aws.datasets.s3 import (
24 24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
18 | load_connections()
19 |
20 | # airflow.security.permissions
- RESOURCE_DATASET
21 + RESOURCE_ASSET
22 |
23 | from airflow.providers.amazon.aws.datasets.s3 import (
24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
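Taken together, the two fixes above move call sites onto the `*_dict` and `*_ASSET` names; a hedged sketch (the JSON file path is illustrative, not from the fixtures):

```python
from airflow.secrets.local_filesystem import load_connections_dict  # was: load_connections
from airflow.security.permissions import RESOURCE_ASSET             # was: RESOURCE_DATASET

connections = load_connections_dict("connections.json")  # path is illustrative
```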
AIR301 [*] `airflow.providers.amazon.aws.datasets.s3.create_dataset` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:28:1
@@ -151,18 +141,16 @@ AIR301 [*] `airflow.providers.amazon.aws.datasets.s3.create_dataset` is removed
29 | s3_convert_dataset_to_openlineage()
|
help: Use `create_asset` from `airflow.providers.amazon.aws.assets.s3` instead.

ℹ Safe fix
24 24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
25 25 | )
26 26 | from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
27 |+from airflow.providers.amazon.aws.assets.s3 import create_asset
27 28 |
28 |-s3_create_dataset()
29 |+create_asset()
29 30 | s3_convert_dataset_to_openlineage()
30 31 |
31 32 | from airflow.providers.common.io.dataset.file import (
24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
25 | )
26 | from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
27 + from airflow.providers.amazon.aws.assets.s3 import create_asset
28 |
- s3_create_dataset()
29 + create_asset()
30 | s3_convert_dataset_to_openlineage()
31 |
32 | from airflow.providers.common.io.dataset.file import (

AIR301 [*] `airflow.providers.amazon.aws.datasets.s3.convert_dataset_to_openlineage` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:29:1
@@ -174,19 +162,17 @@ AIR301 [*] `airflow.providers.amazon.aws.datasets.s3.convert_dataset_to_openline
31 | from airflow.providers.common.io.dataset.file import (
|
help: Use `convert_asset_to_openlineage` from `airflow.providers.amazon.aws.assets.s3` instead.

ℹ Safe fix
24 24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
25 25 | )
26 26 | from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
27 |+from airflow.providers.amazon.aws.assets.s3 import convert_asset_to_openlineage
27 28 |
28 29 | s3_create_dataset()
29 |-s3_convert_dataset_to_openlineage()
30 |+convert_asset_to_openlineage()
30 31 |
31 32 | from airflow.providers.common.io.dataset.file import (
32 33 | convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
24 | convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
25 | )
26 | from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
27 + from airflow.providers.amazon.aws.assets.s3 import convert_asset_to_openlineage
28 |
29 | s3_create_dataset()
- s3_convert_dataset_to_openlineage()
30 + convert_asset_to_openlineage()
31 |
32 | from airflow.providers.common.io.dataset.file import (
33 | convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
AIR301 [*] `airflow.providers.google.datasets.bigquery.create_dataset` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:45:1
@@ -199,18 +185,16 @@ AIR301 [*] `airflow.providers.google.datasets.bigquery.create_dataset` is remove
47 | # airflow.providers.google.datasets.gcs
|
help: Use `create_asset` from `airflow.providers.google.assets.bigquery` instead.

ℹ Safe fix
41 41 | from airflow.providers.google.datasets.bigquery import (
42 42 | create_dataset as bigquery_create_dataset,
43 43 | )
44 |+from airflow.providers.google.assets.bigquery import create_asset
44 45 |
45 |-bigquery_create_dataset()
46 |+create_asset()
46 47 |
47 48 | # airflow.providers.google.datasets.gcs
48 49 | from airflow.providers.google.datasets.gcs import (
41 | from airflow.providers.google.datasets.bigquery import (
42 | create_dataset as bigquery_create_dataset,
43 | )
44 + from airflow.providers.google.assets.bigquery import create_asset
45 |
- bigquery_create_dataset()
46 + create_asset()
47 |
48 | # airflow.providers.google.datasets.gcs
49 | from airflow.providers.google.datasets.gcs import (

AIR301 [*] `airflow.providers.google.datasets.gcs.create_dataset` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:53:1
@@ -222,16 +206,14 @@ AIR301 [*] `airflow.providers.google.datasets.gcs.create_dataset` is removed in
54 | gcs_convert_dataset_to_openlineage()
|
help: Use `create_asset` from `airflow.providers.google.assets.gcs` instead.

ℹ Safe fix
49 49 | convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
50 50 | )
51 51 | from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
52 |+from airflow.providers.google.assets.gcs import create_asset
52 53 |
53 |-gcs_create_dataset()
54 |+create_asset()
54 55 | gcs_convert_dataset_to_openlineage()
49 | convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
50 | )
51 | from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
52 + from airflow.providers.google.assets.gcs import create_asset
53 |
- gcs_create_dataset()
54 + create_asset()
55 | gcs_convert_dataset_to_openlineage()

AIR301 [*] `airflow.providers.google.datasets.gcs.convert_dataset_to_openlineage` is removed in Airflow 3.0
--> AIR301_provider_names_fix.py:54:1
@@ -241,13 +223,11 @@ AIR301 [*] `airflow.providers.google.datasets.gcs.convert_dataset_to_openlineage
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Use `convert_asset_to_openlineage` from `airflow.providers.google.assets.gcs` instead.

ℹ Safe fix
49 49 | convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
50 50 | )
51 51 | from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
52 |+from airflow.providers.google.assets.gcs import convert_asset_to_openlineage
52 53 |
53 54 | gcs_create_dataset()
54 |-gcs_convert_dataset_to_openlineage()
55 |+convert_asset_to_openlineage()
49 | convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
50 | )
51 | from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
52 + from airflow.providers.google.assets.gcs import convert_asset_to_openlineage
53 |
54 | gcs_create_dataset()
- gcs_convert_dataset_to_openlineage()
55 + convert_asset_to_openlineage()
@@ -11,23 +11,22 @@ AIR302 [*] `airflow.hooks.S3_hook.S3Hook` is moved into `amazon` provider in Air
15 | provide_bucket_name()
|
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3Hook` from `airflow.providers.amazon.aws.hooks.s3` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.S3_hook import (
4 |- S3Hook,
5 4 | provide_bucket_name,
6 5 | )
7 6 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.S3_hook import (
- S3Hook,
4 | provide_bucket_name,
5 | )
6 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
--------------------------------------------------------------------------------
10 9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 |+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
13 13 |
14 14 | S3Hook()
15 15 | provide_bucket_name()
9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 + from airflow.providers.amazon.aws.hooks.s3 import S3Hook
13 |
14 | S3Hook()
15 | provide_bucket_name()
note: This is an unsafe fix and may change runtime behavior
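AIR302 relocates core shims into provider packages rather than renaming them; a hedged sketch of the fixed imports (requires `apache-airflow-providers-amazon`, as the help text notes):

```python
# Airflow 3.0 import paths, per the AIR302 fixes in this file:
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator

hook = S3Hook()  # same class, new import path
```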
AIR302 [*] `airflow.hooks.S3_hook.provide_bucket_name` is moved into `amazon` provider in Airflow 3.0;
--> AIR302_amazon.py:15:1
@@ -39,23 +38,22 @@ AIR302 [*] `airflow.hooks.S3_hook.provide_bucket_name` is moved into `amazon` pr
17 | GCSToS3Operator()
|
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `provide_bucket_name` from `airflow.providers.amazon.aws.hooks.s3` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.S3_hook import (
4 4 | S3Hook,
5 |- provide_bucket_name,
6 5 | )
7 6 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
8 7 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
2 |
3 | from airflow.hooks.S3_hook import (
4 | S3Hook,
- provide_bucket_name,
5 | )
6 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
7 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
--------------------------------------------------------------------------------
10 9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 |+from airflow.providers.amazon.aws.hooks.s3 import provide_bucket_name
13 13 |
14 14 | S3Hook()
15 15 | provide_bucket_name()
9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 + from airflow.providers.amazon.aws.hooks.s3 import provide_bucket_name
13 |
14 | S3Hook()
15 | provide_bucket_name()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.gcs_to_s3.GCSToS3Operator` is moved into `amazon` provider in Airflow 3.0;
--> AIR302_amazon.py:17:1
@@ -68,21 +66,20 @@ AIR302 [*] `airflow.operators.gcs_to_s3.GCSToS3Operator` is moved into `amazon`
19 | RedshiftToS3Operator()
|
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GCSToS3Operator` from `airflow.providers.amazon.aws.transfers.gcs_to_s3` instead.

ℹ Unsafe fix
4 4 | S3Hook,
5 5 | provide_bucket_name,
6 6 | )
7 |-from airflow.operators.gcs_to_s3 import GCSToS3Operator
8 7 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
9 8 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
10 9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 |+from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
13 13 |
14 14 | S3Hook()
15 15 | provide_bucket_name()
4 | S3Hook,
5 | provide_bucket_name,
6 | )
- from airflow.operators.gcs_to_s3 import GCSToS3Operator
7 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
8 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
12 + from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
13 |
14 | S3Hook()
15 | provide_bucket_name()
note: This is an unsafe fix and may change runtime behavior
AIR302 [*] `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Operator` is moved into `amazon` provider in Airflow 3.0;
|
||||
--> AIR302_amazon.py:18:1
|
||||
@@ -94,20 +91,19 @@ AIR302 [*] `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Operator` i
|
||||
20 | S3FileTransformOperator()
|
||||
|
|
||||
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GoogleApiToS3Operator` from `airflow.providers.amazon.aws.transfers.google_api_to_s3` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
5 5 | provide_bucket_name,
|
||||
6 6 | )
|
||||
7 7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
8 |-from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 8 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 |+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
|
||||
13 13 |
|
||||
14 14 | S3Hook()
|
||||
15 15 | provide_bucket_name()
|
||||
5 | provide_bucket_name,
|
||||
6 | )
|
||||
7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
- from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
8 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 + from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
|
||||
13 |
|
||||
14 | S3Hook()
|
||||
15 | provide_bucket_name()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.operators.redshift_to_s3_operator.RedshiftToS3Operator` is moved into `amazon` provider in Airflow 3.0;
|
||||
--> AIR302_amazon.py:19:1
|
||||
@@ -120,19 +116,18 @@ AIR302 [*] `airflow.operators.redshift_to_s3_operator.RedshiftToS3Operator` is m
|
||||
21 | S3ToRedshiftOperator()
|
||||
|
|
||||
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `RedshiftToS3Operator` from `airflow.providers.amazon.aws.transfers.redshift_to_s3` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
6 6 | )
|
||||
7 7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
8 8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 |-from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 |+from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
|
||||
13 13 |
|
||||
14 14 | S3Hook()
|
||||
15 15 | provide_bucket_name()
|
||||
6 | )
|
||||
7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
- from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
9 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 + from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
|
||||
13 |
|
||||
14 | S3Hook()
|
||||
15 | provide_bucket_name()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.operators.s3_file_transform_operator.S3FileTransformOperator` is moved into `amazon` provider in Airflow 3.0;
|
||||
--> AIR302_amazon.py:20:1
|
||||
@@ -145,18 +140,17 @@ AIR302 [*] `airflow.operators.s3_file_transform_operator.S3FileTransformOperator
|
||||
22 | S3KeySensor()
|
||||
|
|
||||
help: Install `apache-airflow-providers-amazon>=3.0.0` and use `S3FileTransformOperator` from `airflow.providers.amazon.aws.operators.s3` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
7 7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
8 8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 |-from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 |+from airflow.providers.amazon.aws.operators.s3 import S3FileTransformOperator
|
||||
13 13 |
|
||||
14 14 | S3Hook()
|
||||
15 15 | provide_bucket_name()
|
||||
7 | from airflow.operators.gcs_to_s3 import GCSToS3Operator
|
||||
8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
- from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
10 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 + from airflow.providers.amazon.aws.operators.s3 import S3FileTransformOperator
|
||||
13 |
|
||||
14 | S3Hook()
|
||||
15 | provide_bucket_name()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.operators.s3_to_redshift_operator.S3ToRedshiftOperator` is moved into `amazon` provider in Airflow 3.0;
|
||||
--> AIR302_amazon.py:21:1
|
||||
@@ -168,17 +162,16 @@ AIR302 [*] `airflow.operators.s3_to_redshift_operator.S3ToRedshiftOperator` is m
|
||||
22 | S3KeySensor()
|
||||
|
|
||||
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3ToRedshiftOperator` from `airflow.providers.amazon.aws.transfers.s3_to_redshift` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
8 8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 10 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 |-from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
12 11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 |+from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
|
||||
13 13 |
|
||||
14 14 | S3Hook()
|
||||
15 15 | provide_bucket_name()
|
||||
8 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
|
||||
9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
- from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
11 | from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 + from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
|
||||
13 |
|
||||
14 | S3Hook()
|
||||
15 | provide_bucket_name()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.sensors.s3_key_sensor.S3KeySensor` is moved into `amazon` provider in Airflow 3.0;
|
||||
--> AIR302_amazon.py:22:1
|
||||
@@ -191,16 +184,15 @@ AIR302 [*] `airflow.sensors.s3_key_sensor.S3KeySensor` is moved into `amazon` pr
|
||||
24 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer
|
||||
|
|
||||
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3KeySensor` from `airflow.providers.amazon.aws.sensors.s3` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
9 9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 10 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 11 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
12 |-from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 |+from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor
|
||||
13 13 |
|
||||
14 14 | S3Hook()
|
||||
15 15 | provide_bucket_name()
|
||||
9 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
|
||||
10 | from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
|
||||
11 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
|
||||
- from airflow.sensors.s3_key_sensor import S3KeySensor
|
||||
12 + from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor
|
||||
13 |
|
||||
14 | S3Hook()
|
||||
15 | provide_bucket_name()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
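Taken together, the operator and sensor diagnostics above fan the old flat `airflow.operators`/`airflow.sensors` modules out into provider sub-packages. A consolidated sketch of the target imports, assuming `apache-airflow-providers-amazon` is installed (>=3.0.0 for `S3FileTransformOperator`, >=1.0.0 for the rest, per the help texts):

    from airflow.providers.amazon.aws.operators.s3 import S3FileTransformOperator
    from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor
    from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
    from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
    from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
    from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator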
AIR302 [*] `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer` is moved into `amazon` provider in Airflow 3.0;
--> AIR302_amazon.py:26:1
@@ -213,15 +205,14 @@ AIR302 [*] `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer` i
28 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GoogleApiToS3Operator` from `airflow.providers.amazon.aws.transfers.google_api_to_s3` instead.
ℹ Unsafe fix
22 22 | S3KeySensor()
23 23 |
24 24 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer
25 |+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
25 26 |
26 27 | GoogleApiToS3Transfer()
27 28 |
22 | S3KeySensor()
23 |
24 | from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer
25 + from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
26 |
27 | GoogleApiToS3Transfer()
28 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer` is moved into `amazon` provider in Airflow 3.0;
--> AIR302_amazon.py:30:1
@@ -234,15 +225,14 @@ AIR302 [*] `airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer` is m
32 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `RedshiftToS3Operator` from `airflow.providers.amazon.aws.transfers.redshift_to_s3` instead.
ℹ Unsafe fix
26 26 | GoogleApiToS3Transfer()
27 27 |
28 28 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer
29 |+from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
29 30 |
30 31 | RedshiftToS3Transfer()
31 32 |
26 | GoogleApiToS3Transfer()
27 |
28 | from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer
29 + from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
30 |
31 | RedshiftToS3Transfer()
32 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer` is moved into `amazon` provider in Airflow 3.0;
--> AIR302_amazon.py:34:1
@@ -253,11 +243,10 @@ AIR302 [*] `airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer` is m
| ^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3ToRedshiftOperator` from `airflow.providers.amazon.aws.transfers.s3_to_redshift` instead.
ℹ Unsafe fix
30 30 | RedshiftToS3Transfer()
31 31 |
32 32 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
33 |+from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
33 34 |
34 35 | S3ToRedshiftTransfer()
30 | RedshiftToS3Transfer()
31 |
32 | from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
33 + from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
34 |
35 | S3ToRedshiftTransfer()
note: This is an unsafe fix and may change runtime behavior
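These three `*Transfer` diagnostics also show why the fixes are flagged unsafe: the autofix inserts an import of the renamed `*Operator` class, but as the fix diffs above show, the old import and the `GoogleApiToS3Transfer()` / `RedshiftToS3Transfer()` / `S3ToRedshiftTransfer()` call sites are left untouched, so the rename has to be finished by hand. A sketch of the completed migration for one of them (call-site arguments omitted):

    # Before (Airflow 2.x alias):
    #   from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
    #   S3ToRedshiftTransfer(...)

    # After completing the unsafe fix by hand:
    from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
    # S3ToRedshiftOperator(...)  # rename the call site too, then drop the old import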
@@ -12,19 +12,18 @@ AIR302 [*] `airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG` is mo
11 | app
help: Install `apache-airflow-providers-celery>=3.3.0` and use `DEFAULT_CELERY_CONFIG` from `airflow.providers.celery.executors.default_celery` instead.
ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
4 3 | from airflow.executors.celery_executor import (
5 4 | CeleryExecutor,
6 5 | app,
7 6 | )
7 |+from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG
8 8 |
9 9 | DEFAULT_CELERY_CONFIG
10 10 |
1 | from __future__ import annotations
2 |
- from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
3 | from airflow.executors.celery_executor import (
4 | CeleryExecutor,
5 | app,
6 | )
7 + from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG
8 |
9 | DEFAULT_CELERY_CONFIG
10 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.executors.celery_executor.app` is moved into `celery` provider in Airflow 3.0;
--> AIR302_celery.py:11:1
@@ -36,17 +35,16 @@ AIR302 [*] `airflow.executors.celery_executor.app` is moved into `celery` provid
12 | CeleryExecutor()
help: Install `apache-airflow-providers-celery>=3.3.0` and use `app` from `airflow.providers.celery.executors.celery_executor_utils` instead.
ℹ Unsafe fix
3 3 | from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
4 4 | from airflow.executors.celery_executor import (
5 5 | CeleryExecutor,
6 |- app,
7 6 | )
7 |+from airflow.providers.celery.executors.celery_executor_utils import app
8 8 |
9 9 | DEFAULT_CELERY_CONFIG
10 10 |
3 | from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
4 | from airflow.executors.celery_executor import (
5 | CeleryExecutor,
- app,
6 | )
7 + from airflow.providers.celery.executors.celery_executor_utils import app
8 |
9 | DEFAULT_CELERY_CONFIG
10 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.executors.celery_executor.CeleryExecutor` is moved into `celery` provider in Airflow 3.0;
--> AIR302_celery.py:12:1
@@ -56,15 +54,14 @@ AIR302 [*] `airflow.executors.celery_executor.CeleryExecutor` is moved into `cel
| ^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-celery>=3.3.0` and use `CeleryExecutor` from `airflow.providers.celery.executors.celery_executor` instead.
ℹ Unsafe fix
2 2 |
3 3 | from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
4 4 | from airflow.executors.celery_executor import (
5 |- CeleryExecutor,
6 5 | app,
7 6 | )
7 |+from airflow.providers.celery.executors.celery_executor import CeleryExecutor
8 8 |
9 9 | DEFAULT_CELERY_CONFIG
10 10 |
2 |
3 | from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
4 | from airflow.executors.celery_executor import (
- CeleryExecutor,
5 | app,
6 | )
7 + from airflow.providers.celery.executors.celery_executor import CeleryExecutor
8 |
9 | DEFAULT_CELERY_CONFIG
10 |
note: This is an unsafe fix and may change runtime behavior
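The celery snapshots follow the same pattern: three symbols from two core modules land in three provider modules. A sketch, assuming `apache-airflow-providers-celery>=3.3.0` is installed:

    # Old core locations, removed in Airflow 3.0:
    #   from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
    #   from airflow.executors.celery_executor import CeleryExecutor, app

    # Provider locations named by the fixes:
    from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG
    from airflow.providers.celery.executors.celery_executor import CeleryExecutor
    from airflow.providers.celery.executors.celery_executor_utils import app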
@@ -11,18 +11,17 @@ AIR302 [*] `airflow.hooks.dbapi.ConnectorProtocol` is moved into `common-sql` pr
9 | DbApiHook()
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `ConnectorProtocol` from `airflow.providers.common.sql.hooks.sql` instead.
ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.dbapi import (
4 |- ConnectorProtocol,
5 4 | DbApiHook,
6 5 | )
6 |+from airflow.providers.common.sql.hooks.sql import ConnectorProtocol
7 7 |
8 8 | ConnectorProtocol()
9 9 | DbApiHook()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.dbapi import (
- ConnectorProtocol,
4 | DbApiHook,
5 | )
6 + from airflow.providers.common.sql.hooks.sql import ConnectorProtocol
7 |
8 | ConnectorProtocol()
9 | DbApiHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.dbapi.DbApiHook` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:9:1
@@ -34,17 +33,16 @@ AIR302 [*] `airflow.hooks.dbapi.DbApiHook` is moved into `common-sql` provider i
11 | from airflow.hooks.dbapi_hook import DbApiHook
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `DbApiHook` from `airflow.providers.common.sql.hooks.sql` instead.
ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.dbapi import (
4 4 | ConnectorProtocol,
5 |- DbApiHook,
6 5 | )
6 |+from airflow.providers.common.sql.hooks.sql import DbApiHook
7 7 |
8 8 | ConnectorProtocol()
9 9 | DbApiHook()
2 |
3 | from airflow.hooks.dbapi import (
4 | ConnectorProtocol,
- DbApiHook,
5 | )
6 + from airflow.providers.common.sql.hooks.sql import DbApiHook
7 |
8 | ConnectorProtocol()
9 | DbApiHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.dbapi_hook.DbApiHook` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:14:1
@@ -56,17 +54,16 @@ AIR302 [*] `airflow.hooks.dbapi_hook.DbApiHook` is moved into `common-sql` provi
15 | SQLCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `DbApiHook` from `airflow.providers.common.sql.hooks.sql` instead.
ℹ Unsafe fix
8 8 | ConnectorProtocol()
9 9 | DbApiHook()
10 10 |
11 |-from airflow.hooks.dbapi_hook import DbApiHook
12 11 | from airflow.operators.check_operator import SQLCheckOperator
12 |+from airflow.providers.common.sql.hooks.sql import DbApiHook
13 13 |
14 14 | DbApiHook()
15 15 | SQLCheckOperator()
8 | ConnectorProtocol()
9 | DbApiHook()
10 |
- from airflow.hooks.dbapi_hook import DbApiHook
11 | from airflow.operators.check_operator import SQLCheckOperator
12 + from airflow.providers.common.sql.hooks.sql import DbApiHook
13 |
14 | DbApiHook()
15 | SQLCheckOperator()
note: This is an unsafe fix and may change runtime behavior
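Both legacy hook modules, `airflow.hooks.dbapi` and `airflow.hooks.dbapi_hook`, collapse into a single provider module. A sketch, assuming `apache-airflow-providers-common-sql>=1.0.0` (the subclass and its connection attribute are hypothetical):

    from airflow.providers.common.sql.hooks.sql import ConnectorProtocol, DbApiHook

    class MyDbHook(DbApiHook):
        # DbApiHook is the base class custom SQL hooks derive from;
        # conn_name_attr names the attribute that holds the connection id.
        conn_name_attr = "my_conn_id"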
AIR302 [*] `airflow.operators.check_operator.SQLCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:15:1
@@ -76,16 +73,15 @@ AIR302 [*] `airflow.operators.check_operator.SQLCheckOperator` is moved into `co
| ^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
9 9 | DbApiHook()
10 10 |
11 11 | from airflow.hooks.dbapi_hook import DbApiHook
12 |-from airflow.operators.check_operator import SQLCheckOperator
12 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
13 13 |
14 14 | DbApiHook()
15 15 | SQLCheckOperator()
9 | DbApiHook()
10 |
11 | from airflow.hooks.dbapi_hook import DbApiHook
- from airflow.operators.check_operator import SQLCheckOperator
12 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
13 |
14 | DbApiHook()
15 | SQLCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:21:1
@@ -97,16 +93,15 @@ AIR302 [*] `airflow.operators.sql.SQLCheckOperator` is moved into `common-sql` p
22 | CheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
16 16 |
17 17 |
18 18 | from airflow.operators.check_operator import CheckOperator
19 |-from airflow.operators.sql import SQLCheckOperator
19 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
20 20 |
21 21 | SQLCheckOperator()
22 22 | CheckOperator()
16 |
17 |
18 | from airflow.operators.check_operator import CheckOperator
- from airflow.operators.sql import SQLCheckOperator
19 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
20 |
21 | SQLCheckOperator()
22 | CheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:22:1
@@ -116,15 +111,14 @@ AIR302 [*] `airflow.operators.check_operator.CheckOperator` is moved into `commo
| ^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
17 17 |
18 18 | from airflow.operators.check_operator import CheckOperator
19 19 | from airflow.operators.sql import SQLCheckOperator
20 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
20 21 |
21 22 | SQLCheckOperator()
22 23 | CheckOperator()
17 |
18 | from airflow.operators.check_operator import CheckOperator
19 | from airflow.operators.sql import SQLCheckOperator
20 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
21 |
22 | SQLCheckOperator()
23 | CheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.druid_check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:27:1
@@ -135,15 +129,14 @@ AIR302 [*] `airflow.operators.druid_check_operator.CheckOperator` is moved into
| ^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
23 23 |
24 24 |
25 25 | from airflow.operators.druid_check_operator import CheckOperator
26 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
26 27 |
27 28 | CheckOperator()
28 29 |
23 |
24 |
25 | from airflow.operators.druid_check_operator import CheckOperator
26 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
27 |
28 | CheckOperator()
29 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:32:1
@@ -154,15 +147,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.CheckOperator` is moved into
| ^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
28 28 |
29 29 |
30 30 | from airflow.operators.presto_check_operator import CheckOperator
31 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
31 32 |
32 33 | CheckOperator()
33 34 |
28 |
29 |
30 | from airflow.operators.presto_check_operator import CheckOperator
31 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
32 |
33 | CheckOperator()
34 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.druid_check_operator.DruidCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:42:1
@@ -175,15 +167,14 @@ AIR302 [*] `airflow.operators.druid_check_operator.DruidCheckOperator` is moved
44 | IntervalCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
38 38 | )
39 39 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 40 | from airflow.operators.presto_check_operator import PrestoCheckOperator
41 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
41 42 |
42 43 | DruidCheckOperator()
43 44 | PrestoCheckOperator()
38 | )
39 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 | from airflow.operators.presto_check_operator import PrestoCheckOperator
41 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
42 |
43 | DruidCheckOperator()
44 | PrestoCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.PrestoCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:43:1
@@ -195,15 +186,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.PrestoCheckOperator` is move
45 | SQLIntervalCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
38 38 | )
39 39 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 40 | from airflow.operators.presto_check_operator import PrestoCheckOperator
41 |+from airflow.providers.common.sql.operators.sql import SQLCheckOperator
41 42 |
42 43 | DruidCheckOperator()
43 44 | PrestoCheckOperator()
38 | )
39 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 | from airflow.operators.presto_check_operator import PrestoCheckOperator
41 + from airflow.providers.common.sql.operators.sql import SQLCheckOperator
42 |
43 | DruidCheckOperator()
44 | PrestoCheckOperator()
note: This is an unsafe fix and may change runtime behavior
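Note how these check diagnostics converge on one class: `CheckOperator`, `DruidCheckOperator`, and `PrestoCheckOperator` were already aliases of `SQLCheckOperator`, so every fix inserts the same provider import. A usage sketch with illustrative arguments, assuming `apache-airflow-providers-common-sql>=1.1.0`:

    from airflow.providers.common.sql.operators.sql import SQLCheckOperator

    check = SQLCheckOperator(
        task_id="row_count_check",            # hypothetical task
        conn_id="my_db",                      # hypothetical connection
        sql="SELECT COUNT(*) FROM my_table",  # task fails if the first row is falsy
    )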
AIR302 [*] `airflow.operators.check_operator.IntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:44:1
@@ -215,19 +205,18 @@ AIR302 [*] `airflow.operators.check_operator.IntervalCheckOperator` is moved int
45 | SQLIntervalCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
34 34 |
35 35 | from airflow.operators.check_operator import (
36 36 | IntervalCheckOperator,
37 |- SQLIntervalCheckOperator,
38 37 | )
39 38 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 39 | from airflow.operators.presto_check_operator import PrestoCheckOperator
40 |+from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
41 41 |
42 42 | DruidCheckOperator()
43 43 | PrestoCheckOperator()
34 |
35 | from airflow.operators.check_operator import (
36 | IntervalCheckOperator,
- SQLIntervalCheckOperator,
37 | )
38 | from airflow.operators.druid_check_operator import DruidCheckOperator
39 | from airflow.operators.presto_check_operator import PrestoCheckOperator
40 + from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
41 |
42 | DruidCheckOperator()
43 | PrestoCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.check_operator.SQLIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:45:1
@@ -238,19 +227,18 @@ AIR302 [*] `airflow.operators.check_operator.SQLIntervalCheckOperator` is moved
| ^^^^^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
34 34 |
35 35 | from airflow.operators.check_operator import (
36 36 | IntervalCheckOperator,
37 |- SQLIntervalCheckOperator,
38 37 | )
39 38 | from airflow.operators.druid_check_operator import DruidCheckOperator
40 39 | from airflow.operators.presto_check_operator import PrestoCheckOperator
40 |+from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
41 41 |
42 42 | DruidCheckOperator()
43 43 | PrestoCheckOperator()
34 |
35 | from airflow.operators.check_operator import (
36 | IntervalCheckOperator,
- SQLIntervalCheckOperator,
37 | )
38 | from airflow.operators.druid_check_operator import DruidCheckOperator
39 | from airflow.operators.presto_check_operator import PrestoCheckOperator
40 + from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
41 |
42 | DruidCheckOperator()
43 | PrestoCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.IntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:54:1
@@ -263,15 +251,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.IntervalCheckOperator` is mo
56 | PrestoIntervalCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
50 50 | PrestoIntervalCheckOperator,
51 51 | )
52 52 | from airflow.operators.sql import SQLIntervalCheckOperator
53 |+from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
53 54 |
54 55 | IntervalCheckOperator()
55 56 | SQLIntervalCheckOperator()
50 | PrestoIntervalCheckOperator,
51 | )
52 | from airflow.operators.sql import SQLIntervalCheckOperator
53 + from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
54 |
55 | IntervalCheckOperator()
56 | SQLIntervalCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:55:1
@@ -282,16 +269,15 @@ AIR302 [*] `airflow.operators.sql.SQLIntervalCheckOperator` is moved into `commo
56 | PrestoIntervalCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
49 49 | IntervalCheckOperator,
50 50 | PrestoIntervalCheckOperator,
51 51 | )
52 |-from airflow.operators.sql import SQLIntervalCheckOperator
52 |+from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
53 53 |
54 54 | IntervalCheckOperator()
55 55 | SQLIntervalCheckOperator()
49 | IntervalCheckOperator,
50 | PrestoIntervalCheckOperator,
51 | )
- from airflow.operators.sql import SQLIntervalCheckOperator
52 + from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
53 |
54 | IntervalCheckOperator()
55 | SQLIntervalCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.PrestoIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:56:1
@@ -302,15 +288,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.PrestoIntervalCheckOperator`
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
50 50 | PrestoIntervalCheckOperator,
51 51 | )
52 52 | from airflow.operators.sql import SQLIntervalCheckOperator
53 |+from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
53 54 |
54 55 | IntervalCheckOperator()
55 56 | SQLIntervalCheckOperator()
50 | PrestoIntervalCheckOperator,
51 | )
52 | from airflow.operators.sql import SQLIntervalCheckOperator
53 + from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator
54 |
55 | IntervalCheckOperator()
56 | SQLIntervalCheckOperator()
note: This is an unsafe fix and may change runtime behavior
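The interval-check aliases collapse the same way onto `SQLIntervalCheckOperator`. A sketch with illustrative arguments (parameter values are hypothetical):

    from airflow.providers.common.sql.operators.sql import SQLIntervalCheckOperator

    drift_check = SQLIntervalCheckOperator(
        task_id="daily_drift_check",           # hypothetical task
        table="my_table",
        days_back=-7,                          # compare today against seven days ago
        metrics_thresholds={"COUNT(*)": 1.5},  # max allowed ratio between the two runs
    )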
AIR302 [*] `airflow.operators.check_operator.SQLThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:64:1
@@ -322,18 +307,17 @@ AIR302 [*] `airflow.operators.check_operator.SQLThresholdCheckOperator` is moved
65 | ThresholdCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
57 57 |
58 58 |
59 59 | from airflow.operators.check_operator import (
60 |- SQLThresholdCheckOperator,
61 60 | ThresholdCheckOperator,
62 61 | )
62 |+from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
63 63 |
64 64 | SQLThresholdCheckOperator()
65 65 | ThresholdCheckOperator()
57 |
58 |
59 | from airflow.operators.check_operator import (
- SQLThresholdCheckOperator,
60 | ThresholdCheckOperator,
61 | )
62 + from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
63 |
64 | SQLThresholdCheckOperator()
65 | ThresholdCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.check_operator.ThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:65:1
@@ -343,18 +327,17 @@ AIR302 [*] `airflow.operators.check_operator.ThresholdCheckOperator` is moved in
| ^^^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
57 57 |
58 58 |
59 59 | from airflow.operators.check_operator import (
60 |- SQLThresholdCheckOperator,
61 60 | ThresholdCheckOperator,
62 61 | )
62 |+from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
63 63 |
64 64 | SQLThresholdCheckOperator()
65 65 | ThresholdCheckOperator()
57 |
58 |
59 | from airflow.operators.check_operator import (
- SQLThresholdCheckOperator,
60 | ThresholdCheckOperator,
61 | )
62 + from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
63 |
64 | SQLThresholdCheckOperator()
65 | ThresholdCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:70:1
@@ -365,16 +348,15 @@ AIR302 [*] `airflow.operators.sql.SQLThresholdCheckOperator` is moved into `comm
| ^^^^^^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
65 65 | ThresholdCheckOperator()
66 66 |
67 67 |
68 |-from airflow.operators.sql import SQLThresholdCheckOperator
68 |+from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
69 69 |
70 70 | SQLThresholdCheckOperator()
71 71 |
65 | ThresholdCheckOperator()
66 |
67 |
- from airflow.operators.sql import SQLThresholdCheckOperator
68 + from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator
69 |
70 | SQLThresholdCheckOperator()
71 |
note: This is an unsafe fix and may change runtime behavior
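Likewise for the threshold variants, which all map onto `SQLThresholdCheckOperator`. A sketch with illustrative bounds:

    from airflow.providers.common.sql.operators.sql import SQLThresholdCheckOperator

    volume_check = SQLThresholdCheckOperator(
        task_id="volume_threshold",           # hypothetical task
        sql="SELECT COUNT(*) FROM my_table",
        min_threshold=100,                    # fail below this value
        max_threshold=100_000,                # fail above this value
    )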
AIR302 [*] `airflow.operators.check_operator.SQLValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:78:1
@@ -386,18 +368,17 @@ AIR302 [*] `airflow.operators.check_operator.SQLValueCheckOperator` is moved int
79 | ValueCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
71 71 |
72 72 |
73 73 | from airflow.operators.check_operator import (
74 |- SQLValueCheckOperator,
75 74 | ValueCheckOperator,
76 75 | )
76 |+from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
77 77 |
78 78 | SQLValueCheckOperator()
79 79 | ValueCheckOperator()
71 |
72 |
73 | from airflow.operators.check_operator import (
- SQLValueCheckOperator,
74 | ValueCheckOperator,
75 | )
76 + from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
77 |
78 | SQLValueCheckOperator()
79 | ValueCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.check_operator.ValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:79:1
@@ -407,18 +388,17 @@ AIR302 [*] `airflow.operators.check_operator.ValueCheckOperator` is moved into `
| ^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
71 71 |
72 72 |
73 73 | from airflow.operators.check_operator import (
74 |- SQLValueCheckOperator,
75 74 | ValueCheckOperator,
76 75 | )
76 |+from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
77 77 |
78 78 | SQLValueCheckOperator()
79 79 | ValueCheckOperator()
71 |
72 |
73 | from airflow.operators.check_operator import (
- SQLValueCheckOperator,
74 | ValueCheckOperator,
75 | )
76 + from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
77 |
78 | SQLValueCheckOperator()
79 | ValueCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:88:1
@@ -431,16 +411,15 @@ AIR302 [*] `airflow.operators.sql.SQLValueCheckOperator` is moved into `common-s
90 | PrestoValueCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
83 83 | PrestoValueCheckOperator,
84 84 | ValueCheckOperator,
85 85 | )
86 |-from airflow.operators.sql import SQLValueCheckOperator
86 |+from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
87 87 |
88 88 | SQLValueCheckOperator()
89 89 | ValueCheckOperator()
83 | PrestoValueCheckOperator,
84 | ValueCheckOperator,
85 | )
- from airflow.operators.sql import SQLValueCheckOperator
86 + from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
87 |
88 | SQLValueCheckOperator()
89 | ValueCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.ValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:89:1
@@ -451,15 +430,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.ValueCheckOperator` is moved
90 | PrestoValueCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
84 84 | ValueCheckOperator,
85 85 | )
86 86 | from airflow.operators.sql import SQLValueCheckOperator
87 |+from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
87 88 |
88 89 | SQLValueCheckOperator()
89 90 | ValueCheckOperator()
84 | ValueCheckOperator,
85 | )
86 | from airflow.operators.sql import SQLValueCheckOperator
87 + from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
88 |
89 | SQLValueCheckOperator()
90 | ValueCheckOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_check_operator.PrestoValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:90:1
@@ -470,15 +448,14 @@ AIR302 [*] `airflow.operators.presto_check_operator.PrestoValueCheckOperator` is
| ^^^^^^^^^^^^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
84 84 | ValueCheckOperator,
85 85 | )
86 86 | from airflow.operators.sql import SQLValueCheckOperator
87 |+from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
87 88 |
88 89 | SQLValueCheckOperator()
89 90 | ValueCheckOperator()
84 | ValueCheckOperator,
85 | )
86 | from airflow.operators.sql import SQLValueCheckOperator
87 + from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
88 |
89 | SQLValueCheckOperator()
90 | ValueCheckOperator()
note: This is an unsafe fix and may change runtime behavior
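And the value-check variants onto `SQLValueCheckOperator`. A sketch with illustrative arguments:

    from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator

    exact_check = SQLValueCheckOperator(
        task_id="exact_value_check",          # hypothetical task
        sql="SELECT COUNT(*) FROM my_table",
        pass_value=1000,                      # expected result
        tolerance=0.1,                        # accept a 10% deviation either way
    )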
AIR302 [*] `airflow.operators.sql.BaseSQLOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:102:1
@@ -491,22 +468,21 @@ AIR302 [*] `airflow.operators.sql.BaseSQLOperator` is moved into `common-sql` pr
104 | SQLTableCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `BaseSQLOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
91 91 |
92 92 |
93 93 | from airflow.operators.sql import (
94 |- BaseSQLOperator,
95 94 | BranchSQLOperator,
96 95 | SQLColumnCheckOperator,
97 96 | SQLTableCheckOperator,
98 97 | _convert_to_float_if_possible,
99 98 | parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import BaseSQLOperator
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
91 |
92 |
93 | from airflow.operators.sql import (
- BaseSQLOperator,
94 | BranchSQLOperator,
95 | SQLColumnCheckOperator,
96 | SQLTableCheckOperator,
97 | _convert_to_float_if_possible,
98 | parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import BaseSQLOperator
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.BranchSQLOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:103:1
@@ -518,21 +494,20 @@ AIR302 [*] `airflow.operators.sql.BranchSQLOperator` is moved into `common-sql`
105 | SQLColumnCheckOperator()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `BranchSQLOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
92 92 |
93 93 | from airflow.operators.sql import (
94 94 | BaseSQLOperator,
95 |- BranchSQLOperator,
96 95 | SQLColumnCheckOperator,
97 96 | SQLTableCheckOperator,
98 97 | _convert_to_float_if_possible,
99 98 | parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import BranchSQLOperator
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
92 |
93 | from airflow.operators.sql import (
94 | BaseSQLOperator,
- BranchSQLOperator,
95 | SQLColumnCheckOperator,
96 | SQLTableCheckOperator,
97 | _convert_to_float_if_possible,
98 | parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import BranchSQLOperator
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLTableCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:104:1
@@ -545,19 +520,18 @@ AIR302 [*] `airflow.operators.sql.SQLTableCheckOperator` is moved into `common-s
106 | _convert_to_float_if_possible()
help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLTableCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
94 94 | BaseSQLOperator,
95 95 | BranchSQLOperator,
96 96 | SQLColumnCheckOperator,
97 |- SQLTableCheckOperator,
98 97 | _convert_to_float_if_possible,
99 98 | parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
94 | BaseSQLOperator,
95 | BranchSQLOperator,
96 | SQLColumnCheckOperator,
- SQLTableCheckOperator,
97 | _convert_to_float_if_possible,
98 | parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.SQLColumnCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:105:1
@@ -570,20 +544,19 @@ AIR302 [*] `airflow.operators.sql.SQLColumnCheckOperator` is moved into `common-
107 | parse_boolean()
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SQLColumnCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
93 93 | from airflow.operators.sql import (
94 94 | BaseSQLOperator,
95 95 | BranchSQLOperator,
96 |- SQLColumnCheckOperator,
97 96 | SQLTableCheckOperator,
98 97 | _convert_to_float_if_possible,
99 98 | parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import SQLColumnCheckOperator
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
93 | from airflow.operators.sql import (
94 | BaseSQLOperator,
95 | BranchSQLOperator,
- SQLColumnCheckOperator,
96 | SQLTableCheckOperator,
97 | _convert_to_float_if_possible,
98 | parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import SQLColumnCheckOperator
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql._convert_to_float_if_possible` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:106:1
@@ -595,18 +568,17 @@ AIR302 [*] `airflow.operators.sql._convert_to_float_if_possible` is moved into `
107 | parse_boolean()
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `_convert_to_float_if_possible` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
95 95 | BranchSQLOperator,
96 96 | SQLColumnCheckOperator,
97 97 | SQLTableCheckOperator,
98 |- _convert_to_float_if_possible,
99 98 | parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import _convert_to_float_if_possible
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
95 | BranchSQLOperator,
96 | SQLColumnCheckOperator,
97 | SQLTableCheckOperator,
- _convert_to_float_if_possible,
98 | parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import _convert_to_float_if_possible
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.sql.parse_boolean` is moved into `common-sql` provider in Airflow 3.0;
--> AIR302_common_sql.py:107:1
@@ -617,17 +589,16 @@ AIR302 [*] `airflow.operators.sql.parse_boolean` is moved into `common-sql` prov
| ^^^^^^^^^^^^^
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `parse_boolean` from `airflow.providers.common.sql.operators.sql` instead.
ℹ Unsafe fix
96 96 | SQLColumnCheckOperator,
97 97 | SQLTableCheckOperator,
98 98 | _convert_to_float_if_possible,
99 |- parse_boolean,
100 99 | )
100 |+from airflow.providers.common.sql.operators.sql import parse_boolean
101 101 |
102 102 | BaseSQLOperator()
103 103 | BranchSQLOperator()
96 | SQLColumnCheckOperator,
97 | SQLTableCheckOperator,
98 | _convert_to_float_if_possible,
- parse_boolean,
99 | )
100 + from airflow.providers.common.sql.operators.sql import parse_boolean
101 |
102 | BaseSQLOperator()
103 | BranchSQLOperator()
note: This is an unsafe fix and may change runtime behavior
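The two module-level helpers travel with the operator classes. A small sketch of what they do (behavior inferred from their names and the Airflow 2.x sources; treat as illustrative):

    from airflow.providers.common.sql.operators.sql import (
        _convert_to_float_if_possible,
        parse_boolean,
    )

    parse_boolean("true")                 # -> True
    _convert_to_float_if_possible("1.5")  # -> 1.5; non-numeric input comes back unchanged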
AIR302 [*] `airflow.sensors.sql.SqlSensor` is moved into `common-sql` provider in Airflow 3.0;
|
||||
--> AIR302_common_sql.py:112:1
|
||||
@@ -638,16 +609,15 @@ AIR302 [*] `airflow.sensors.sql.SqlSensor` is moved into `common-sql` provider i
|
||||
| ^^^^^^^^^
|
||||
|
|
||||
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SqlSensor` from `airflow.providers.common.sql.sensors.sql` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
107 107 | parse_boolean()
|
||||
108 108 |
|
||||
109 109 |
|
||||
110 |-from airflow.sensors.sql import SqlSensor
|
||||
110 |+from airflow.providers.common.sql.sensors.sql import SqlSensor
|
||||
111 111 |
|
||||
112 112 | SqlSensor()
|
||||
113 113 |
|
||||
107 | parse_boolean()
|
||||
108 |
|
||||
109 |
|
||||
- from airflow.sensors.sql import SqlSensor
|
||||
110 + from airflow.providers.common.sql.sensors.sql import SqlSensor
|
||||
111 |
|
||||
112 | SqlSensor()
|
||||
113 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.sensors.sql_sensor.SqlSensor` is moved into `common-sql` provider in Airflow 3.0;
|
||||
--> AIR302_common_sql.py:117:1
|
||||
@@ -658,13 +628,12 @@ AIR302 [*] `airflow.sensors.sql_sensor.SqlSensor` is moved into `common-sql` pro
|
||||
| ^^^^^^^^^
|
||||
|
|
||||
help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SqlSensor` from `airflow.providers.common.sql.sensors.sql` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
112 112 | SqlSensor()
|
||||
113 113 |
|
||||
114 114 |
|
||||
115 |-from airflow.sensors.sql_sensor import SqlSensor
|
||||
115 |+from airflow.providers.common.sql.sensors.sql import SqlSensor
|
||||
116 116 |
|
||||
117 117 | SqlSensor()
|
||||
118 118 |
|
||||
112 | SqlSensor()
|
||||
113 |
|
||||
114 |
|
||||
- from airflow.sensors.sql_sensor import SqlSensor
|
||||
115 + from airflow.providers.common.sql.sensors.sql import SqlSensor
|
||||
116 |
|
||||
117 | SqlSensor()
|
||||
118 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
@@ -10,11 +10,10 @@ AIR302 [*] `airflow.executors.dask_executor.DaskExecutor` is moved into `daskexe
| ^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-daskexecutor>=1.0.0` and use `DaskExecutor` from `airflow.providers.daskexecutor.executors.dask_executor` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.executors.dask_executor import DaskExecutor
3 |+from airflow.providers.daskexecutor.executors.dask_executor import DaskExecutor
4 4 |
5 5 | DaskExecutor()
1 | from __future__ import annotations
2 |
- from airflow.executors.dask_executor import DaskExecutor
3 + from airflow.providers.daskexecutor.executors.dask_executor import DaskExecutor
4 |
5 | DaskExecutor()
note: This is an unsafe fix and may change runtime behavior

@@ -11,22 +11,21 @@ AIR302 [*] `airflow.hooks.druid_hook.DruidDbApiHook` is moved into `apache-druid
13 | DruidHook()
|
help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `DruidDbApiHook` from `airflow.providers.apache.druid.hooks.druid` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.druid_hook import (
4 |- DruidDbApiHook,
5 4 | DruidHook,
6 5 | )
7 6 | from airflow.operators.hive_to_druid import (
8 7 | HiveToDruidOperator,
9 8 | HiveToDruidTransfer,
10 9 | )
10 |+from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook
11 11 |
12 12 | DruidDbApiHook()
13 13 | DruidHook()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.druid_hook import (
- DruidDbApiHook,
4 | DruidHook,
5 | )
6 | from airflow.operators.hive_to_druid import (
7 | HiveToDruidOperator,
8 | HiveToDruidTransfer,
9 | )
10 + from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook
11 |
12 | DruidDbApiHook()
13 | DruidHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.druid_hook.DruidHook` is moved into `apache-druid` provider in Airflow 3.0;
--> AIR302_druid.py:13:1
@@ -38,21 +37,20 @@ AIR302 [*] `airflow.hooks.druid_hook.DruidHook` is moved into `apache-druid` pro
15 | HiveToDruidOperator()
|
help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `DruidHook` from `airflow.providers.apache.druid.hooks.druid` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.druid_hook import (
4 4 | DruidDbApiHook,
5 |- DruidHook,
6 5 | )
7 6 | from airflow.operators.hive_to_druid import (
8 7 | HiveToDruidOperator,
9 8 | HiveToDruidTransfer,
10 9 | )
10 |+from airflow.providers.apache.druid.hooks.druid import DruidHook
11 11 |
12 12 | DruidDbApiHook()
13 13 | DruidHook()
2 |
3 | from airflow.hooks.druid_hook import (
4 | DruidDbApiHook,
- DruidHook,
5 | )
6 | from airflow.operators.hive_to_druid import (
7 | HiveToDruidOperator,
8 | HiveToDruidTransfer,
9 | )
10 + from airflow.providers.apache.druid.hooks.druid import DruidHook
11 |
12 | DruidDbApiHook()
13 | DruidHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_to_druid.HiveToDruidOperator` is moved into `apache-druid` provider in Airflow 3.0;
--> AIR302_druid.py:15:1
@@ -64,18 +62,17 @@ AIR302 [*] `airflow.operators.hive_to_druid.HiveToDruidOperator` is moved into `
16 | HiveToDruidTransfer()
|
help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `HiveToDruidOperator` from `airflow.providers.apache.druid.transfers.hive_to_druid` instead.

ℹ Unsafe fix
5 5 | DruidHook,
6 6 | )
7 7 | from airflow.operators.hive_to_druid import (
8 |- HiveToDruidOperator,
9 8 | HiveToDruidTransfer,
10 9 | )
10 |+from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
11 11 |
12 12 | DruidDbApiHook()
13 13 | DruidHook()
5 | DruidHook,
6 | )
7 | from airflow.operators.hive_to_druid import (
- HiveToDruidOperator,
8 | HiveToDruidTransfer,
9 | )
10 + from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
11 |
12 | DruidDbApiHook()
13 | DruidHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_to_druid.HiveToDruidTransfer` is moved into `apache-druid` provider in Airflow 3.0;
--> AIR302_druid.py:16:1
@@ -85,15 +82,14 @@ AIR302 [*] `airflow.operators.hive_to_druid.HiveToDruidTransfer` is moved into `
| ^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `HiveToDruidOperator` from `airflow.providers.apache.druid.transfers.hive_to_druid` instead.

ℹ Unsafe fix
5 5 | DruidHook,
6 6 | )
7 7 | from airflow.operators.hive_to_druid import (
8 |- HiveToDruidOperator,
9 8 | HiveToDruidTransfer,
10 9 | )
10 |+from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
11 11 |
12 12 | DruidDbApiHook()
13 13 | DruidHook()
5 | DruidHook,
6 | )
7 | from airflow.operators.hive_to_druid import (
- HiveToDruidOperator,
8 | HiveToDruidTransfer,
9 | )
10 + from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
11 |
12 | DruidDbApiHook()
13 | DruidHook()
note: This is an unsafe fix and may change runtime behavior

@@ -12,20 +12,19 @@ AIR302 [*] `airflow.api.auth.backend.basic_auth.CLIENT_AUTH` is moved into `fab`
12 | auth_current_user()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.api.auth.backend.basic_auth import (
4 |- CLIENT_AUTH,
5 4 | auth_current_user,
6 5 | init_app,
7 6 | requires_authentication,
8 7 | )
8 |+from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import CLIENT_AUTH
9 9 |
10 10 | CLIENT_AUTH
11 11 | init_app()
1 | from __future__ import annotations
2 |
3 | from airflow.api.auth.backend.basic_auth import (
- CLIENT_AUTH,
4 | auth_current_user,
5 | init_app,
6 | requires_authentication,
7 | )
8 + from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import CLIENT_AUTH
9 |
10 | CLIENT_AUTH
11 | init_app()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.basic_auth.init_app` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:11:1
@@ -37,18 +36,17 @@ AIR302 [*] `airflow.api.auth.backend.basic_auth.init_app` is moved into `fab` pr
13 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

ℹ Unsafe fix
3 3 | from airflow.api.auth.backend.basic_auth import (
4 4 | CLIENT_AUTH,
5 5 | auth_current_user,
6 |- init_app,
7 6 | requires_authentication,
8 7 | )
8 |+from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import init_app
9 9 |
10 10 | CLIENT_AUTH
11 11 | init_app()
3 | from airflow.api.auth.backend.basic_auth import (
4 | CLIENT_AUTH,
5 | auth_current_user,
- init_app,
6 | requires_authentication,
7 | )
8 + from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import init_app
9 |
10 | CLIENT_AUTH
11 | init_app()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.basic_auth.auth_current_user` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:12:1
@@ -60,19 +58,18 @@ AIR302 [*] `airflow.api.auth.backend.basic_auth.auth_current_user` is moved into
13 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `auth_current_user` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.api.auth.backend.basic_auth import (
4 4 | CLIENT_AUTH,
5 |- auth_current_user,
6 5 | init_app,
7 6 | requires_authentication,
8 7 | )
8 |+from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import auth_current_user
9 9 |
10 10 | CLIENT_AUTH
11 11 | init_app()
2 |
3 | from airflow.api.auth.backend.basic_auth import (
4 | CLIENT_AUTH,
- auth_current_user,
5 | init_app,
6 | requires_authentication,
7 | )
8 + from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import auth_current_user
9 |
10 | CLIENT_AUTH
11 | init_app()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.basic_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:13:1
@@ -85,17 +82,16 @@ AIR302 [*] `airflow.api.auth.backend.basic_auth.requires_authentication` is move
15 | from airflow.api.auth.backend.kerberos_auth import (
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

ℹ Unsafe fix
4 4 | CLIENT_AUTH,
5 5 | auth_current_user,
6 6 | init_app,
7 |- requires_authentication,
8 7 | )
8 |+from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import requires_authentication
9 9 |
10 10 | CLIENT_AUTH
11 11 | init_app()
4 | CLIENT_AUTH,
5 | auth_current_user,
6 | init_app,
- requires_authentication,
7 | )
8 + from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import requires_authentication
9 |
10 | CLIENT_AUTH
11 | init_app()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.kerberos_auth.log` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:23:1
@@ -108,18 +104,17 @@ AIR302 [*] `airflow.api.auth.backend.kerberos_auth.log` is moved into `fab` prov
25 | find_user()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `log` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
16 16 | CLIENT_AUTH,
17 17 | find_user,
18 18 | init_app,
19 |- log,
20 19 | requires_authentication,
21 20 | )
21 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import log
22 22 |
23 23 | log()
24 24 | CLIENT_AUTH
16 | CLIENT_AUTH,
17 | find_user,
18 | init_app,
- log,
19 | requires_authentication,
20 | )
21 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import log
22 |
23 | log()
24 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.kerberos_auth.CLIENT_AUTH` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:24:1
@@ -131,21 +126,20 @@ AIR302 [*] `airflow.api.auth.backend.kerberos_auth.CLIENT_AUTH` is moved into `f
26 | init_app()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
13 13 | requires_authentication()
14 14 |
15 15 | from airflow.api.auth.backend.kerberos_auth import (
16 |- CLIENT_AUTH,
17 16 | find_user,
18 17 | init_app,
19 18 | log,
20 19 | requires_authentication,
21 20 | )
21 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import CLIENT_AUTH
22 22 |
23 23 | log()
24 24 | CLIENT_AUTH
13 | requires_authentication()
14 |
15 | from airflow.api.auth.backend.kerberos_auth import (
- CLIENT_AUTH,
16 | find_user,
17 | init_app,
18 | log,
19 | requires_authentication,
20 | )
21 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import CLIENT_AUTH
22 |
23 | log()
24 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.kerberos_auth.find_user` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:25:1
@@ -158,20 +152,19 @@ AIR302 [*] `airflow.api.auth.backend.kerberos_auth.find_user` is moved into `fab
27 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `find_user` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
14 14 |
15 15 | from airflow.api.auth.backend.kerberos_auth import (
16 16 | CLIENT_AUTH,
17 |- find_user,
18 17 | init_app,
19 18 | log,
20 19 | requires_authentication,
21 20 | )
21 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import find_user
22 22 |
23 23 | log()
24 24 | CLIENT_AUTH
14 |
15 | from airflow.api.auth.backend.kerberos_auth import (
16 | CLIENT_AUTH,
- find_user,
17 | init_app,
18 | log,
19 | requires_authentication,
20 | )
21 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import find_user
22 |
23 | log()
24 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.kerberos_auth.init_app` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:26:1
@@ -183,19 +176,18 @@ AIR302 [*] `airflow.api.auth.backend.kerberos_auth.init_app` is moved into `fab`
27 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
15 15 | from airflow.api.auth.backend.kerberos_auth import (
16 16 | CLIENT_AUTH,
17 17 | find_user,
18 |- init_app,
19 18 | log,
20 19 | requires_authentication,
21 20 | )
21 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app
22 22 |
23 23 | log()
24 24 | CLIENT_AUTH
15 | from airflow.api.auth.backend.kerberos_auth import (
16 | CLIENT_AUTH,
17 | find_user,
- init_app,
18 | log,
19 | requires_authentication,
20 | )
21 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app
22 |
23 | log()
24 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.api.auth.backend.kerberos_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:27:1
@@ -208,17 +200,16 @@ AIR302 [*] `airflow.api.auth.backend.kerberos_auth.requires_authentication` is m
29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
17 17 | find_user,
18 18 | init_app,
19 19 | log,
20 |- requires_authentication,
21 20 | )
21 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import requires_authentication
22 22 |
23 23 | log()
24 24 | CLIENT_AUTH
17 | find_user,
18 | init_app,
19 | log,
- requires_authentication,
20 | )
21 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import requires_authentication
22 |
23 | log()
24 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.log` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:37:1
@@ -231,18 +222,17 @@ AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.log` is mov
39 | find_user()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `log` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
30 30 | CLIENT_AUTH,
31 31 | find_user,
32 32 | init_app,
33 |- log,
34 33 | requires_authentication,
35 34 | )
35 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import log
36 36 |
37 37 | log()
38 38 | CLIENT_AUTH
30 | CLIENT_AUTH,
31 | find_user,
32 | init_app,
- log,
33 | requires_authentication,
34 | )
35 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import log
36 |
37 | log()
38 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.CLIENT_AUTH` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:38:1
@@ -254,21 +244,20 @@ AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.CLIENT_AUTH
40 | init_app()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
27 27 | requires_authentication()
28 28 |
29 29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
30 |- CLIENT_AUTH,
31 30 | find_user,
32 31 | init_app,
33 32 | log,
34 33 | requires_authentication,
35 34 | )
35 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import CLIENT_AUTH
36 36 |
37 37 | log()
38 38 | CLIENT_AUTH
27 | requires_authentication()
28 |
29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
- CLIENT_AUTH,
30 | find_user,
31 | init_app,
32 | log,
33 | requires_authentication,
34 | )
35 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import CLIENT_AUTH
36 |
37 | log()
38 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.find_user` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:39:1
@@ -281,20 +270,19 @@ AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.find_user`
41 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `find_user` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
28 28 |
29 29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
30 30 | CLIENT_AUTH,
31 |- find_user,
32 31 | init_app,
33 32 | log,
34 33 | requires_authentication,
35 34 | )
35 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import find_user
36 36 |
37 37 | log()
38 38 | CLIENT_AUTH
28 |
29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
30 | CLIENT_AUTH,
- find_user,
31 | init_app,
32 | log,
33 | requires_authentication,
34 | )
35 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import find_user
36 |
37 | log()
38 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.init_app` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:40:1
@@ -306,19 +294,18 @@ AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.init_app` i
41 | requires_authentication()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
29 29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
30 30 | CLIENT_AUTH,
31 31 | find_user,
32 |- init_app,
33 32 | log,
34 33 | requires_authentication,
35 34 | )
35 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app
36 36 |
37 37 | log()
38 38 | CLIENT_AUTH
29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
30 | CLIENT_AUTH,
31 | find_user,
- init_app,
32 | log,
33 | requires_authentication,
34 | )
35 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app
36 |
37 | log()
38 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:41:1
@@ -331,17 +318,16 @@ AIR302 [*] `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.requires_au
43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

ℹ Unsafe fix
31 31 | find_user,
32 32 | init_app,
33 33 | log,
34 |- requires_authentication,
35 34 | )
35 |+from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import requires_authentication
36 36 |
37 37 | log()
38 38 | CLIENT_AUTH
31 | find_user,
32 | init_app,
33 | log,
- requires_authentication,
34 | )
35 + from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import requires_authentication
36 |
37 | log()
38 | CLIENT_AUTH
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.fab_auth_manager.FabAuthManager` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:49:1
@@ -354,20 +340,19 @@ AIR302 [*] `airflow.auth.managers.fab.fab_auth_manager.FabAuthManager` is moved
51 | FabAirflowSecurityManagerOverride()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAuthManager` from `airflow.providers.fab.auth_manager.fab_auth_manager` instead.

ℹ Unsafe fix
40 40 | init_app()
41 41 | requires_authentication()
42 42 |
43 |-from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
44 43 | from airflow.auth.managers.fab.security_manager.override import (
45 44 | MAX_NUM_DATABASE_USER_SESSIONS,
46 45 | FabAirflowSecurityManagerOverride,
47 46 | )
47 |+from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
48 48 |
49 49 | FabAuthManager()
50 50 | MAX_NUM_DATABASE_USER_SESSIONS
40 | init_app()
41 | requires_authentication()
42 |
- from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
43 | from airflow.auth.managers.fab.security_manager.override import (
44 | MAX_NUM_DATABASE_USER_SESSIONS,
45 | FabAirflowSecurityManagerOverride,
46 | )
47 + from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
48 |
49 | FabAuthManager()
50 | MAX_NUM_DATABASE_USER_SESSIONS
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.security_manager.override.MAX_NUM_DATABASE_USER_SESSIONS` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:50:1
@@ -378,18 +363,17 @@ AIR302 [*] `airflow.auth.managers.fab.security_manager.override.MAX_NUM_DATABASE
51 | FabAirflowSecurityManagerOverride()
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `MAX_NUM_DATABASE_USER_SESSIONS` from `airflow.providers.fab.auth_manager.security_manager.override` instead.

ℹ Unsafe fix
42 42 |
43 43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
44 44 | from airflow.auth.managers.fab.security_manager.override import (
45 |- MAX_NUM_DATABASE_USER_SESSIONS,
46 45 | FabAirflowSecurityManagerOverride,
47 46 | )
47 |+from airflow.providers.fab.auth_manager.security_manager.override import MAX_NUM_DATABASE_USER_SESSIONS
48 48 |
49 49 | FabAuthManager()
50 50 | MAX_NUM_DATABASE_USER_SESSIONS
42 |
43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
44 | from airflow.auth.managers.fab.security_manager.override import (
- MAX_NUM_DATABASE_USER_SESSIONS,
45 | FabAirflowSecurityManagerOverride,
46 | )
47 + from airflow.providers.fab.auth_manager.security_manager.override import MAX_NUM_DATABASE_USER_SESSIONS
48 |
49 | FabAuthManager()
50 | MAX_NUM_DATABASE_USER_SESSIONS
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.auth.managers.fab.security_manager.override.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:51:1
@@ -402,17 +386,16 @@ AIR302 [*] `airflow.auth.managers.fab.security_manager.override.FabAirflowSecuri
53 | from airflow.www.security import FabAirflowSecurityManagerOverride
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAirflowSecurityManagerOverride` from `airflow.providers.fab.auth_manager.security_manager.override` instead.

ℹ Unsafe fix
43 43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
44 44 | from airflow.auth.managers.fab.security_manager.override import (
45 45 | MAX_NUM_DATABASE_USER_SESSIONS,
46 |- FabAirflowSecurityManagerOverride,
47 46 | )
47 |+from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
48 48 |
49 49 | FabAuthManager()
50 50 | MAX_NUM_DATABASE_USER_SESSIONS
43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
44 | from airflow.auth.managers.fab.security_manager.override import (
45 | MAX_NUM_DATABASE_USER_SESSIONS,
- FabAirflowSecurityManagerOverride,
46 | )
47 + from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
48 |
49 | FabAuthManager()
50 | MAX_NUM_DATABASE_USER_SESSIONS
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.www.security.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
--> AIR302_fab.py:55:1
@@ -423,12 +406,11 @@ AIR302 [*] `airflow.www.security.FabAirflowSecurityManagerOverride` is moved int
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAirflowSecurityManagerOverride` from `airflow.providers.fab.auth_manager.security_manager.override` instead.

ℹ Unsafe fix
50 50 | MAX_NUM_DATABASE_USER_SESSIONS
51 51 | FabAirflowSecurityManagerOverride()
52 52 |
53 |-from airflow.www.security import FabAirflowSecurityManagerOverride
53 |+from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
54 54 |
55 55 | FabAirflowSecurityManagerOverride()
50 | MAX_NUM_DATABASE_USER_SESSIONS
51 | FabAirflowSecurityManagerOverride()
52 |
- from airflow.www.security import FabAirflowSecurityManagerOverride
53 + from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
54 |
55 | FabAirflowSecurityManagerOverride()
note: This is an unsafe fix and may change runtime behavior

@@ -11,16 +11,15 @@ AIR302 [*] `airflow.hooks.webhdfs_hook.WebHDFSHook` is moved into `apache-hdfs`
|
||||
7 | WebHdfsSensor()
|
||||
|
|
||||
help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `WebHDFSHook` from `airflow.providers.apache.hdfs.hooks.webhdfs` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | from __future__ import annotations
|
||||
2 2 |
|
||||
3 |-from airflow.hooks.webhdfs_hook import WebHDFSHook
|
||||
4 3 | from airflow.sensors.web_hdfs_sensor import WebHdfsSensor
|
||||
4 |+from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
|
||||
5 5 |
|
||||
6 6 | WebHDFSHook()
|
||||
7 7 | WebHdfsSensor()
|
||||
1 | from __future__ import annotations
|
||||
2 |
|
||||
- from airflow.hooks.webhdfs_hook import WebHDFSHook
|
||||
3 | from airflow.sensors.web_hdfs_sensor import WebHdfsSensor
|
||||
4 + from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
|
||||
5 |
|
||||
6 | WebHDFSHook()
|
||||
7 | WebHdfsSensor()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.sensors.web_hdfs_sensor.WebHdfsSensor` is moved into `apache-hdfs` provider in Airflow 3.0;
|
||||
--> AIR302_hdfs.py:7:1
|
||||
@@ -30,13 +29,12 @@ AIR302 [*] `airflow.sensors.web_hdfs_sensor.WebHdfsSensor` is moved into `apache
|
||||
| ^^^^^^^^^^^^^
|
||||
|
|
||||
help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `WebHdfsSensor` from `airflow.providers.apache.hdfs.sensors.web_hdfs` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | from __future__ import annotations
|
||||
2 2 |
|
||||
3 3 | from airflow.hooks.webhdfs_hook import WebHDFSHook
|
||||
4 |-from airflow.sensors.web_hdfs_sensor import WebHdfsSensor
|
||||
4 |+from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor
|
||||
5 5 |
|
||||
6 6 | WebHDFSHook()
|
||||
7 7 | WebHdfsSensor()
|
||||
1 | from __future__ import annotations
|
||||
2 |
|
||||
3 | from airflow.hooks.webhdfs_hook import WebHDFSHook
|
||||
- from airflow.sensors.web_hdfs_sensor import WebHdfsSensor
|
||||
4 + from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor
|
||||
5 |
|
||||
6 | WebHDFSHook()
|
||||
7 | WebHdfsSensor()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
@@ -12,23 +12,22 @@ AIR302 [*] `airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES` is moved into `apach
20 | HiveMetastoreHook()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HIVE_QUEUE_PRIORITIES` from `airflow.providers.apache.hive.hooks.hive` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.hive_hooks import (
4 |- HIVE_QUEUE_PRIORITIES,
5 4 | HiveCliHook,
6 5 | HiveMetastoreHook,
7 6 | HiveServer2Hook,
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.hive_hooks import (
- HIVE_QUEUE_PRIORITIES,
4 | HiveCliHook,
5 | HiveMetastoreHook,
6 | HiveServer2Hook,
--------------------------------------------------------------------------------
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.hooks.hive import HIVE_QUEUE_PRIORITIES
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.hooks.hive import HIVE_QUEUE_PRIORITIES
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.hive_hooks.HiveCliHook` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:19:1
@@ -40,23 +39,22 @@ AIR302 [*] `airflow.hooks.hive_hooks.HiveCliHook` is moved into `apache-hive` pr
21 | HiveServer2Hook()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveCliHook` from `airflow.providers.apache.hive.hooks.hive` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.hive_hooks import (
4 4 | HIVE_QUEUE_PRIORITIES,
5 |- HiveCliHook,
6 5 | HiveMetastoreHook,
7 6 | HiveServer2Hook,
8 7 | )
2 |
3 | from airflow.hooks.hive_hooks import (
4 | HIVE_QUEUE_PRIORITIES,
- HiveCliHook,
5 | HiveMetastoreHook,
6 | HiveServer2Hook,
7 | )
--------------------------------------------------------------------------------
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.hooks.hive import HiveCliHook
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.hive_hooks.HiveMetastoreHook` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:20:1
@@ -68,23 +66,22 @@ AIR302 [*] `airflow.hooks.hive_hooks.HiveMetastoreHook` is moved into `apache-hi
21 | HiveServer2Hook()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveMetastoreHook` from `airflow.providers.apache.hive.hooks.hive` instead.

ℹ Unsafe fix
3 3 | from airflow.hooks.hive_hooks import (
4 4 | HIVE_QUEUE_PRIORITIES,
5 5 | HiveCliHook,
6 |- HiveMetastoreHook,
7 6 | HiveServer2Hook,
8 7 | )
9 8 | from airflow.macros.hive import (
3 | from airflow.hooks.hive_hooks import (
4 | HIVE_QUEUE_PRIORITIES,
5 | HiveCliHook,
- HiveMetastoreHook,
6 | HiveServer2Hook,
7 | )
8 | from airflow.macros.hive import (
--------------------------------------------------------------------------------
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.hive_hooks.HiveServer2Hook` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:21:1
@@ -97,23 +94,22 @@ AIR302 [*] `airflow.hooks.hive_hooks.HiveServer2Hook` is moved into `apache-hive
23 | closest_ds_partition()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveServer2Hook` from `airflow.providers.apache.hive.hooks.hive` instead.

ℹ Unsafe fix
4 4 | HIVE_QUEUE_PRIORITIES,
5 5 | HiveCliHook,
6 6 | HiveMetastoreHook,
7 |- HiveServer2Hook,
8 7 | )
9 8 | from airflow.macros.hive import (
10 9 | closest_ds_partition,
4 | HIVE_QUEUE_PRIORITIES,
5 | HiveCliHook,
6 | HiveMetastoreHook,
- HiveServer2Hook,
7 | )
8 | from airflow.macros.hive import (
9 | closest_ds_partition,
--------------------------------------------------------------------------------
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.macros.hive.closest_ds_partition` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:23:1
@@ -125,22 +121,21 @@ AIR302 [*] `airflow.macros.hive.closest_ds_partition` is moved into `apache-hive
24 | max_partition()
|
help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `closest_ds_partition` from `airflow.providers.apache.hive.macros.hive` instead.

ℹ Unsafe fix
7 7 | HiveServer2Hook,
8 8 | )
9 9 | from airflow.macros.hive import (
10 |- closest_ds_partition,
11 10 | max_partition,
12 11 | )
13 12 | from airflow.operators.hive_operator import HiveOperator
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.macros.hive import closest_ds_partition
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
7 | HiveServer2Hook,
8 | )
9 | from airflow.macros.hive import (
- closest_ds_partition,
10 | max_partition,
11 | )
12 | from airflow.operators.hive_operator import HiveOperator
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.macros.hive import closest_ds_partition
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.macros.hive.max_partition` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:24:1
@@ -152,21 +147,20 @@ AIR302 [*] `airflow.macros.hive.max_partition` is moved into `apache-hive` provi
26 | HiveOperator()
|
help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `max_partition` from `airflow.providers.apache.hive.macros.hive` instead.

ℹ Unsafe fix
8 8 | )
9 9 | from airflow.macros.hive import (
10 10 | closest_ds_partition,
11 |- max_partition,
12 11 | )
13 12 | from airflow.operators.hive_operator import HiveOperator
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.macros.hive import max_partition
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
8 | )
9 | from airflow.macros.hive import (
10 | closest_ds_partition,
- max_partition,
11 | )
12 | from airflow.operators.hive_operator import HiveOperator
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.macros.hive import max_partition
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_operator.HiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:26:1
@@ -179,19 +173,18 @@ AIR302 [*] `airflow.operators.hive_operator.HiveOperator` is moved into `apache-
28 | HiveToMySqlOperator()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveOperator` from `airflow.providers.apache.hive.operators.hive` instead.

ℹ Unsafe fix
10 10 | closest_ds_partition,
11 11 | max_partition,
12 12 | )
13 |-from airflow.operators.hive_operator import HiveOperator
14 13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.operators.hive import HiveOperator
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
10 | closest_ds_partition,
11 | max_partition,
12 | )
- from airflow.operators.hive_operator import HiveOperator
13 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.operators.hive import HiveOperator
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_stats_operator.HiveStatsCollectionOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:27:1
@@ -203,18 +196,17 @@ AIR302 [*] `airflow.operators.hive_stats_operator.HiveStatsCollectionOperator` i
29 | HiveToSambaOperator()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveStatsCollectionOperator` from `airflow.providers.apache.hive.operators.hive_stats` instead.

ℹ Unsafe fix
11 11 | max_partition,
12 12 | )
13 13 | from airflow.operators.hive_operator import HiveOperator
14 |-from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
11 | max_partition,
12 | )
13 | from airflow.operators.hive_operator import HiveOperator
- from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
14 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_to_mysql.HiveToMySqlOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:28:1
@@ -226,17 +218,16 @@ AIR302 [*] `airflow.operators.hive_to_mysql.HiveToMySqlOperator` is moved into `
29 | HiveToSambaOperator()
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToMySqlOperator` from `airflow.providers.apache.hive.transfers.hive_to_mysql` instead.

ℹ Unsafe fix
12 12 | )
13 13 | from airflow.operators.hive_operator import HiveOperator
14 14 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 |-from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
12 | )
13 | from airflow.operators.hive_operator import HiveOperator
14 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
- from airflow.operators.hive_to_mysql import HiveToMySqlOperator
15 | from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_to_samba_operator.HiveToSambaOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:29:1
@@ -247,16 +238,15 @@ AIR302 [*] `airflow.operators.hive_to_samba_operator.HiveToSambaOperator` is mov
| ^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToSambaOperator` from `airflow.providers.apache.hive.transfers.hive_to_samba` instead.

ℹ Unsafe fix
13 13 | from airflow.operators.hive_operator import HiveOperator
14 14 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 15 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
16 |-from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 |+from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
17 17 |
18 18 | HIVE_QUEUE_PRIORITIES
19 19 | HiveCliHook()
13 | from airflow.operators.hive_operator import HiveOperator
14 | from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
15 | from airflow.operators.hive_to_mysql import HiveToMySqlOperator
- from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
16 + from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
17 |
18 | HIVE_QUEUE_PRIORITIES
19 | HiveCliHook()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.hive_to_mysql.HiveToMySqlTransfer` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:34:1
@@ -269,15 +259,14 @@ AIR302 [*] `airflow.operators.hive_to_mysql.HiveToMySqlTransfer` is moved into `
36 | from airflow.operators.mysql_to_hive import MySqlToHiveOperator
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToMySqlOperator` from `airflow.providers.apache.hive.transfers.hive_to_mysql` instead.

ℹ Unsafe fix
30 30 |
31 31 |
32 32 | from airflow.operators.hive_to_mysql import HiveToMySqlTransfer
33 |+from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
33 34 |
34 35 | HiveToMySqlTransfer()
35 36 |
30 |
31 |
32 | from airflow.operators.hive_to_mysql import HiveToMySqlTransfer
33 + from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
34 |
35 | HiveToMySqlTransfer()
36 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.mysql_to_hive.MySqlToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:38:1
@@ -290,16 +279,15 @@ AIR302 [*] `airflow.operators.mysql_to_hive.MySqlToHiveOperator` is moved into `
40 | from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MySqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mysql_to_hive` instead.

ℹ Unsafe fix
33 33 |
34 34 | HiveToMySqlTransfer()
35 35 |
36 |-from airflow.operators.mysql_to_hive import MySqlToHiveOperator
36 |+from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
37 37 |
38 38 | MySqlToHiveOperator()
39 39 |
33 |
34 | HiveToMySqlTransfer()
35 |
- from airflow.operators.mysql_to_hive import MySqlToHiveOperator
36 + from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
37 |
38 | MySqlToHiveOperator()
39 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.mysql_to_hive.MySqlToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:42:1
@@ -312,15 +300,14 @@ AIR302 [*] `airflow.operators.mysql_to_hive.MySqlToHiveTransfer` is moved into `
44 | from airflow.operators.mssql_to_hive import MsSqlToHiveOperator
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MySqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mysql_to_hive` instead.

ℹ Unsafe fix
38 38 | MySqlToHiveOperator()
39 39 |
40 40 | from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
41 |+from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
41 42 |
42 43 | MySqlToHiveTransfer()
43 44 |
38 | MySqlToHiveOperator()
39 |
40 | from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
41 + from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
42 |
43 | MySqlToHiveTransfer()
44 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.mssql_to_hive.MsSqlToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:46:1
@@ -333,16 +320,15 @@ AIR302 [*] `airflow.operators.mssql_to_hive.MsSqlToHiveOperator` is moved into `
48 | from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MsSqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mssql_to_hive` instead.

ℹ Unsafe fix
41 41 |
42 42 | MySqlToHiveTransfer()
43 43 |
44 |-from airflow.operators.mssql_to_hive import MsSqlToHiveOperator
44 |+from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
45 45 |
46 46 | MsSqlToHiveOperator()
47 47 |
41 |
42 | MySqlToHiveTransfer()
43 |
- from airflow.operators.mssql_to_hive import MsSqlToHiveOperator
44 + from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
45 |
46 | MsSqlToHiveOperator()
47 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.mssql_to_hive.MsSqlToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:50:1
@@ -355,15 +341,14 @@ AIR302 [*] `airflow.operators.mssql_to_hive.MsSqlToHiveTransfer` is moved into `
52 | from airflow.operators.s3_to_hive_operator import S3ToHiveOperator
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MsSqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mssql_to_hive` instead.

ℹ Unsafe fix
46 46 | MsSqlToHiveOperator()
47 47 |
48 48 | from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer
49 |+from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
49 50 |
50 51 | MsSqlToHiveTransfer()
51 52 |
46 | MsSqlToHiveOperator()
47 |
48 | from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer
49 + from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
50 |
51 | MsSqlToHiveTransfer()
52 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.s3_to_hive_operator.S3ToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:54:1
@@ -376,16 +361,15 @@ AIR302 [*] `airflow.operators.s3_to_hive_operator.S3ToHiveOperator` is moved int
56 | from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `S3ToHiveOperator` from `airflow.providers.apache.hive.transfers.s3_to_hive` instead.

ℹ Unsafe fix
49 49 |
50 50 | MsSqlToHiveTransfer()
51 51 |
52 |-from airflow.operators.s3_to_hive_operator import S3ToHiveOperator
52 |+from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
53 53 |
54 54 | S3ToHiveOperator()
55 55 |
49 |
50 | MsSqlToHiveTransfer()
51 |
- from airflow.operators.s3_to_hive_operator import S3ToHiveOperator
52 + from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
53 |
54 | S3ToHiveOperator()
55 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.s3_to_hive_operator.S3ToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:58:1
@@ -398,15 +382,14 @@ AIR302 [*] `airflow.operators.s3_to_hive_operator.S3ToHiveTransfer` is moved int
60 | from airflow.sensors.hive_partition_sensor import HivePartitionSensor
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `S3ToHiveOperator` from `airflow.providers.apache.hive.transfers.s3_to_hive` instead.

ℹ Unsafe fix
54 54 | S3ToHiveOperator()
55 55 |
56 56 | from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer
57 |+from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
57 58 |
58 59 | S3ToHiveTransfer()
59 60 |
54 | S3ToHiveOperator()
55 |
56 | from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer
57 + from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
58 |
59 | S3ToHiveTransfer()
60 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.hive_partition_sensor.HivePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:62:1
@@ -419,16 +402,15 @@ AIR302 [*] `airflow.sensors.hive_partition_sensor.HivePartitionSensor` is moved
64 | from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HivePartitionSensor` from `airflow.providers.apache.hive.sensors.hive_partition` instead.

ℹ Unsafe fix
57 57 |
58 58 | S3ToHiveTransfer()
59 59 |
60 |-from airflow.sensors.hive_partition_sensor import HivePartitionSensor
60 |+from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
61 61 |
62 62 | HivePartitionSensor()
63 63 |
57 |
58 | S3ToHiveTransfer()
59 |
- from airflow.sensors.hive_partition_sensor import HivePartitionSensor
60 + from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
61 |
62 | HivePartitionSensor()
63 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.metastore_partition_sensor.MetastorePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
--> AIR302_hive.py:66:1
@@ -441,16 +423,15 @@ AIR302 [*] `airflow.sensors.metastore_partition_sensor.MetastorePartitionSensor`
68 | from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
|
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MetastorePartitionSensor` from `airflow.providers.apache.hive.sensors.metastore_partition` instead.

ℹ Unsafe fix
61 61 |
62 62 | HivePartitionSensor()
63 63 |
64 |-from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
|
||||
64 |+from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor
|
||||
65 65 |
|
||||
66 66 | MetastorePartitionSensor()
|
||||
67 67 |
|
||||
61 |
|
||||
62 | HivePartitionSensor()
|
||||
63 |
|
||||
- from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
|
||||
64 + from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor
|
||||
65 |
|
||||
66 | MetastorePartitionSensor()
|
||||
67 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR302 [*] `airflow.sensors.named_hive_partition_sensor.NamedHivePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
|
||||
--> AIR302_hive.py:70:1
|
||||
@@ -461,12 +442,11 @@ AIR302 [*] `airflow.sensors.named_hive_partition_sensor.NamedHivePartitionSensor
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `NamedHivePartitionSensor` from `airflow.providers.apache.hive.sensors.named_hive_partition` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
65 65 |
|
||||
66 66 | MetastorePartitionSensor()
|
||||
67 67 |
|
||||
68 |-from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
|
||||
68 |+from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor
|
||||
69 69 |
|
||||
70 70 | NamedHivePartitionSensor()
|
||||
65 |
|
||||
66 | MetastorePartitionSensor()
|
||||
67 |
|
||||
- from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
|
||||
68 + from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor
|
||||
69 |
|
||||
70 | NamedHivePartitionSensor()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
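Every AIR302 fix in this hive section has the same shape: an import from a removed `airflow.operators.*` or `airflow.sensors.*` module is rewritten to its new home in the `apache-airflow-providers-apache-hive` package. A minimal before/after sketch, assuming the provider package is installed:

# Airflow 2.x import, removed in Airflow 3.0:
# from airflow.operators.mssql_to_hive import MsSqlToHiveOperator

# Replacement, per the fix above (requires apache-airflow-providers-apache-hive>=1.0.0):
from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator

These fixes are marked unsafe because the rewrite only changes the import path; the linter cannot verify that the provider class behaves identically to the legacy shim at runtime.
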
@@ -12,17 +12,16 @@ AIR302 [*] `airflow.hooks.http_hook.HttpHook` is moved into `http` provider in A
9 | HttpSensor()
|
help: Install `apache-airflow-providers-http>=1.0.0` and use `HttpHook` from `airflow.providers.http.hooks.http` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.http_hook import HttpHook
4 3 | from airflow.operators.http_operator import SimpleHttpOperator
5 4 | from airflow.sensors.http_sensor import HttpSensor
5 |+from airflow.providers.http.hooks.http import HttpHook
6 6 |
7 7 | HttpHook()
8 8 | SimpleHttpOperator()
1 | from __future__ import annotations
2 |
- from airflow.hooks.http_hook import HttpHook
3 | from airflow.operators.http_operator import SimpleHttpOperator
4 | from airflow.sensors.http_sensor import HttpSensor
5 + from airflow.providers.http.hooks.http import HttpHook
6 |
7 | HttpHook()
8 | SimpleHttpOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.http_operator.SimpleHttpOperator` is moved into `http` provider in Airflow 3.0;
--> AIR302_http.py:8:1
@@ -33,17 +32,15 @@ AIR302 [*] `airflow.operators.http_operator.SimpleHttpOperator` is moved into `h
9 | HttpSensor()
|
help: Install `apache-airflow-providers-http>=5.0.0` and use `HttpOperator` from `airflow.providers.http.operators.http` instead.

ℹ Safe fix
3 3 | from airflow.hooks.http_hook import HttpHook
4 4 | from airflow.operators.http_operator import SimpleHttpOperator
5 5 | from airflow.sensors.http_sensor import HttpSensor
6 |+from airflow.providers.http.operators.http import HttpOperator
6 7 |
7 8 | HttpHook()
8 |-SimpleHttpOperator()
9 |+HttpOperator()
9 10 | HttpSensor()
3 | from airflow.hooks.http_hook import HttpHook
4 | from airflow.operators.http_operator import SimpleHttpOperator
5 | from airflow.sensors.http_sensor import HttpSensor
6 + from airflow.providers.http.operators.http import HttpOperator
7 |
8 | HttpHook()
- SimpleHttpOperator()
9 + HttpOperator()
10 | HttpSensor()

AIR302 [*] `airflow.sensors.http_sensor.HttpSensor` is moved into `http` provider in Airflow 3.0;
--> AIR302_http.py:9:1
@@ -54,13 +51,12 @@ AIR302 [*] `airflow.sensors.http_sensor.HttpSensor` is moved into `http` provide
| ^^^^^^^^^^
|
help: Install `apache-airflow-providers-http>=1.0.0` and use `HttpSensor` from `airflow.providers.http.sensors.http` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.http_hook import HttpHook
4 4 | from airflow.operators.http_operator import SimpleHttpOperator
5 |-from airflow.sensors.http_sensor import HttpSensor
5 |+from airflow.providers.http.sensors.http import HttpSensor
6 6 |
7 7 | HttpHook()
8 8 | SimpleHttpOperator()
2 |
3 | from airflow.hooks.http_hook import HttpHook
4 | from airflow.operators.http_operator import SimpleHttpOperator
- from airflow.sensors.http_sensor import HttpSensor
5 + from airflow.providers.http.sensors.http import HttpSensor
6 |
7 | HttpHook()
8 | SimpleHttpOperator()
note: This is an unsafe fix and may change runtime behavior

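The `SimpleHttpOperator` diagnostic differs from its neighbours in two ways: the class itself is renamed to `HttpOperator`, so call sites are rewritten along with the import, and the required provider version jumps to `>=5.0.0`; unlike the hook and sensor fixes it is recorded as a safe fix. A sketch, with the `task_id` and `endpoint` arguments as illustrative assumptions:

# Before:
# from airflow.operators.http_operator import SimpleHttpOperator
# SimpleHttpOperator(task_id="ping", endpoint="/health")

# After (requires apache-airflow-providers-http>=5.0.0):
from airflow.providers.http.operators.http import HttpOperator

ping = HttpOperator(task_id="ping", endpoint="/health")
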
@@ -11,18 +11,17 @@ AIR302 [*] `airflow.hooks.jdbc_hook.JdbcHook` is moved into `jdbc` provider in A
9 | jaydebeapi()
|
help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `JdbcHook` from `airflow.providers.jdbc.hooks.jdbc` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.jdbc_hook import (
4 |- JdbcHook,
5 4 | jaydebeapi,
6 5 | )
6 |+from airflow.providers.jdbc.hooks.jdbc import JdbcHook
7 7 |
8 8 | JdbcHook()
9 9 | jaydebeapi()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.jdbc_hook import (
- JdbcHook,
4 | jaydebeapi,
5 | )
6 + from airflow.providers.jdbc.hooks.jdbc import JdbcHook
7 |
8 | JdbcHook()
9 | jaydebeapi()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.jdbc_hook.jaydebeapi` is moved into `jdbc` provider in Airflow 3.0;
--> AIR302_jdbc.py:9:1
@@ -32,14 +31,13 @@ AIR302 [*] `airflow.hooks.jdbc_hook.jaydebeapi` is moved into `jdbc` provider in
| ^^^^^^^^^^
|
help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `jaydebeapi` from `airflow.providers.jdbc.hooks.jdbc` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.jdbc_hook import (
4 4 | JdbcHook,
5 |- jaydebeapi,
6 5 | )
6 |+from airflow.providers.jdbc.hooks.jdbc import jaydebeapi
7 7 |
8 8 | JdbcHook()
9 9 | jaydebeapi()
2 |
3 | from airflow.hooks.jdbc_hook import (
4 | JdbcHook,
- jaydebeapi,
5 | )
6 + from airflow.providers.jdbc.hooks.jdbc import jaydebeapi
7 |
8 | JdbcHook()
9 | jaydebeapi()
note: This is an unsafe fix and may change runtime behavior

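When only one member of a parenthesized import moves, the fix removes that single name from the group and appends a new provider import after it, leaving the other names untouched. The `JdbcHook` fix above can be sketched as:

# Before:
# from airflow.hooks.jdbc_hook import (
#     JdbcHook,
#     jaydebeapi,
# )

# After the JdbcHook fix alone (jaydebeapi keeps its own diagnostic and fix):
from airflow.hooks.jdbc_hook import (
    jaydebeapi,
)
from airflow.providers.jdbc.hooks.jdbc import JdbcHook
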
File diff suppressed because it is too large
@@ -12,19 +12,18 @@ AIR302 [*] `airflow.hooks.mysql_hook.MySqlHook` is moved into `mysql` provider i
11 | PrestoToMySqlTransfer()
|
help: Install `apache-airflow-providers-mysql>=1.0.0` and use `MySqlHook` from `airflow.providers.mysql.hooks.mysql` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.mysql_hook import MySqlHook
4 3 | from airflow.operators.presto_to_mysql import (
5 4 | PrestoToMySqlOperator,
6 5 | PrestoToMySqlTransfer,
7 6 | )
7 |+from airflow.providers.mysql.hooks.mysql import MySqlHook
8 8 |
9 9 | MySqlHook()
10 10 | PrestoToMySqlOperator()
1 | from __future__ import annotations
2 |
- from airflow.hooks.mysql_hook import MySqlHook
3 | from airflow.operators.presto_to_mysql import (
4 | PrestoToMySqlOperator,
5 | PrestoToMySqlTransfer,
6 | )
7 + from airflow.providers.mysql.hooks.mysql import MySqlHook
8 |
9 | MySqlHook()
10 | PrestoToMySqlOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_to_mysql.PrestoToMySqlOperator` is moved into `mysql` provider in Airflow 3.0;
--> AIR302_mysql.py:10:1
@@ -35,18 +34,17 @@ AIR302 [*] `airflow.operators.presto_to_mysql.PrestoToMySqlOperator` is moved in
11 | PrestoToMySqlTransfer()
|
help: Install `apache-airflow-providers-mysql>=1.0.0` and use `PrestoToMySqlOperator` from `airflow.providers.mysql.transfers.presto_to_mysql` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.mysql_hook import MySqlHook
4 4 | from airflow.operators.presto_to_mysql import (
5 |- PrestoToMySqlOperator,
6 5 | PrestoToMySqlTransfer,
7 6 | )
7 |+from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
8 8 |
9 9 | MySqlHook()
10 10 | PrestoToMySqlOperator()
2 |
3 | from airflow.hooks.mysql_hook import MySqlHook
4 | from airflow.operators.presto_to_mysql import (
- PrestoToMySqlOperator,
5 | PrestoToMySqlTransfer,
6 | )
7 + from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
8 |
9 | MySqlHook()
10 | PrestoToMySqlOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.presto_to_mysql.PrestoToMySqlTransfer` is moved into `mysql` provider in Airflow 3.0;
--> AIR302_mysql.py:11:1
@@ -57,15 +55,14 @@ AIR302 [*] `airflow.operators.presto_to_mysql.PrestoToMySqlTransfer` is moved in
| ^^^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-mysql>=1.0.0` and use `PrestoToMySqlOperator` from `airflow.providers.mysql.transfers.presto_to_mysql` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.hooks.mysql_hook import MySqlHook
4 4 | from airflow.operators.presto_to_mysql import (
5 |- PrestoToMySqlOperator,
6 5 | PrestoToMySqlTransfer,
7 6 | )
7 |+from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
8 8 |
9 9 | MySqlHook()
10 10 | PrestoToMySqlOperator()
2 |
3 | from airflow.hooks.mysql_hook import MySqlHook
4 | from airflow.operators.presto_to_mysql import (
- PrestoToMySqlOperator,
5 | PrestoToMySqlTransfer,
6 | )
7 + from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
8 |
9 | MySqlHook()
10 | PrestoToMySqlOperator()
note: This is an unsafe fix and may change runtime behavior

@@ -10,11 +10,10 @@ AIR302 [*] `airflow.hooks.oracle_hook.OracleHook` is moved into `oracle` provide
| ^^^^^^^^^^
|
help: Install `apache-airflow-providers-oracle>=1.0.0` and use `OracleHook` from `airflow.providers.oracle.hooks.oracle` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.oracle_hook import OracleHook
3 |+from airflow.providers.oracle.hooks.oracle import OracleHook
4 4 |
5 5 | OracleHook()
1 | from __future__ import annotations
2 |
- from airflow.hooks.oracle_hook import OracleHook
3 + from airflow.providers.oracle.hooks.oracle import OracleHook
4 |
5 | OracleHook()
note: This is an unsafe fix and may change runtime behavior

@@ -10,11 +10,10 @@ AIR302 [*] `airflow.operators.papermill_operator.PapermillOperator` is moved int
| ^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-papermill>=1.0.0` and use `PapermillOperator` from `airflow.providers.papermill.operators.papermill` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.operators.papermill_operator import PapermillOperator
3 |+from airflow.providers.papermill.operators.papermill import PapermillOperator
4 4 |
5 5 | PapermillOperator()
1 | from __future__ import annotations
2 |
- from airflow.operators.papermill_operator import PapermillOperator
3 + from airflow.providers.papermill.operators.papermill import PapermillOperator
4 |
5 | PapermillOperator()
note: This is an unsafe fix and may change runtime behavior

@@ -11,16 +11,15 @@ AIR302 [*] `airflow.hooks.pig_hook.PigCliHook` is moved into `apache-pig` provid
7 | PigOperator()
|
help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `PigCliHook` from `airflow.providers.apache.pig.hooks.pig` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.pig_hook import PigCliHook
4 3 | from airflow.operators.pig_operator import PigOperator
4 |+from airflow.providers.apache.pig.hooks.pig import PigCliHook
5 5 |
6 6 | PigCliHook()
7 7 | PigOperator()
1 | from __future__ import annotations
2 |
- from airflow.hooks.pig_hook import PigCliHook
3 | from airflow.operators.pig_operator import PigOperator
4 + from airflow.providers.apache.pig.hooks.pig import PigCliHook
5 |
6 | PigCliHook()
7 | PigOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.pig_operator.PigOperator` is moved into `apache-pig` provider in Airflow 3.0;
--> AIR302_pig.py:7:1
@@ -30,13 +29,12 @@ AIR302 [*] `airflow.operators.pig_operator.PigOperator` is moved into `apache-pi
| ^^^^^^^^^^^
|
help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `PigOperator` from `airflow.providers.apache.pig.operators.pig` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.pig_hook import PigCliHook
4 |-from airflow.operators.pig_operator import PigOperator
4 |+from airflow.providers.apache.pig.operators.pig import PigOperator
5 5 |
6 6 | PigCliHook()
7 7 | PigOperator()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.pig_hook import PigCliHook
- from airflow.operators.pig_operator import PigOperator
4 + from airflow.providers.apache.pig.operators.pig import PigOperator
5 |
6 | PigCliHook()
7 | PigOperator()
note: This is an unsafe fix and may change runtime behavior

@@ -11,21 +11,12 @@ AIR302 [*] `airflow.hooks.postgres_hook.PostgresHook` is moved into `postgres` p
7 | Mapping()
|
help: Install `apache-airflow-providers-postgres>=1.0.0` and use `PostgresHook` from `airflow.providers.postgres.hooks.postgres` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.postgres_hook import PostgresHook
4 3 | from airflow.operators.postgres_operator import Mapping
4 |+from airflow.providers.postgres.hooks.postgres import PostgresHook
5 5 |
6 6 | PostgresHook()
7 7 | Mapping()

AIR302 `airflow.operators.postgres_operator.Mapping` is removed in Airflow 3.0
--> AIR302_postgres.py:7:1
|
1 | from __future__ import annotations
2 |
- from airflow.hooks.postgres_hook import PostgresHook
3 | from airflow.operators.postgres_operator import Mapping
4 + from airflow.providers.postgres.hooks.postgres import PostgresHook
5 |
6 | PostgresHook()
7 | Mapping()
| ^^^^^^^
|
note: This is an unsafe fix and may change runtime behavior

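`Mapping` is the odd one out in this file: it is removed in Airflow 3.0 with no provider replacement, so its diagnostic carries no `[*]` fix marker and the call has to be rewritten by hand; only the `PostgresHook` import gets an automatic rewrite. A sketch of the end state:

# Has a provider home (requires apache-airflow-providers-postgres>=1.0.0):
from airflow.providers.postgres.hooks.postgres import PostgresHook

# No replacement exists for Mapping; this usage must be removed or rewritten manually:
# from airflow.operators.postgres_operator import Mapping
# Mapping()
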
@@ -10,11 +10,10 @@ AIR302 [*] `airflow.hooks.presto_hook.PrestoHook` is moved into `presto` provide
| ^^^^^^^^^^
|
help: Install `apache-airflow-providers-presto>=1.0.0` and use `PrestoHook` from `airflow.providers.presto.hooks.presto` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.presto_hook import PrestoHook
3 |+from airflow.providers.presto.hooks.presto import PrestoHook
4 4 |
5 5 | PrestoHook()
1 | from __future__ import annotations
2 |
- from airflow.hooks.presto_hook import PrestoHook
3 + from airflow.providers.presto.hooks.presto import PrestoHook
4 |
5 | PrestoHook()
note: This is an unsafe fix and may change runtime behavior

@@ -10,11 +10,10 @@ AIR302 [*] `airflow.hooks.samba_hook.SambaHook` is moved into `samba` provider i
| ^^^^^^^^^
|
help: Install `apache-airflow-providers-samba>=1.0.0` and use `SambaHook` from `airflow.providers.samba.hooks.samba` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.samba_hook import SambaHook
3 |+from airflow.providers.samba.hooks.samba import SambaHook
4 4 |
5 5 | SambaHook()
1 | from __future__ import annotations
2 |
- from airflow.hooks.samba_hook import SambaHook
3 + from airflow.providers.samba.hooks.samba import SambaHook
4 |
5 | SambaHook()
note: This is an unsafe fix and may change runtime behavior

@@ -12,16 +12,15 @@ AIR302 [*] `airflow.hooks.slack_hook.SlackHook` is moved into `slack` provider i
8 | SlackAPIPostOperator()
|
help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackHook` from `airflow.providers.slack.hooks.slack` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.slack_hook import SlackHook
4 3 | from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 |+from airflow.providers.slack.hooks.slack import SlackHook
5 5 |
6 6 | SlackHook()
7 7 | SlackAPIOperator()
1 | from __future__ import annotations
2 |
- from airflow.hooks.slack_hook import SlackHook
3 | from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 + from airflow.providers.slack.hooks.slack import SlackHook
5 |
6 | SlackHook()
7 | SlackAPIOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.slack_operator.SlackAPIOperator` is moved into `slack` provider in Airflow 3.0;
--> AIR302_slack.py:7:1
@@ -32,17 +31,16 @@ AIR302 [*] `airflow.operators.slack_operator.SlackAPIOperator` is moved into `sl
8 | SlackAPIPostOperator()
|
help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackAPIOperator` from `airflow.providers.slack.operators.slack` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.slack_hook import SlackHook
4 |-from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 |+from airflow.operators.slack_operator import SlackAPIPostOperator
5 |+from airflow.providers.slack.operators.slack import SlackAPIOperator
5 6 |
6 7 | SlackHook()
7 8 | SlackAPIOperator()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.slack_hook import SlackHook
- from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 + from airflow.operators.slack_operator import SlackAPIPostOperator
5 + from airflow.providers.slack.operators.slack import SlackAPIOperator
6 |
7 | SlackHook()
8 | SlackAPIOperator()
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.slack_operator.SlackAPIPostOperator` is moved into `slack` provider in Airflow 3.0;
--> AIR302_slack.py:8:1
@@ -53,14 +51,13 @@ AIR302 [*] `airflow.operators.slack_operator.SlackAPIPostOperator` is moved into
| ^^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackAPIPostOperator` from `airflow.providers.slack.operators.slack` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 3 | from airflow.hooks.slack_hook import SlackHook
4 |-from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 |+from airflow.operators.slack_operator import SlackAPIOperator
5 |+from airflow.providers.slack.operators.slack import SlackAPIPostOperator
5 6 |
6 7 | SlackHook()
7 8 | SlackAPIOperator()
1 | from __future__ import annotations
2 |
3 | from airflow.hooks.slack_hook import SlackHook
- from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
4 + from airflow.operators.slack_operator import SlackAPIOperator
5 + from airflow.providers.slack.operators.slack import SlackAPIPostOperator
6 |
7 | SlackHook()
8 | SlackAPIOperator()
note: This is an unsafe fix and may change runtime behavior

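For `from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator`, each diagnostic peels one name off the legacy import and adds its own provider import, so applying a single fix leaves the other name on the old path, exactly as the two fix previews above show. Once both fixes have run the legacy import disappears entirely; modulo how the new import lines end up formatted, the end state should be equivalent to:

# Requires apache-airflow-providers-slack>=1.0.0:
from airflow.providers.slack.operators.slack import SlackAPIOperator, SlackAPIPostOperator

SlackAPIOperator()
SlackAPIPostOperator()
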
@@ -12,15 +12,14 @@ AIR302 [*] `airflow.operators.email_operator.EmailOperator` is moved into `smtp`
7 | from airflow.operators.email import EmailOperator
|
help: Install `apache-airflow-providers-smtp>=1.0.0` and use `EmailOperator` from `airflow.providers.smtp.operators.smtp` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.operators.email_operator import EmailOperator
3 |+from airflow.providers.smtp.operators.smtp import EmailOperator
4 4 |
5 5 | EmailOperator()
6 6 |
1 | from __future__ import annotations
2 |
- from airflow.operators.email_operator import EmailOperator
3 + from airflow.providers.smtp.operators.smtp import EmailOperator
4 |
5 | EmailOperator()
6 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.email.EmailOperator` is moved into `smtp` provider in Airflow 3.0;
--> AIR302_smtp.py:9:1
@@ -31,12 +30,11 @@ AIR302 [*] `airflow.operators.email.EmailOperator` is moved into `smtp` provider
| ^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-smtp>=1.0.0` and use `EmailOperator` from `airflow.providers.smtp.operators.smtp` instead.

ℹ Unsafe fix
4 4 |
5 5 | EmailOperator()
6 6 |
7 |-from airflow.operators.email import EmailOperator
7 |+from airflow.providers.smtp.operators.smtp import EmailOperator
8 8 |
9 9 | EmailOperator()
4 |
5 | EmailOperator()
6 |
- from airflow.operators.email import EmailOperator
7 + from airflow.providers.smtp.operators.smtp import EmailOperator
8 |
9 | EmailOperator()
note: This is an unsafe fix and may change runtime behavior

@@ -10,11 +10,10 @@ AIR302 [*] `airflow.hooks.sqlite_hook.SqliteHook` is moved into `sqlite` provide
| ^^^^^^^^^^
|
help: Install `apache-airflow-providers-sqlite>=1.0.0` and use `SqliteHook` from `airflow.providers.sqlite.hooks.sqlite` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.hooks.sqlite_hook import SqliteHook
3 |+from airflow.providers.sqlite.hooks.sqlite import SqliteHook
4 4 |
5 5 | SqliteHook()
1 | from __future__ import annotations
2 |
- from airflow.hooks.sqlite_hook import SqliteHook
3 + from airflow.providers.sqlite.hooks.sqlite import SqliteHook
4 |
5 | SqliteHook()
note: This is an unsafe fix and may change runtime behavior

@@ -12,22 +12,21 @@ AIR302 [*] `airflow.operators.bash_operator.BashOperator` is moved into `standar
22 | TriggerDagRunLink()
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `BashOperator` from `airflow.providers.standard.operators.bash` instead.

ℹ Unsafe fix
1 1 | from __future__ import annotations
2 2 |
3 |-from airflow.operators.bash_operator import BashOperator
4 3 | from airflow.operators.dagrun_operator import (
5 4 | TriggerDagRunLink,
6 5 | TriggerDagRunOperator,
1 | from __future__ import annotations
2 |
- from airflow.operators.bash_operator import BashOperator
3 | from airflow.operators.dagrun_operator import (
4 | TriggerDagRunLink,
5 | TriggerDagRunOperator,
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.bash import BashOperator
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.bash import BashOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.dagrun_operator.TriggerDagRunLink` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:22:1
@@ -39,23 +38,22 @@ AIR302 [*] `airflow.operators.dagrun_operator.TriggerDagRunLink` is moved into `
23 | TriggerDagRunOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunLink` from `airflow.providers.standard.operators.trigger_dagrun` instead.

ℹ Unsafe fix
2 2 |
3 3 | from airflow.operators.bash_operator import BashOperator
4 4 | from airflow.operators.dagrun_operator import (
5 |- TriggerDagRunLink,
6 5 | TriggerDagRunOperator,
7 6 | )
8 7 | from airflow.operators.latest_only_operator import LatestOnlyOperator
2 |
3 | from airflow.operators.bash_operator import BashOperator
4 | from airflow.operators.dagrun_operator import (
- TriggerDagRunLink,
5 | TriggerDagRunOperator,
6 | )
7 | from airflow.operators.latest_only_operator import LatestOnlyOperator
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunLink
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunLink
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.dagrun_operator.TriggerDagRunOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:23:1
@@ -67,23 +65,22 @@ AIR302 [*] `airflow.operators.dagrun_operator.TriggerDagRunOperator` is moved in
25 | LatestOnlyOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunOperator` from `airflow.providers.standard.operators.trigger_dagrun` instead.

ℹ Unsafe fix
3 3 | from airflow.operators.bash_operator import BashOperator
4 4 | from airflow.operators.dagrun_operator import (
5 5 | TriggerDagRunLink,
6 |- TriggerDagRunOperator,
7 6 | )
8 7 | from airflow.operators.latest_only_operator import LatestOnlyOperator
9 8 | from airflow.operators.python_operator import (
3 | from airflow.operators.bash_operator import BashOperator
4 | from airflow.operators.dagrun_operator import (
5 | TriggerDagRunLink,
- TriggerDagRunOperator,
6 | )
7 | from airflow.operators.latest_only_operator import LatestOnlyOperator
8 | from airflow.operators.python_operator import (
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.latest_only_operator.LatestOnlyOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:25:1
@@ -96,23 +93,22 @@ AIR302 [*] `airflow.operators.latest_only_operator.LatestOnlyOperator` is moved
27 | BranchPythonOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `LatestOnlyOperator` from `airflow.providers.standard.operators.latest_only` instead.

ℹ Unsafe fix
5 5 | TriggerDagRunLink,
6 6 | TriggerDagRunOperator,
7 7 | )
8 |-from airflow.operators.latest_only_operator import LatestOnlyOperator
9 8 | from airflow.operators.python_operator import (
10 9 | BranchPythonOperator,
11 10 | PythonOperator,
5 | TriggerDagRunLink,
6 | TriggerDagRunOperator,
7 | )
- from airflow.operators.latest_only_operator import LatestOnlyOperator
8 | from airflow.operators.python_operator import (
9 | BranchPythonOperator,
10 | PythonOperator,
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.latest_only import LatestOnlyOperator
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.latest_only import LatestOnlyOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.python_operator.BranchPythonOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:27:1
@@ -125,23 +121,22 @@ AIR302 [*] `airflow.operators.python_operator.BranchPythonOperator` is moved int
29 | PythonVirtualenvOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `BranchPythonOperator` from `airflow.providers.standard.operators.python` instead.

ℹ Unsafe fix
7 7 | )
8 8 | from airflow.operators.latest_only_operator import LatestOnlyOperator
9 9 | from airflow.operators.python_operator import (
10 |- BranchPythonOperator,
11 10 | PythonOperator,
12 11 | PythonVirtualenvOperator,
13 12 | ShortCircuitOperator,
7 | )
8 | from airflow.operators.latest_only_operator import LatestOnlyOperator
9 | from airflow.operators.python_operator import (
- BranchPythonOperator,
10 | PythonOperator,
11 | PythonVirtualenvOperator,
12 | ShortCircuitOperator,
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.python import BranchPythonOperator
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.python import BranchPythonOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.python_operator.PythonOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:28:1
@@ -153,23 +148,22 @@ AIR302 [*] `airflow.operators.python_operator.PythonOperator` is moved into `sta
30 | ShortCircuitOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonOperator` from `airflow.providers.standard.operators.python` instead.

ℹ Unsafe fix
8 8 | from airflow.operators.latest_only_operator import LatestOnlyOperator
9 9 | from airflow.operators.python_operator import (
10 10 | BranchPythonOperator,
11 |- PythonOperator,
12 11 | PythonVirtualenvOperator,
13 12 | ShortCircuitOperator,
14 13 | )
8 | from airflow.operators.latest_only_operator import LatestOnlyOperator
9 | from airflow.operators.python_operator import (
10 | BranchPythonOperator,
- PythonOperator,
11 | PythonVirtualenvOperator,
12 | ShortCircuitOperator,
13 | )
--------------------------------------------------------------------------------
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.python import PythonOperator
19 19 |
20 20 | BashOperator()
21 21 |
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.python import PythonOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.python_operator.PythonVirtualenvOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:29:1
@@ -181,22 +175,21 @@ AIR302 [*] `airflow.operators.python_operator.PythonVirtualenvOperator` is moved
30 | ShortCircuitOperator()
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonVirtualenvOperator` from `airflow.providers.standard.operators.python` instead.

ℹ Unsafe fix
9 9 | from airflow.operators.python_operator import (
10 10 | BranchPythonOperator,
11 11 | PythonOperator,
12 |- PythonVirtualenvOperator,
13 12 | ShortCircuitOperator,
14 13 | )
15 14 | from airflow.sensors.external_task_sensor import (
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.python import PythonVirtualenvOperator
19 19 |
20 20 | BashOperator()
21 21 |
9 | from airflow.operators.python_operator import (
10 | BranchPythonOperator,
11 | PythonOperator,
- PythonVirtualenvOperator,
12 | ShortCircuitOperator,
13 | )
14 | from airflow.sensors.external_task_sensor import (
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.python import PythonVirtualenvOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.python_operator.ShortCircuitOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:30:1
@@ -209,21 +202,20 @@ AIR302 [*] `airflow.operators.python_operator.ShortCircuitOperator` is moved int
32 | ExternalTaskMarker()
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `ShortCircuitOperator` from `airflow.providers.standard.operators.python` instead.

ℹ Unsafe fix
10 10 | BranchPythonOperator,
11 11 | PythonOperator,
12 12 | PythonVirtualenvOperator,
13 |- ShortCircuitOperator,
14 13 | )
15 14 | from airflow.sensors.external_task_sensor import (
16 15 | ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.operators.python import ShortCircuitOperator
19 19 |
20 20 | BashOperator()
21 21 |
10 | BranchPythonOperator,
11 | PythonOperator,
12 | PythonVirtualenvOperator,
- ShortCircuitOperator,
13 | )
14 | from airflow.sensors.external_task_sensor import (
15 | ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.operators.python import ShortCircuitOperator
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskMarker` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:32:1
@@ -235,18 +227,17 @@ AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskMarker` is moved in
33 | ExternalTaskSensor()
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskMarker` from `airflow.providers.standard.sensors.external_task` instead.

ℹ Unsafe fix
13 13 | ShortCircuitOperator,
14 14 | )
15 15 | from airflow.sensors.external_task_sensor import (
16 |- ExternalTaskMarker,
17 16 | ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.sensors.external_task import ExternalTaskMarker
19 19 |
20 20 | BashOperator()
21 21 |
13 | ShortCircuitOperator,
14 | )
15 | from airflow.sensors.external_task_sensor import (
- ExternalTaskMarker,
16 | ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.sensors.external_task import ExternalTaskMarker
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskSensor` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:33:1
@@ -256,17 +247,16 @@ AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskSensor` is moved in
| ^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskSensor` from `airflow.providers.standard.sensors.external_task` instead.

ℹ Unsafe fix
14 14 | )
15 15 | from airflow.sensors.external_task_sensor import (
16 16 | ExternalTaskMarker,
17 |- ExternalTaskSensor,
18 17 | )
18 |+from airflow.providers.standard.sensors.external_task import ExternalTaskSensor
19 19 |
20 20 | BashOperator()
21 21 |
14 | )
15 | from airflow.sensors.external_task_sensor import (
16 | ExternalTaskMarker,
- ExternalTaskSensor,
17 | )
18 + from airflow.providers.standard.sensors.external_task import ExternalTaskSensor
19 |
20 | BashOperator()
21 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.subprocess.SubprocessResult` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:38:1
@@ -279,16 +269,15 @@ AIR302 [*] `airflow.hooks.subprocess.SubprocessResult` is moved into `standard`
40 | from airflow.hooks.subprocess import working_directory
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `SubprocessResult` from `airflow.providers.standard.hooks.subprocess` instead.

ℹ Unsafe fix
33 33 | ExternalTaskSensor()
34 34 |
35 35 |
36 |-from airflow.hooks.subprocess import SubprocessResult
36 |+from airflow.providers.standard.hooks.subprocess import SubprocessResult
37 37 |
38 38 | SubprocessResult()
39 39 |
33 | ExternalTaskSensor()
34 |
35 |
- from airflow.hooks.subprocess import SubprocessResult
36 + from airflow.providers.standard.hooks.subprocess import SubprocessResult
37 |
38 | SubprocessResult()
39 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.hooks.subprocess.working_directory` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:42:1
@@ -301,16 +290,15 @@ AIR302 [*] `airflow.hooks.subprocess.working_directory` is moved into `standard`
44 | from airflow.operators.datetime import target_times_as_dates
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `working_directory` from `airflow.providers.standard.hooks.subprocess` instead.

ℹ Unsafe fix
37 37 |
38 38 | SubprocessResult()
39 39 |
40 |-from airflow.hooks.subprocess import working_directory
40 |+from airflow.providers.standard.hooks.subprocess import working_directory
41 41 |
42 42 | working_directory()
43 43 |
37 |
38 | SubprocessResult()
39 |
- from airflow.hooks.subprocess import working_directory
40 + from airflow.providers.standard.hooks.subprocess import working_directory
41 |
42 | working_directory()
43 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.datetime.target_times_as_dates` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:46:1
@@ -323,16 +311,15 @@ AIR302 [*] `airflow.operators.datetime.target_times_as_dates` is moved into `sta
48 | from airflow.operators.trigger_dagrun import TriggerDagRunLink
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `target_times_as_dates` from `airflow.providers.standard.operators.datetime` instead.

ℹ Unsafe fix
41 41 |
42 42 | working_directory()
43 43 |
44 |-from airflow.operators.datetime import target_times_as_dates
44 |+from airflow.providers.standard.operators.datetime import target_times_as_dates
45 45 |
46 46 | target_times_as_dates()
47 47 |
41 |
42 | working_directory()
43 |
- from airflow.operators.datetime import target_times_as_dates
44 + from airflow.providers.standard.operators.datetime import target_times_as_dates
45 |
46 | target_times_as_dates()
47 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.trigger_dagrun.TriggerDagRunLink` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:50:1
@@ -345,16 +332,15 @@ AIR302 [*] `airflow.operators.trigger_dagrun.TriggerDagRunLink` is moved into `s
52 | from airflow.sensors.external_task import ExternalTaskSensorLink
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunLink` from `airflow.providers.standard.operators.trigger_dagrun` instead.

ℹ Unsafe fix
45 45 |
46 46 | target_times_as_dates()
47 47 |
48 |-from airflow.operators.trigger_dagrun import TriggerDagRunLink
48 |+from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunLink
49 49 |
50 50 | TriggerDagRunLink()
51 51 |
45 |
46 | target_times_as_dates()
47 |
- from airflow.operators.trigger_dagrun import TriggerDagRunLink
48 + from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunLink
49 |
50 | TriggerDagRunLink()
51 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.external_task.ExternalTaskSensorLink` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:54:1
@@ -367,18 +353,16 @@ AIR302 [*] `airflow.sensors.external_task.ExternalTaskSensorLink` is moved into
56 | from airflow.sensors.time_delta import WaitSensor
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalDagLink` from `airflow.providers.standard.sensors.external_task` instead.

ℹ Safe fix
50 50 | TriggerDagRunLink()
51 51 |
52 52 | from airflow.sensors.external_task import ExternalTaskSensorLink
53 |+from airflow.providers.standard.sensors.external_task import ExternalDagLink
53 54 |
54 |-ExternalTaskSensorLink()
55 |+ExternalDagLink()
55 56 |
56 57 | from airflow.sensors.time_delta import WaitSensor
57 58 |
50 | TriggerDagRunLink()
51 |
52 | from airflow.sensors.external_task import ExternalTaskSensorLink
53 + from airflow.providers.standard.sensors.external_task import ExternalDagLink
54 |
- ExternalTaskSensorLink()
55 + ExternalDagLink()
56 |
57 | from airflow.sensors.time_delta import WaitSensor
58 |

AIR302 [*] `airflow.sensors.time_delta.WaitSensor` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:58:1
@@ -391,16 +375,15 @@ AIR302 [*] `airflow.sensors.time_delta.WaitSensor` is moved into `standard` prov
60 | from airflow.operators.dummy import DummyOperator
|
help: Install `apache-airflow-providers-standard>=0.0.1` and use `WaitSensor` from `airflow.providers.standard.sensors.time_delta` instead.

ℹ Unsafe fix
53 53 |
54 54 | ExternalTaskSensorLink()
55 55 |
56 |-from airflow.sensors.time_delta import WaitSensor
56 |+from airflow.providers.standard.sensors.time_delta import WaitSensor
57 57 |
58 58 | WaitSensor()
59 59 |
53 |
54 | ExternalTaskSensorLink()
55 |
- from airflow.sensors.time_delta import WaitSensor
56 + from airflow.providers.standard.sensors.time_delta import WaitSensor
57 |
58 | WaitSensor()
59 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.dummy.DummyOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:62:1
@@ -413,18 +396,16 @@ AIR302 [*] `airflow.operators.dummy.DummyOperator` is moved into `standard` prov
64 | from airflow.operators.dummy import EmptyOperator
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.

ℹ Safe fix
58 58 | WaitSensor()
59 59 |
60 60 | from airflow.operators.dummy import DummyOperator
61 |+from airflow.providers.standard.operators.empty import EmptyOperator
61 62 |
62 |-DummyOperator()
63 |+EmptyOperator()
63 64 |
64 65 | from airflow.operators.dummy import EmptyOperator
65 66 |
58 | WaitSensor()
59 |
60 | from airflow.operators.dummy import DummyOperator
61 + from airflow.providers.standard.operators.empty import EmptyOperator
62 |
- DummyOperator()
63 + EmptyOperator()
64 |
65 | from airflow.operators.dummy import EmptyOperator
66 |

AIR302 [*] `airflow.operators.dummy.EmptyOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:66:1
@@ -437,16 +418,15 @@ AIR302 [*] `airflow.operators.dummy.EmptyOperator` is moved into `standard` prov
68 | from airflow.operators.dummy_operator import DummyOperator
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.

ℹ Unsafe fix
61 61 |
62 62 | DummyOperator()
63 63 |
64 |-from airflow.operators.dummy import EmptyOperator
64 |+from airflow.providers.standard.operators.empty import EmptyOperator
65 65 |
66 66 | EmptyOperator()
67 67 |
61 |
62 | DummyOperator()
63 |
- from airflow.operators.dummy import EmptyOperator
64 + from airflow.providers.standard.operators.empty import EmptyOperator
65 |
66 | EmptyOperator()
67 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.dummy_operator.DummyOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:70:1
@@ -459,15 +439,14 @@ AIR302 [*] `airflow.operators.dummy_operator.DummyOperator` is moved into `stand
72 | from airflow.operators.dummy_operator import EmptyOperator
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.

ℹ Unsafe fix
66 66 | EmptyOperator()
67 67 |
68 68 | from airflow.operators.dummy_operator import DummyOperator
69 |+from airflow.providers.standard.operators.empty import EmptyOperator
69 70 |
70 71 | DummyOperator()
71 72 |
66 | EmptyOperator()
67 |
68 | from airflow.operators.dummy_operator import DummyOperator
69 + from airflow.providers.standard.operators.empty import EmptyOperator
70 |
71 | DummyOperator()
72 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.operators.dummy_operator.EmptyOperator` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:74:1
@@ -480,16 +459,15 @@ AIR302 [*] `airflow.operators.dummy_operator.EmptyOperator` is moved into `stand
76 | from airflow.sensors.external_task_sensor import ExternalTaskSensorLink
|
help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.

ℹ Unsafe fix
69 69 |
70 70 | DummyOperator()
71 71 |
72 |-from airflow.operators.dummy_operator import EmptyOperator
72 |+from airflow.providers.standard.operators.empty import EmptyOperator
73 73 |
74 74 | EmptyOperator()
75 75 |
69 |
70 | DummyOperator()
71 |
- from airflow.operators.dummy_operator import EmptyOperator
72 + from airflow.providers.standard.operators.empty import EmptyOperator
73 |
74 | EmptyOperator()
75 |
note: This is an unsafe fix and may change runtime behavior

AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskSensorLink` is moved into `standard` provider in Airflow 3.0;
--> AIR302_standard.py:78:1
@@ -500,12 +478,10 @@ AIR302 [*] `airflow.sensors.external_task_sensor.ExternalTaskSensorLink` is move
| ^^^^^^^^^^^^^^^^^^^^^^
|
help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalDagLink` from `airflow.providers.standard.sensors.external_task` instead.

ℹ Safe fix
74 74 | EmptyOperator()
75 75 |
76 76 | from airflow.sensors.external_task_sensor import ExternalTaskSensorLink
77 |+from airflow.providers.standard.sensors.external_task import ExternalDagLink
77 78 |
78 |-ExternalTaskSensorLink()
79 |+ExternalDagLink()
74 | EmptyOperator()
75 |
76 | from airflow.sensors.external_task_sensor import ExternalTaskSensorLink
77 + from airflow.providers.standard.sensors.external_task import ExternalDagLink
78 |
- ExternalTaskSensorLink()
79 + ExternalDagLink()

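The `standard` provider fixes mix plain module moves with renames: both spellings of `DummyOperator` become `EmptyOperator`, and `ExternalTaskSensorLink` becomes `ExternalDagLink`, with the call-site renames recorded as safe fixes in the snapshots above. A sketch of the rename case, with the `task_id` as an illustrative assumption:

# Before:
# from airflow.operators.dummy import DummyOperator
# DummyOperator(task_id="noop")

# After (requires apache-airflow-providers-standard>=0.0.2; the class is renamed as well as moved):
from airflow.providers.standard.operators.empty import EmptyOperator

noop = EmptyOperator(task_id="noop")
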
@@ -10,11 +10,10 @@ AIR302 [*] `airflow.hooks.zendesk_hook.ZendeskHook` is moved into `zendesk` prov
|
||||
| ^^^^^^^^^^^
|
||||
|
|
||||
help: Install `apache-airflow-providers-zendesk>=1.0.0` and use `ZendeskHook` from `airflow.providers.zendesk.hooks.zendesk` instead.
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 1 | from __future__ import annotations
|
||||
2 2 |
|
||||
3 |-from airflow.hooks.zendesk_hook import ZendeskHook
|
||||
3 |+from airflow.providers.zendesk.hooks.zendesk import ZendeskHook
|
||||
4 4 |
|
||||
5 5 | ZendeskHook()
|
||||
1 | from __future__ import annotations
|
||||
2 |
|
||||
- from airflow.hooks.zendesk_hook import ZendeskHook
|
||||
3 + from airflow.providers.zendesk.hooks.zendesk import ZendeskHook
|
||||
4 |
|
||||
5 | ZendeskHook()
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
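For context on the AIR302 fixes above: each one rewrites an import of an operator, sensor, or hook that Airflow 3.0 relocated into a separate provider package. A minimal before/after sketch of the pattern (the import paths come from the diagnostics above; the `task_id` and assignment are illustrative):

```python
# Airflow 2.x import, removed in Airflow 3.0 and flagged by AIR302:
# from airflow.operators.dummy_operator import EmptyOperator

# Replacement suggested by the fix, available once
# apache-airflow-providers-standard>=0.0.2 is installed:
from airflow.providers.standard.operators.empty import EmptyOperator

# EmptyOperator is a no-op task, typically used as a placeholder or
# join point in a DAG.
noop = EmptyOperator(task_id="noop")
```

Most of these fixes are marked unsafe, presumably because the rewritten import only resolves once the matching provider distribution is actually installed.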
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/eradicate/mod.rs
---
ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:1:1
|
1 | #import os
@@ -10,14 +10,13 @@ ERA001 Found commented-out code
3 | #a = 3
|
help: Remove commented-out code
- #import os
1 | # from foo import junk
2 | #a = 3
3 | a = 4
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
1 |-#import os
2 1 | # from foo import junk
3 2 | #a = 3
4 3 | a = 4

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:2:1
|
1 | #import os
@@ -27,15 +26,14 @@ ERA001 Found commented-out code
4 | a = 4
|
help: Remove commented-out code
1 | #import os
- # from foo import junk
2 | #a = 3
3 | a = 4
4 | #foo(1, 2, 3)
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
1 1 | #import os
2 |-# from foo import junk
3 2 | #a = 3
4 3 | a = 4
5 4 | #foo(1, 2, 3)

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:3:1
|
1 | #import os
@@ -46,16 +44,15 @@ ERA001 Found commented-out code
5 | #foo(1, 2, 3)
|
help: Remove commented-out code
1 | #import os
2 | # from foo import junk
- #a = 3
3 | a = 4
4 | #foo(1, 2, 3)
5 |
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
1 1 | #import os
2 2 | # from foo import junk
3 |-#a = 3
4 3 | a = 4
5 4 | #foo(1, 2, 3)
6 5 |

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:5:1
|
3 | #a = 3
@@ -66,17 +63,16 @@ ERA001 Found commented-out code
7 | def foo(x, y, z):
|
help: Remove commented-out code
2 | # from foo import junk
3 | #a = 3
4 | a = 4
- #foo(1, 2, 3)
5 |
6 | def foo(x, y, z):
7 | content = 1 # print('hello')
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
2 2 | # from foo import junk
3 3 | #a = 3
4 4 | a = 4
5 |-#foo(1, 2, 3)
6 5 |
7 6 | def foo(x, y, z):
8 7 | content = 1 # print('hello')

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:13:5
|
11 | # This is a real comment.
@@ -86,17 +82,16 @@ ERA001 Found commented-out code
14 | return False
|
help: Remove commented-out code
10 |
11 | # This is a real comment.
12 | # # This is a (nested) comment.
- #return True
13 | return False
14 |
15 | #import os # noqa: ERA001
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
10 10 |
11 11 | # This is a real comment.
12 12 | # # This is a (nested) comment.
13 |- #return True
14 13 | return False
15 14 |
16 15 | #import os # noqa: ERA001

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:21:5
|
19 | class A():
@@ -105,17 +100,16 @@ ERA001 Found commented-out code
| ^^^^^^^
|
help: Remove commented-out code
18 |
19 | class A():
20 | pass
- # b = c
21 |
22 |
23 | dictionary = {
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
18 18 |
19 19 | class A():
20 20 | pass
21 |- # b = c
22 21 |
23 22 |
24 23 | dictionary = {

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:26:5
|
24 | dictionary = {
@@ -126,17 +120,16 @@ ERA001 Found commented-out code
28 | }
|
help: Remove commented-out code
23 |
24 | dictionary = {
25 | # "key1": 123, # noqa: ERA001
- # "key2": 456,
26 | # "key3": 789, # test
27 | }
28 |
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
23 23 |
24 24 | dictionary = {
25 25 | # "key1": 123, # noqa: ERA001
26 |- # "key2": 456,
27 26 | # "key3": 789, # test
28 27 | }
29 28 |

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:27:5
|
25 | # "key1": 123, # noqa: ERA001
@@ -146,17 +139,16 @@ ERA001 Found commented-out code
28 | }
|
help: Remove commented-out code
24 | dictionary = {
25 | # "key1": 123, # noqa: ERA001
26 | # "key2": 456,
- # "key3": 789, # test
27 | }
28 |
29 | #import os # noqa
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
24 24 | dictionary = {
25 25 | # "key1": 123, # noqa: ERA001
26 26 | # "key2": 456,
27 |- # "key3": 789, # test
28 27 | }
29 28 |
30 29 | #import os # noqa

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:32:1
|
30 | #import os # noqa
@@ -167,17 +159,16 @@ ERA001 Found commented-out code
34 | # try: # with comment
|
help: Remove commented-out code
29 |
30 | #import os # noqa
31 |
- # case 1:
32 | # try:
33 | # try: # with comment
34 | # try: print()
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
29 29 |
30 30 | #import os # noqa
31 31 |
32 |-# case 1:
33 32 | # try:
34 33 | # try: # with comment
35 34 | # try: print()

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:33:1
|
32 | # case 1:
@@ -187,17 +178,16 @@ ERA001 Found commented-out code
35 | # try: print()
|
help: Remove commented-out code
30 | #import os # noqa
31 |
32 | # case 1:
- # try:
33 | # try: # with comment
34 | # try: print()
35 | # except:
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
30 30 | #import os # noqa
31 31 |
32 32 | # case 1:
33 |-# try:
34 33 | # try: # with comment
35 34 | # try: print()
36 35 | # except:

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:34:1
|
32 | # case 1:
@@ -208,17 +198,16 @@ ERA001 Found commented-out code
36 | # except:
|
help: Remove commented-out code
31 |
32 | # case 1:
33 | # try:
- # try: # with comment
34 | # try: print()
35 | # except:
36 | # except Foo:
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
31 31 |
32 32 | # case 1:
33 33 | # try:
34 |-# try: # with comment
35 34 | # try: print()
36 35 | # except:
37 36 | # except Foo:

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:35:1
|
33 | # try:
@@ -229,17 +218,16 @@ ERA001 Found commented-out code
37 | # except Foo:
|
help: Remove commented-out code
32 | # case 1:
33 | # try:
34 | # try: # with comment
- # try: print()
35 | # except:
36 | # except Foo:
37 | # except Exception as e: print(e)
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
32 32 | # case 1:
33 33 | # try:
34 34 | # try: # with comment
35 |-# try: print()
36 35 | # except:
37 36 | # except Foo:
38 37 | # except Exception as e: print(e)

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:36:1
|
34 | # try: # with comment
@@ -250,17 +238,16 @@ ERA001 Found commented-out code
38 | # except Exception as e: print(e)
|
help: Remove commented-out code
33 | # try:
34 | # try: # with comment
35 | # try: print()
- # except:
36 | # except Foo:
37 | # except Exception as e: print(e)
38 |
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
33 33 | # try:
34 34 | # try: # with comment
35 35 | # try: print()
36 |-# except:
37 36 | # except Foo:
38 37 | # except Exception as e: print(e)
39 38 |

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:37:1
|
35 | # try: print()
@@ -270,17 +257,16 @@ ERA001 Found commented-out code
38 | # except Exception as e: print(e)
|
help: Remove commented-out code
34 | # try: # with comment
35 | # try: print()
36 | # except:
- # except Foo:
37 | # except Exception as e: print(e)
38 |
39 |
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
34 34 | # try: # with comment
35 35 | # try: print()
36 36 | # except:
37 |-# except Foo:
38 37 | # except Exception as e: print(e)
39 38 |
40 39 |

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:38:1
|
36 | # except:
@@ -289,17 +275,16 @@ ERA001 Found commented-out code
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Remove commented-out code
35 | # try: print()
36 | # except:
37 | # except Foo:
- # except Exception as e: print(e)
38 |
39 |
40 | # Script tag without an opening tag (Error)
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
35 35 | # try: print()
36 36 | # except:
37 37 | # except Foo:
38 |-# except Exception as e: print(e)
39 38 |
40 39 |
41 40 | # Script tag without an opening tag (Error)

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:44:1
|
43 | # requires-python = ">=3.11"
@@ -309,17 +294,16 @@ ERA001 Found commented-out code
46 | # "rich",
|
help: Remove commented-out code
41 | # Script tag without an opening tag (Error)
42 |
43 | # requires-python = ">=3.11"
- # dependencies = [
44 | # "requests<3",
45 | # "rich",
46 | # ]
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
41 41 | # Script tag without an opening tag (Error)
42 42 |
43 43 | # requires-python = ">=3.11"
44 |-# dependencies = [
45 44 | # "requests<3",
46 45 | # "rich",
47 46 | # ]

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:47:1
|
45 | # "requests<3",
@@ -329,17 +313,16 @@ ERA001 Found commented-out code
48 | # ///
|
help: Remove commented-out code
44 | # dependencies = [
45 | # "requests<3",
46 | # "rich",
- # ]
47 | # ///
48 |
49 | # Script tag (OK)
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
44 44 | # dependencies = [
45 45 | # "requests<3",
46 46 | # "rich",
47 |-# ]
48 47 | # ///
49 48 |
50 49 | # Script tag (OK)

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:75:1
|
73 | # /// script
@@ -350,17 +333,16 @@ ERA001 Found commented-out code
77 | # "rich",
|
help: Remove commented-out code
72 |
73 | # /// script
74 | # requires-python = ">=3.11"
- # dependencies = [
75 | # "requests<3",
76 | # "rich",
77 | # ]
note: This is a display-only fix and is likely to be incorrect

ℹ Display-only fix
72 72 |
73 73 | # /// script
74 74 | # requires-python = ">=3.11"
75 |-# dependencies = [
76 75 | # "requests<3",
77 76 | # "rich",
78 77 | # ]

ERA001 Found commented-out code
ERA001 [*] Found commented-out code
--> ERA001.py:78:1
|
76 | # "requests<3",
@@ -371,12 +353,11 @@ ERA001 Found commented-out code
80 | # Script tag block followed by normal block (Ok)
|
help: Remove commented-out code

ℹ Display-only fix
75 75 | # dependencies = [
76 76 | # "requests<3",
77 77 | # "rich",
78 |-# ]
79 78 |
80 79 | # Script tag block followed by normal block (Ok)
81 80 |
75 | # dependencies = [
76 | # "requests<3",
77 | # "rich",
- # ]
78 |
79 | # Script tag block followed by normal block (Ok)
80 |
note: This is a display-only fix and is likely to be incorrect
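For context on the ERA001 snapshots above: the rule heuristically flags comments that parse as executable code while leaving prose comments alone, which is why its fix is rendered as display-only rather than applied automatically. A minimal sketch, with contents mirroring the test file shown above:

```python
# Flagged by ERA001 (executable code hidden behind a comment):
#a = 3
# "key2": 456,

# Not flagged (ordinary prose):
# This is a real comment.

# Matches the pattern, but suppressed explicitly:
#import os  # noqa: ERA001
```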
Some files were not shown because too many files have changed in this diff