Compare commits


1 Commit

Author | SHA1 | Message | Date
David Peter | b19177cd6d | Experiment: allow functions to be redefined (same signature) | 2025-05-23 15:08:14 +02:00
97 changed files with 948 additions and 1598 deletions
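As a hypothetical illustration (not part of this diff) of the pattern named in the commit message: a function redefined with an identical signature, which the experiment would accept without a redefinition diagnostic, letting the second definition shadow the first.

def get_version(package: str) -> str:
    return "1.0.0"

def get_version(package: str) -> str:  # same signature: redefinition allowed under the experiment
    return "2.0.0"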

View File

@@ -237,13 +237,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)
@@ -295,13 +295,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -324,7 +324,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Run tests"
@@ -380,7 +380,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Build"
run: cargo build --release --locked
@@ -405,13 +405,13 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-insta
- name: "Run tests"
@@ -459,7 +459,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@@ -660,7 +660,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: Fuzz
env:
FORCE_COLOR: 1
@@ -730,7 +730,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@@ -773,7 +773,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@@ -910,7 +910,7 @@ jobs:
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@6c6479b49816fcc0975a31af977bdc1f847c2920 # v2.52.1
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
with:
tool: cargo-codspeed

View File

@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Build ruff
# A debug build means the script runs slower once it gets started,

View File

@@ -37,7 +37,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:

View File

@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*

View File

@@ -80,7 +80,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.11
rev: v0.11.10
hooks:
- id: ruff-format
- id: ruff
@@ -98,7 +98,7 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.8.0
rev: v1.7.0
hooks:
- id: zizmor

Cargo.lock generated
View File

@@ -486,7 +486,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -495,7 +495,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
dependencies = [
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -909,7 +909,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1444,7 +1444,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi 0.5.1",
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1498,9 +1498,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jiff"
version = "0.2.14"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a194df1107f33c79f4f93d02c80798520551949d59dfad22b6157048a88cca93"
checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806"
dependencies = [
"jiff-static",
"jiff-tzdb-platform",
@@ -1508,14 +1508,14 @@ dependencies = [
"portable-atomic",
"portable-atomic-util",
"serde",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
name = "jiff-static"
version = "0.2.14"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c6e1db7ed32c6c71b759497fae34bf7933636f75a251b9e736555da426f6442"
checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48"
dependencies = [
"proc-macro2",
"quote",
@@ -3081,7 +3081,6 @@ dependencies = [
"ruff_workspace",
"serde",
"serde-wasm-bindgen",
"uuid",
"wasm-bindgen",
"wasm-bindgen-test",
]
@@ -3163,7 +3162,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -3532,7 +3531,7 @@ dependencies = [
"getrandom 0.3.3",
"once_cell",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -4004,7 +4003,6 @@ dependencies = [
"ruff_source_file",
"ruff_text_size",
"rustc-hash 2.1.1",
"salsa",
"serde",
"serde_json",
"shellexpand",
@@ -4073,7 +4071,6 @@ dependencies = [
"ty_ide",
"ty_project",
"ty_python_semantic",
"uuid",
"wasm-bindgen",
"wasm-bindgen-test",
]
@@ -4243,9 +4240,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.17.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
dependencies = [
"getrandom 0.3.3",
"js-sys",
@@ -4256,9 +4253,9 @@ dependencies = [
[[package]]
name = "uuid-macro-internal"
version = "1.17.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b682e8c381995ea03130e381928e0e005b7c9eb483c6c8682f50e07b33c2b7"
checksum = "72dcd78c4f979627a754f5522cea6e6a25e55139056535fe6e69c506cd64a862"
dependencies = [
"proc-macro2",
"quote",
@@ -4526,7 +4523,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]

View File

@@ -179,6 +179,7 @@ uuid = { version = "1.6.1", features = [
"v4",
"fast-rng",
"macro-diagnostics",
"js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
@@ -187,7 +188,7 @@ wild = { version = "2" }
zip = { version = "0.6.6", default-features = false }
[workspace.metadata.cargo-shear]
ignored = ["getrandom", "ruff_options_metadata", "uuid"]
ignored = ["getrandom", "ruff_options_metadata"]
[workspace.lints.rust]

View File

@@ -34,7 +34,8 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
[VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more

View File

@@ -131,7 +131,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
fn setup() -> Case {
let case = setup_tomllib_case();
let result: Vec<_> = case.db.check();
let result: Vec<_> = case.db.check().unwrap();
assert_diagnostics(&case.db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
@@ -159,7 +159,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
None,
);
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), EXPECTED_TOMLLIB_DIAGNOSTICS.len());
}
@@ -179,7 +179,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
setup_tomllib_case,
|case| {
let Case { db, .. } = case;
let result: Vec<_> = db.check();
let result: Vec<_> = db.check().unwrap();
assert_diagnostics(db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
},
@@ -293,7 +293,7 @@ fn benchmark_many_string_assignments(criterion: &mut Criterion) {
},
|case| {
let Case { db, .. } = case;
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,
@@ -339,7 +339,7 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
},
|case| {
let Case { db, .. } = case;
let result = db.check();
let result = db.check().unwrap();
assert_eq!(result.len(), 0);
},
BatchSize::SmallInput,

View File

@@ -1,4 +1,3 @@
use std::any::Any;
use std::backtrace::BacktraceStatus;
use std::cell::Cell;
use std::panic::Location;
@@ -25,25 +24,17 @@ impl Payload {
None
}
}
pub fn downcast_ref<R: Any>(&self) -> Option<&R> {
self.0.downcast_ref::<R>()
}
}
impl std::fmt::Display for PanicError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "panicked at")?;
writeln!(f, "panicked at")?;
if let Some(location) = &self.location {
write!(f, " {location}")?;
}
if let Some(payload) = self.payload.as_str() {
write!(f, ":\n{payload}")?;
}
if let Some(query_trace) = self.salsa_backtrace.as_ref() {
let _ = writeln!(f, "{query_trace}");
}
if let Some(backtrace) = &self.backtrace {
match backtrace.status() {
BacktraceStatus::Disabled => {
@@ -58,7 +49,6 @@ impl std::fmt::Display for PanicError {
_ => {}
}
}
Ok(())
}
}

View File

@@ -18,43 +18,44 @@ const FIX_SYMBOL: &str = "🛠️";
const PREVIEW_SYMBOL: &str = "🧪";
const REMOVED_SYMBOL: &str = "❌";
const WARNING_SYMBOL: &str = "⚠️";
const STABLE_SYMBOL: &str = "✔️";
const SPACER: &str = "&nbsp;&nbsp;&nbsp;&nbsp;";
/// Style for the rule's fixability and status icons.
const SYMBOL_STYLE: &str = "style='width: 1em; display: inline-block;'";
/// Style for the container wrapping the fixability and status icons.
const SYMBOLS_CONTAINER: &str = "style='display: flex; gap: 0.5rem; justify-content: end;'";
fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>, linter: &Linter) {
table_out.push_str("| Code | Name | Message | |");
table_out.push_str("| Code | Name | Message | |");
table_out.push('\n');
table_out.push_str("| ---- | ---- | ------- | -: |");
table_out.push_str("| ---- | ---- | ------- | ------: |");
table_out.push('\n');
for rule in rules {
let status_token = match rule.group() {
RuleGroup::Removed => {
format!(
"<span {SYMBOL_STYLE} title='Rule has been removed'>{REMOVED_SYMBOL}</span>"
)
format!("<span title='Rule has been removed'>{REMOVED_SYMBOL}</span>")
}
RuleGroup::Deprecated => {
format!(
"<span {SYMBOL_STYLE} title='Rule has been deprecated'>{WARNING_SYMBOL}</span>"
)
format!("<span title='Rule has been deprecated'>{WARNING_SYMBOL}</span>")
}
RuleGroup::Preview => {
format!("<span {SYMBOL_STYLE} title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
}
RuleGroup::Stable => {
// A full opacity checkmark is a bit aggressive for indicating stable
format!("<span title='Rule is stable' style='opacity: 0.6'>{STABLE_SYMBOL}</span>")
}
RuleGroup::Stable => format!("<span {SYMBOL_STYLE}></span>"),
};
let fix_token = match rule.fixable() {
FixAvailability::Always | FixAvailability::Sometimes => {
format!("<span {SYMBOL_STYLE} title='Automatic fix available'>{FIX_SYMBOL}</span>")
format!("<span title='Automatic fix available'>{FIX_SYMBOL}</span>")
}
FixAvailability::None => {
format!(
"<span title='Automatic fix not available' style='opacity: 0.1' aria-hidden='true'>{FIX_SYMBOL}</span>"
)
}
FixAvailability::None => format!("<span {SYMBOL_STYLE}></span>"),
};
let tokens = format!("{status_token} {fix_token}");
let rule_name = rule.as_ref();
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
@@ -81,14 +82,15 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
         #[expect(clippy::or_fun_call)]
         let _ = write!(
             table_out,
-            "| {ss}{prefix}{code}{se} {{ #{prefix}{code} }} | {ss}{explanation}{se} | {ss}{message}{se} | <div {SYMBOLS_CONTAINER}>{status_token}{fix_token}</div>|",
-            prefix = linter.common_prefix(),
-            code = linter.code_for_rule(rule).unwrap(),
-            explanation = rule
-                .explanation()
+            "| {ss}{0}{1}{se} {{ #{0}{1} }} | {ss}{2}{se} | {ss}{3}{se} | {ss}{4}{se} |",
+            linter.common_prefix(),
+            linter.code_for_rule(rule).unwrap(),
+            rule.explanation()
                 .is_some()
                 .then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
                 .unwrap_or(format_args!("{rule_name}")),
+            message,
+            tokens,
         );
table_out.push('\n');
}
@@ -102,6 +104,12 @@ pub(crate) fn generate() -> String {
table_out.push_str("### Legend");
table_out.push('\n');
let _ = write!(
&mut table_out,
"{SPACER}{STABLE_SYMBOL}{SPACER} The rule is stable."
);
table_out.push_str("<br />");
let _ = write!(
&mut table_out,
"{SPACER}{PREVIEW_SYMBOL}{SPACER} The rule is unstable and is in [\"preview\"](faq.md#what-is-preview)."
@@ -124,8 +132,7 @@ pub(crate) fn generate() -> String {
&mut table_out,
"{SPACER}{FIX_SYMBOL}{SPACER} The rule is automatically fixable by the `--fix` command-line option."
);
table_out.push_str("\n\n");
table_out.push_str("All rules not marked as preview, deprecated or removed are stable.");
table_out.push_str("<br />");
table_out.push('\n');
for linter in Linter::iter() {

View File

@@ -114,11 +114,16 @@ from airflow.sensors.sql_sensor import SqlSensor
SqlSensor()
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.sqlite_operator import SqliteOperator
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
JdbcOperator()
MsSqlOperator()
MySqlOperator()
OracleOperator()
PostgresOperator()
SqliteOperator()
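For context, the deprecated per-database operators in this fixture all map onto the generic SQLExecuteQueryOperator from the common-sql provider. A hedged usage sketch, where the task id, connection id, and SQL statement are placeholders:

from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

run_query = SQLExecuteQueryOperator(
    task_id="run_query",    # placeholder task id
    conn_id="my_database",  # generic conn_id replaces the operator-specific *_conn_id arguments
    sql="SELECT 1",
)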

View File

@@ -145,23 +145,3 @@ def func():
sleep = 10
anyio.run(main)
async def test_anyio_async115_helpers():
import anyio
await anyio.sleep(delay=1) # OK
await anyio.sleep(seconds=1) # OK
await anyio.sleep(delay=0) # ASYNC115
await anyio.sleep(seconds=0) # OK
async def test_trio_async115_helpers():
import trio
await trio.sleep(seconds=1) # OK
await trio.sleep(delay=1) # OK
await trio.sleep(seconds=0) # ASYNC115
await trio.sleep(delay=0) # OK
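The removed helpers above exercise ASYNC115, which flags zero-duration sleeps; the documented rewrite is an explicit checkpoint. A minimal sketch of the flagged call and its replacement:

import trio

async def before() -> None:
    await trio.sleep(0)  # flagged by ASYNC115: a zero-duration sleep is an implicit checkpoint

async def after() -> None:
    await trio.lowlevel.checkpoint()  # the rewrite ASYNC115 suggests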

View File

@@ -108,23 +108,3 @@ async def import_from_anyio():
# catch from import
await sleep(86401) # error: 116, "async"
async def test_anyio_async116_helpers():
import anyio
await anyio.sleep(delay=1) # OK
await anyio.sleep(seconds=1) # OK
await anyio.sleep(delay=86401) # ASYNC116
await anyio.sleep(seconds=86401) # OK
async def test_trio_async116_helpers():
import trio
await trio.sleep(seconds=1) # OK
await trio.sleep(delay=1) # OK
await trio.sleep(seconds=86401) # ASYNC116
await trio.sleep(delay=86401) # OK
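Likewise, the removed ASYNC116 helpers exercise sleeps longer than 24 hours (86400 seconds), for which the rule's documented suggestion is trio.sleep_forever(). A minimal sketch:

import trio

async def before() -> None:
    await trio.sleep(86401)  # flagged by ASYNC116: sleeping for more than one day

async def after() -> None:
    await trio.sleep_forever()  # the rewrite ASYNC116 suggests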

View File

@@ -12,4 +12,3 @@ if True:
if True:
from __future__ import generator_stop
from __future__ import invalid_module, generators
from __future__ import generators # comment

View File

@@ -65,26 +65,24 @@ impl Violation for Airflow3MovedToProvider {
     fn fix_title(&self) -> Option<String> {
         let Airflow3MovedToProvider { replacement, .. } = self;
-        if let Some((module, name, provider, version)) = match &replacement {
-            ProviderReplacement::AutoImport {
-                module,
-                name,
-                provider,
-                version,
-            } => Some((module, *name, provider, version)),
-            ProviderReplacement::SourceModuleMovedToProvider {
-                module,
-                name,
-                provider,
-                version,
-            } => Some((module, name.as_str(), provider, version)),
+        match replacement {
             ProviderReplacement::None => None,
-        } {
-            Some(format!(
-                "Install `apache-airflow-providers-{provider}>={version}` and use `{name}` from `{module}` instead."
-            ))
-        } else {
-            None
+            ProviderReplacement::AutoImport {
+                name,
+                module,
+                provider,
+                version,
+            } => Some(format!(
+                "Install `apache-airflow-providers-{provider}>={version}` and use `{module}.{name}` instead."
+            )),
+            ProviderReplacement::SourceModuleMovedToProvider {
+                name,
+                module,
+                provider,
+                version,
+            } => Some(format!(
+                "Install `apache-airflow-providers-{provider}>={version}` and use `{module}.{name}` instead."
+            )),
         }
     }
 }

View File

@@ -73,10 +73,10 @@ impl Violation for Airflow3Removal {
Replacement::AttrName(name) => Some(format!("Use `{name}` instead")),
Replacement::Message(message) => Some((*message).to_string()),
Replacement::AutoImport { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
Some(format!("Use `{module}.{name}` instead"))
}
Replacement::SourceModuleMoved { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
Some(format!("Use `{module}.{name}` instead"))
}
}
}

View File

@@ -77,7 +77,7 @@ impl Violation for Airflow3SuggestedToMoveToProvider {
provider,
version,
} => Some(format!(
"Install `apache-airflow-providers-{provider}>={version}` and use `{name}` from `{module}` instead."
"Install `apache-airflow-providers-{provider}>={version}` and use `{module}.{name}` instead."
)),
ProviderReplacement::SourceModuleMovedToProvider {
module,
@@ -85,7 +85,7 @@ impl Violation for Airflow3SuggestedToMoveToProvider {
provider,
version,
} => Some(format!(
"Install `apache-airflow-providers-{provider}>={version}` and use `{name}` from `{module}` instead."
"Install `apache-airflow-providers-{provider}>={version}` and use `{module}.{name}` instead."
)),
}
}

View File

@@ -72,10 +72,10 @@ impl Violation for Airflow3SuggestedUpdate {
Replacement::AttrName(name) => Some(format!("Use `{name}` instead")),
Replacement::Message(message) => Some((*message).to_string()),
Replacement::AutoImport { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
Some(format!("Use `{module}.{name}` instead"))
}
Replacement::SourceModuleMoved { module, name } => {
Some(format!("Use `{name}` from `{module}` instead."))
Some(format!("Use `{module}.{name}` instead"))
}
}
}

View File

@@ -171,7 +171,7 @@ AIR301_class_attribute.py:42:6: AIR301 [*] `airflow.datasets.manager.DatasetMana
43 | dm.register_dataset_change()
44 | dm.create_datasets()
|
= help: Use `AssetManager` from `airflow.assets.manager` instead.
= help: Use `airflow.assets.manager.AssetManager` instead
Safe fix
19 19 | from airflow.providers_manager import ProvidersManager
@@ -302,7 +302,7 @@ AIR301_class_attribute.py:50:11: AIR301 [*] `airflow.lineage.hook.DatasetLineage
| ^^^^^^^^^^^^^^^^^^ AIR301
51 | dl_info.dataset
|
= help: Use `AssetLineageInfo` from `airflow.lineage.hook` instead.
= help: Use `airflow.lineage.hook.AssetLineageInfo` instead
Safe fix
9 9 | DatasetAny,

View File

@@ -85,7 +85,7 @@ AIR301_names.py:60:1: AIR301 [*] `airflow.configuration.get` is removed in Airfl
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^ AIR301
|
= help: Use `conf.get` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.get` instead
Safe fix
19 19 | has_option,
@@ -111,7 +111,7 @@ AIR301_names.py:60:6: AIR301 [*] `airflow.configuration.getboolean` is removed i
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^^^^^ AIR301
|
= help: Use `conf.getboolean` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.getboolean` instead
Safe fix
19 19 | has_option,
@@ -137,7 +137,7 @@ AIR301_names.py:60:18: AIR301 [*] `airflow.configuration.getfloat` is removed in
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^^^ AIR301
|
= help: Use `conf.getfloat` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.getfloat` instead
Safe fix
19 19 | has_option,
@@ -163,7 +163,7 @@ AIR301_names.py:60:28: AIR301 [*] `airflow.configuration.getint` is removed in A
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^ AIR301
|
= help: Use `conf.getint` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.getint` instead
Safe fix
19 19 | has_option,
@@ -189,7 +189,7 @@ AIR301_names.py:60:36: AIR301 [*] `airflow.configuration.has_option` is removed
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^^^^^ AIR301
|
= help: Use `conf.has_option` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.has_option` instead
Safe fix
19 19 | has_option,
@@ -215,7 +215,7 @@ AIR301_names.py:60:48: AIR301 [*] `airflow.configuration.remove_option` is remov
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^^^^^^^^ AIR301
|
= help: Use `conf.remove_option` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.remove_option` instead
Safe fix
19 19 | has_option,
@@ -241,7 +241,7 @@ AIR301_names.py:60:63: AIR301 [*] `airflow.configuration.as_dict` is removed in
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^^^^^ AIR301
|
= help: Use `conf.as_dict` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.as_dict` instead
Safe fix
19 19 | has_option,
@@ -267,7 +267,7 @@ AIR301_names.py:60:72: AIR301 [*] `airflow.configuration.set` is removed in Airf
60 | get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
| ^^^ AIR301
|
= help: Use `conf.set` from `airflow.configuration` instead.
= help: Use `airflow.configuration.conf.set` instead
Safe fix
19 19 | has_option,
@@ -308,7 +308,7 @@ AIR301_names.py:72:1: AIR301 `airflow.hooks.base_hook.BaseHook` is removed in Ai
72 | BaseHook()
| ^^^^^^^^ AIR301
|
= help: Use `BaseHook` from `airflow.hooks.base` instead.
= help: Use `airflow.hooks.base.BaseHook` instead
AIR301_names.py:76:1: AIR301 `airflow.operators.subdag.SubDagOperator` is removed in Airflow 3.0
|
@@ -324,7 +324,7 @@ AIR301_names.py:85:1: AIR301 `airflow.sensors.base_sensor_operator.BaseSensorOpe
85 | BaseSensorOperator()
| ^^^^^^^^^^^^^^^^^^ AIR301
|
= help: Use `BaseSensorOperator` from `airflow.sdk.bases.sensor` instead.
= help: Use `airflow.sdk.bases.sensor.BaseSensorOperator` instead
AIR301_names.py:89:1: AIR301 `airflow.triggers.external_task.TaskStateTrigger` is removed in Airflow 3.0
|
@@ -446,7 +446,7 @@ AIR301_names.py:117:1: AIR301 `airflow.utils.file.TemporaryDirectory` is removed
| ^^^^^^^^^^^^^^^^^^ AIR301
118 | mkdirs
|
= help: Use `TemporaryDirectory` from `tempfile` instead.
= help: Use `tempfile.TemporaryDirectory` instead
AIR301_names.py:118:1: AIR301 `airflow.utils.file.mkdirs` is removed in Airflow 3.0
|
@@ -466,7 +466,7 @@ AIR301_names.py:121:1: AIR301 [*] `airflow.utils.helpers.chain` is removed in Ai
| ^^^^^^^^^^^^ AIR301
122 | helper_cross_downstream
|
= help: Use `chain` from `airflow.sdk` instead.
= help: Use `airflow.sdk.chain` instead
Safe fix
48 48 | from airflow.utils.trigger_rule import TriggerRule
@@ -495,7 +495,7 @@ AIR301_names.py:122:1: AIR301 [*] `airflow.utils.helpers.cross_downstream` is re
123 |
124 | # airflow.utils.log
|
= help: Use `cross_downstream` from `airflow.sdk` instead.
= help: Use `airflow.sdk.cross_downstream` instead
Safe fix
48 48 | from airflow.utils.trigger_rule import TriggerRule
@@ -523,7 +523,7 @@ AIR301_names.py:125:1: AIR301 `airflow.utils.log.secrets_masker` is removed in A
126 |
127 | # airflow.utils.state
|
= help: Use `secrets_masker` from `airflow.sdk.execution_time` instead.
= help: Use `airflow.sdk.execution_time.secrets_masker` instead
AIR301_names.py:128:1: AIR301 `airflow.utils.state.SHUTDOWN` is removed in Airflow 3.0
|
@@ -595,7 +595,7 @@ AIR301_names.py:146:1: AIR301 `airflow.operators.python.get_current_context` is
147 |
148 | # airflow.providers.mysql
|
= help: Use `get_current_context` from `airflow.sdk` instead.
= help: Use `airflow.sdk.get_current_context` instead
AIR301_names.py:151:1: AIR301 `airflow.providers.mysql.datasets.mysql.sanitize_uri` is removed in Airflow 3.0
|
@@ -606,7 +606,7 @@ AIR301_names.py:151:1: AIR301 `airflow.providers.mysql.datasets.mysql.sanitize_u
152 |
153 | # airflow.providers.postgres
|
= help: Use `sanitize_uri` from `airflow.providers.mysql.assets.mysql` instead.
= help: Use `airflow.providers.mysql.assets.mysql.sanitize_uri` instead
AIR301_names.py:156:1: AIR301 `airflow.providers.postgres.datasets.postgres.sanitize_uri` is removed in Airflow 3.0
|
@@ -617,7 +617,7 @@ AIR301_names.py:156:1: AIR301 `airflow.providers.postgres.datasets.postgres.sani
157 |
158 | # airflow.providers.trino
|
= help: Use `sanitize_uri` from `airflow.providers.postgres.assets.postgres` instead.
= help: Use `airflow.providers.postgres.assets.postgres.sanitize_uri` instead
AIR301_names.py:161:1: AIR301 `airflow.providers.trino.datasets.trino.sanitize_uri` is removed in Airflow 3.0
|
@@ -628,7 +628,7 @@ AIR301_names.py:161:1: AIR301 `airflow.providers.trino.datasets.trino.sanitize_u
162 |
163 | # airflow.notifications.basenotifier
|
= help: Use `sanitize_uri` from `airflow.providers.trino.assets.trino` instead.
= help: Use `airflow.providers.trino.assets.trino.sanitize_uri` instead
AIR301_names.py:166:1: AIR301 `airflow.notifications.basenotifier.BaseNotifier` is removed in Airflow 3.0
|
@@ -639,7 +639,7 @@ AIR301_names.py:166:1: AIR301 `airflow.notifications.basenotifier.BaseNotifier`
167 |
168 | # airflow.auth.manager
|
= help: Use `BaseNotifier` from `airflow.sdk.bases.notifier` instead.
= help: Use `airflow.sdk.bases.notifier.BaseNotifier` instead
AIR301_names.py:171:1: AIR301 `airflow.auth.managers.base_auth_manager.BaseAuthManager` is removed in Airflow 3.0
|
@@ -648,4 +648,4 @@ AIR301_names.py:171:1: AIR301 `airflow.auth.managers.base_auth_manager.BaseAuthM
171 | BaseAuthManager()
| ^^^^^^^^^^^^^^^ AIR301
|
= help: Use `BaseAuthManager` from `airflow.api_fastapi.auth.managers.base_auth_manager` instead.
= help: Use `airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager` instead

View File

@@ -10,7 +10,7 @@ AIR301_names_fix.py:19:1: AIR301 [*] `airflow.api_connexion.security.requires_ac
20 |
21 | DatasetDetails()
|
= help: Use `requires_access_asset` from `airflow.api_fastapi.core_api.security` instead.
= help: Use `airflow.api_fastapi.core_api.security.requires_access_asset` instead
Safe fix
15 15 | from airflow.secrets.local_filesystm import load_connections
@@ -31,7 +31,7 @@ AIR301_names_fix.py:21:1: AIR301 [*] `airflow.auth.managers.models.resource_deta
21 | DatasetDetails()
| ^^^^^^^^^^^^^^ AIR301
|
= help: Use `AssetDetails` from `airflow.api_fastapi.auth.managers.models.resource_details` instead.
= help: Use `airflow.api_fastapi.auth.managers.models.resource_details.AssetDetails` instead
Safe fix
15 15 | from airflow.secrets.local_filesystm import load_connections
@@ -54,7 +54,7 @@ AIR301_names_fix.py:24:1: AIR301 [*] `airflow.datasets.manager.DatasetManager` i
25 | dataset_manager()
26 | resolve_dataset_manager()
|
= help: Use `AssetManager` from `airflow.assets.manager` instead.
= help: Use `airflow.assets.manager.AssetManager` instead
Safe fix
15 15 | from airflow.secrets.local_filesystm import load_connections
@@ -80,7 +80,7 @@ AIR301_names_fix.py:25:1: AIR301 [*] `airflow.datasets.manager.dataset_manager`
| ^^^^^^^^^^^^^^^ AIR301
26 | resolve_dataset_manager()
|
= help: Use `asset_manager` from `airflow.assets.manager` instead.
= help: Use `airflow.assets.manager.asset_manager` instead
Safe fix
15 15 | from airflow.secrets.local_filesystm import load_connections
@@ -109,7 +109,7 @@ AIR301_names_fix.py:26:1: AIR301 [*] `airflow.datasets.manager.resolve_dataset_m
27 |
28 | DatasetLineageInfo()
|
= help: Use `resolve_asset_manager` from `airflow.assets.manager` instead.
= help: Use `airflow.assets.manager.resolve_asset_manager` instead
Safe fix
15 15 | from airflow.secrets.local_filesystm import load_connections
@@ -138,7 +138,7 @@ AIR301_names_fix.py:28:1: AIR301 [*] `airflow.lineage.hook.DatasetLineageInfo` i
29 |
30 | AllowListValidator()
|
= help: Use `AssetLineageInfo` from `airflow.lineage.hook` instead.
= help: Use `airflow.lineage.hook.AssetLineageInfo` instead
Safe fix
10 10 | dataset_manager,
@@ -167,7 +167,7 @@ AIR301_names_fix.py:30:1: AIR301 [*] `airflow.metrics.validators.AllowListValida
| ^^^^^^^^^^^^^^^^^^ AIR301
31 | BlockListValidator()
|
= help: Use `PatternAllowListValidator` from `airflow.metrics.validators` instead.
= help: Use `airflow.metrics.validators.PatternAllowListValidator` instead
Safe fix
11 11 | resolve_dataset_manager,
@@ -196,7 +196,7 @@ AIR301_names_fix.py:31:1: AIR301 [*] `airflow.metrics.validators.BlockListValida
32 |
33 | load_connections()
|
= help: Use `PatternBlockListValidator` from `airflow.metrics.validators` instead.
= help: Use `airflow.metrics.validators.PatternBlockListValidator` instead
Safe fix
11 11 | resolve_dataset_manager,
@@ -226,7 +226,7 @@ AIR301_names_fix.py:35:1: AIR301 [*] `airflow.security.permissions.RESOURCE_DATA
36 |
37 | has_access_dataset()
|
= help: Use `RESOURCE_ASSET` from `airflow.security.permissions` instead.
= help: Use `airflow.security.permissions.RESOURCE_ASSET` instead
Safe fix
13 13 | from airflow.lineage.hook import DatasetLineageInfo
@@ -265,7 +265,7 @@ AIR301_names_fix.py:44:1: AIR301 [*] `airflow.listeners.spec.dataset.on_dataset_
| ^^^^^^^^^^^^^^^^^^ AIR301
45 | on_dataset_changed()
|
= help: Use `on_asset_created` from `airflow.listeners.spec.asset` instead.
= help: Use `airflow.listeners.spec.asset.on_asset_created` instead
Safe fix
40 40 | on_dataset_changed,
@@ -283,7 +283,7 @@ AIR301_names_fix.py:45:1: AIR301 [*] `airflow.listeners.spec.dataset.on_dataset_
45 | on_dataset_changed()
| ^^^^^^^^^^^^^^^^^^ AIR301
|
= help: Use `on_asset_changed` from `airflow.listeners.spec.asset` instead.
= help: Use `airflow.listeners.spec.asset.on_asset_changed` instead
Safe fix
40 40 | on_dataset_changed,

View File

@@ -10,7 +10,7 @@ AIR301_provider_names_fix.py:25:1: AIR301 [*] `airflow.providers.amazon.aws.auth
26 |
27 | s3_create_dataset()
|
= help: Use `AvpEntities.ASSET` from `airflow.providers.amazon.aws.auth_manager.avp.entities` instead.
= help: Use `airflow.providers.amazon.aws.auth_manager.avp.entities.AvpEntities.ASSET` instead
Safe fix
22 22 | translate_airflow_dataset,
@@ -30,7 +30,7 @@ AIR301_provider_names_fix.py:27:1: AIR301 [*] `airflow.providers.amazon.aws.data
| ^^^^^^^^^^^^^^^^^ AIR301
28 | s3_convert_dataset_to_openlineage()
|
= help: Use `create_asset` from `airflow.providers.amazon.aws.assets.s3` instead.
= help: Use `airflow.providers.amazon.aws.assets.s3.create_asset` instead
Safe fix
21 21 | DatasetInfo,
@@ -54,7 +54,7 @@ AIR301_provider_names_fix.py:28:1: AIR301 [*] `airflow.providers.amazon.aws.data
29 |
30 | io_create_dataset()
|
= help: Use `convert_asset_to_openlineage` from `airflow.providers.amazon.aws.assets.s3` instead.
= help: Use `airflow.providers.amazon.aws.assets.s3.convert_asset_to_openlineage` instead
Safe fix
21 21 | DatasetInfo,
@@ -79,7 +79,7 @@ AIR301_provider_names_fix.py:36:1: AIR301 [*] `airflow.providers.google.datasets
37 | # airflow.providers.google.datasets.gcs
38 | gcs_create_dataset()
|
= help: Use `create_asset` from `airflow.providers.google.assets.bigquery` instead.
= help: Use `airflow.providers.google.assets.bigquery.create_asset` instead
Safe fix
21 21 | DatasetInfo,
@@ -108,7 +108,7 @@ AIR301_provider_names_fix.py:38:1: AIR301 [*] `airflow.providers.google.datasets
39 | gcs_convert_dataset_to_openlineage()
40 | # airflow.providers.openlineage.utils.utils
|
= help: Use `create_asset` from `airflow.providers.google.assets.gcs` instead.
= help: Use `airflow.providers.google.assets.gcs.create_asset` instead
Safe fix
21 21 | DatasetInfo,
@@ -137,7 +137,7 @@ AIR301_provider_names_fix.py:39:1: AIR301 [*] `airflow.providers.google.datasets
40 | # airflow.providers.openlineage.utils.utils
41 | DatasetInfo()
|
= help: Use `convert_asset_to_openlineage` from `airflow.providers.google.assets.gcs` instead.
= help: Use `airflow.providers.google.assets.gcs.convert_asset_to_openlineage` instead
Safe fix
21 21 | DatasetInfo,
@@ -166,7 +166,7 @@ AIR301_provider_names_fix.py:41:1: AIR301 [*] `airflow.providers.openlineage.uti
42 | translate_airflow_dataset()
43 | #
|
= help: Use `AssetInfo` from `airflow.providers.openlineage.utils.utils` instead.
= help: Use `airflow.providers.openlineage.utils.utils.AssetInfo` instead
Safe fix
20 20 | from airflow.providers.openlineage.utils.utils import (
@@ -195,7 +195,7 @@ AIR301_provider_names_fix.py:42:1: AIR301 [*] `airflow.providers.openlineage.uti
43 | #
44 | # airflow.secrets.local_filesystem
|
= help: Use `translate_airflow_asset` from `airflow.providers.openlineage.utils.utils` instead.
= help: Use `airflow.providers.openlineage.utils.utils.translate_airflow_asset` instead
Safe fix
20 20 | from airflow.providers.openlineage.utils.utils import (

View File

@@ -9,7 +9,7 @@ AIR302_amazon.py:23:1: AIR302 `airflow.hooks.S3_hook.S3Hook` is moved into `amaz
| ^^^^^^ AIR302
24 | provide_bucket_name()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3Hook` from `airflow.providers.amazon.aws.hooks.s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.hooks.s3.S3Hook` instead.
AIR302_amazon.py:24:1: AIR302 `airflow.hooks.S3_hook.provide_bucket_name` is moved into `amazon` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_amazon.py:24:1: AIR302 `airflow.hooks.S3_hook.provide_bucket_name` is mov
25 |
26 | GCSToS3Operator()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `provide_bucket_name` from `airflow.providers.amazon.aws.hooks.s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.hooks.s3.provide_bucket_name` instead.
AIR302_amazon.py:26:1: AIR302 `airflow.operators.gcs_to_s3.GCSToS3Operator` is moved into `amazon` provider in Airflow 3.0;
|
@@ -30,7 +30,7 @@ AIR302_amazon.py:26:1: AIR302 `airflow.operators.gcs_to_s3.GCSToS3Operator` is m
27 |
28 | GoogleApiToS3Operator()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GCSToS3Operator` from `airflow.providers.amazon.aws.transfers.gcs_to_s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator` instead.
AIR302_amazon.py:28:1: AIR302 `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Operator` is moved into `amazon` provider in Airflow 3.0;
|
@@ -40,7 +40,7 @@ AIR302_amazon.py:28:1: AIR302 `airflow.operators.google_api_to_s3_transfer.Googl
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
29 | GoogleApiToS3Transfer()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GoogleApiToS3Operator` from `airflow.providers.amazon.aws.transfers.google_api_to_s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator` instead.
AIR302_amazon.py:29:1: AIR302 `airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer` is moved into `amazon` provider in Airflow 3.0;
|
@@ -50,7 +50,7 @@ AIR302_amazon.py:29:1: AIR302 `airflow.operators.google_api_to_s3_transfer.Googl
30 |
31 | RedshiftToS3Operator()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `GoogleApiToS3Operator` from `airflow.providers.amazon.aws.transfers.google_api_to_s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator` instead.
AIR302_amazon.py:31:1: AIR302 `airflow.operators.redshift_to_s3_operator.RedshiftToS3Operator` is moved into `amazon` provider in Airflow 3.0;
|
@@ -60,7 +60,7 @@ AIR302_amazon.py:31:1: AIR302 `airflow.operators.redshift_to_s3_operator.Redshif
| ^^^^^^^^^^^^^^^^^^^^ AIR302
32 | RedshiftToS3Transfer()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `RedshiftToS3Operator` from `airflow.providers.amazon.aws.transfers.redshift_to_s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator` instead.
AIR302_amazon.py:32:1: AIR302 `airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer` is moved into `amazon` provider in Airflow 3.0;
|
@@ -70,7 +70,7 @@ AIR302_amazon.py:32:1: AIR302 `airflow.operators.redshift_to_s3_operator.Redshif
33 |
34 | S3FileTransformOperator()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `RedshiftToS3Operator` from `airflow.providers.amazon.aws.transfers.redshift_to_s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator` instead.
AIR302_amazon.py:34:1: AIR302 `airflow.operators.s3_file_transform_operator.S3FileTransformOperator` is moved into `amazon` provider in Airflow 3.0;
|
@@ -81,7 +81,7 @@ AIR302_amazon.py:34:1: AIR302 `airflow.operators.s3_file_transform_operator.S3Fi
35 |
36 | S3ToRedshiftOperator()
|
= help: Install `apache-airflow-providers-amazon>=3.0.0` and use `S3FileTransformOperator` from `airflow.providers.amazon.aws.operators.s3` instead.
= help: Install `apache-airflow-providers-amazon>=3.0.0` and use `airflow.providers.amazon.aws.operators.s3.S3FileTransformOperator` instead.
AIR302_amazon.py:36:1: AIR302 `airflow.operators.s3_to_redshift_operator.S3ToRedshiftOperator` is moved into `amazon` provider in Airflow 3.0;
|
@@ -91,7 +91,7 @@ AIR302_amazon.py:36:1: AIR302 `airflow.operators.s3_to_redshift_operator.S3ToRed
| ^^^^^^^^^^^^^^^^^^^^ AIR302
37 | S3ToRedshiftTransfer()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3ToRedshiftOperator` from `airflow.providers.amazon.aws.transfers.s3_to_redshift` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator` instead.
AIR302_amazon.py:37:1: AIR302 `airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer` is moved into `amazon` provider in Airflow 3.0;
|
@@ -101,7 +101,7 @@ AIR302_amazon.py:37:1: AIR302 `airflow.operators.s3_to_redshift_operator.S3ToRed
38 |
39 | S3KeySensor()
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3ToRedshiftOperator` from `airflow.providers.amazon.aws.transfers.s3_to_redshift` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator` instead.
AIR302_amazon.py:39:1: AIR302 `airflow.sensors.s3_key_sensor.S3KeySensor` is moved into `amazon` provider in Airflow 3.0;
|
@@ -110,4 +110,4 @@ AIR302_amazon.py:39:1: AIR302 `airflow.sensors.s3_key_sensor.S3KeySensor` is mov
39 | S3KeySensor()
| ^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `S3KeySensor` from `airflow.providers.amazon.aws.sensors.s3` instead.
= help: Install `apache-airflow-providers-amazon>=1.0.0` and use `airflow.providers.amazon.aws.sensors.s3.S3KeySensor` instead.
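Concretely, the first help message in this snapshot corresponds to an import migration like the following (a hedged sketch; the module paths are taken verbatim from the messages above):

# Before (flagged by AIR302):
from airflow.hooks.S3_hook import S3Hook

# After (requires apache-airflow-providers-amazon>=1.0.0):
from airflow.providers.amazon.aws.hooks.s3 import S3Hook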

View File

@@ -10,7 +10,7 @@ AIR302_celery.py:9:1: AIR302 `airflow.config_templates.default_celery.DEFAULT_CE
10 |
11 | app
|
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `DEFAULT_CELERY_CONFIG` from `airflow.providers.celery.executors.default_celery` instead.
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG` instead.
AIR302_celery.py:11:1: AIR302 `airflow.executors.celery_executor.app` is moved into `celery` provider in Airflow 3.0;
|
@@ -20,7 +20,7 @@ AIR302_celery.py:11:1: AIR302 `airflow.executors.celery_executor.app` is moved i
| ^^^ AIR302
12 | CeleryExecutor()
|
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `app` from `airflow.providers.celery.executors.celery_executor_utils` instead.
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `airflow.providers.celery.executors.celery_executor_utils.app` instead.
AIR302_celery.py:12:1: AIR302 `airflow.executors.celery_executor.CeleryExecutor` is moved into `celery` provider in Airflow 3.0;
|
@@ -28,4 +28,4 @@ AIR302_celery.py:12:1: AIR302 `airflow.executors.celery_executor.CeleryExecutor`
12 | CeleryExecutor()
| ^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `CeleryExecutor` from `airflow.providers.celery.executors.celery_executor` instead.
= help: Install `apache-airflow-providers-celery>=3.3.0` and use `airflow.providers.celery.executors.celery_executor.CeleryExecutor` instead.

View File

@@ -10,7 +10,7 @@ AIR302_common_sql.py:10:1: AIR302 `airflow.hooks.dbapi.ConnectorProtocol` is mov
11 | DbApiHook()
12 | SQLCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `ConnectorProtocol` from `airflow.providers.common.sql.hooks.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.hooks.sql.ConnectorProtocol` instead.
AIR302_common_sql.py:11:1: AIR302 `airflow.hooks.dbapi_hook.DbApiHook` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_common_sql.py:11:1: AIR302 `airflow.hooks.dbapi_hook.DbApiHook` is moved
| ^^^^^^^^^ AIR302
12 | SQLCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `DbApiHook` from `airflow.providers.common.sql.hooks.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.hooks.sql.DbApiHook` instead.
AIR302_common_sql.py:12:1: AIR302 `airflow.operators.check_operator.SQLCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -28,7 +28,7 @@ AIR302_common_sql.py:12:1: AIR302 `airflow.operators.check_operator.SQLCheckOper
12 | SQLCheckOperator()
| ^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:18:1: AIR302 `airflow.operators.sql.SQLCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -38,7 +38,7 @@ AIR302_common_sql.py:18:1: AIR302 `airflow.operators.sql.SQLCheckOperator` is mo
| ^^^^^^^^^^^^^^^^ AIR302
19 | CheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:19:1: AIR302 `airflow.operators.check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -46,7 +46,7 @@ AIR302_common_sql.py:19:1: AIR302 `airflow.operators.check_operator.CheckOperato
19 | CheckOperator()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:24:1: AIR302 `airflow.operators.druid_check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -55,7 +55,7 @@ AIR302_common_sql.py:24:1: AIR302 `airflow.operators.druid_check_operator.CheckO
24 | CheckOperator()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:29:1: AIR302 `airflow.operators.presto_check_operator.CheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -64,7 +64,7 @@ AIR302_common_sql.py:29:1: AIR302 `airflow.operators.presto_check_operator.Check
29 | CheckOperator()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:39:1: AIR302 `airflow.operators.druid_check_operator.DruidCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -75,7 +75,7 @@ AIR302_common_sql.py:39:1: AIR302 `airflow.operators.druid_check_operator.DruidC
40 | PrestoCheckOperator()
41 | IntervalCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:40:1: AIR302 `airflow.operators.presto_check_operator.PrestoCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -85,7 +85,7 @@ AIR302_common_sql.py:40:1: AIR302 `airflow.operators.presto_check_operator.Prest
41 | IntervalCheckOperator()
42 | SQLIntervalCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLCheckOperator` instead.
AIR302_common_sql.py:41:1: AIR302 `airflow.operators.check_operator.IntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -95,7 +95,7 @@ AIR302_common_sql.py:41:1: AIR302 `airflow.operators.check_operator.IntervalChec
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
42 | SQLIntervalCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator` instead.
AIR302_common_sql.py:42:1: AIR302 `airflow.operators.check_operator.SQLIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -104,7 +104,7 @@ AIR302_common_sql.py:42:1: AIR302 `airflow.operators.check_operator.SQLIntervalC
42 | SQLIntervalCheckOperator()
| ^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator` instead.
AIR302_common_sql.py:51:1: AIR302 `airflow.operators.presto_check_operator.IntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -115,7 +115,7 @@ AIR302_common_sql.py:51:1: AIR302 `airflow.operators.presto_check_operator.Inter
52 | SQLIntervalCheckOperator()
53 | PrestoIntervalCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator` instead.
AIR302_common_sql.py:52:1: AIR302 `airflow.operators.sql.SQLIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -124,7 +124,7 @@ AIR302_common_sql.py:52:1: AIR302 `airflow.operators.sql.SQLIntervalCheckOperato
| ^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
53 | PrestoIntervalCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator` instead.
AIR302_common_sql.py:53:1: AIR302 `airflow.operators.presto_check_operator.PrestoIntervalCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -133,7 +133,7 @@ AIR302_common_sql.py:53:1: AIR302 `airflow.operators.presto_check_operator.Prest
53 | PrestoIntervalCheckOperator()
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLIntervalCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator` instead.
AIR302_common_sql.py:61:1: AIR302 `airflow.operators.check_operator.SQLThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -143,7 +143,7 @@ AIR302_common_sql.py:61:1: AIR302 `airflow.operators.check_operator.SQLThreshold
| ^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
62 | ThresholdCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator` instead.
AIR302_common_sql.py:62:1: AIR302 `airflow.operators.check_operator.ThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -151,7 +151,7 @@ AIR302_common_sql.py:62:1: AIR302 `airflow.operators.check_operator.ThresholdChe
62 | ThresholdCheckOperator()
| ^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator` instead.
AIR302_common_sql.py:67:1: AIR302 `airflow.operators.sql.SQLThresholdCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -160,7 +160,7 @@ AIR302_common_sql.py:67:1: AIR302 `airflow.operators.sql.SQLThresholdCheckOperat
67 | SQLThresholdCheckOperator()
| ^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLThresholdCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator` instead.
AIR302_common_sql.py:75:1: AIR302 `airflow.operators.check_operator.SQLValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -170,7 +170,7 @@ AIR302_common_sql.py:75:1: AIR302 `airflow.operators.check_operator.SQLValueChec
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
76 | ValueCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` instead.
AIR302_common_sql.py:76:1: AIR302 `airflow.operators.check_operator.ValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -178,7 +178,7 @@ AIR302_common_sql.py:76:1: AIR302 `airflow.operators.check_operator.ValueCheckOp
76 | ValueCheckOperator()
| ^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` instead.
AIR302_common_sql.py:85:1: AIR302 `airflow.operators.sql.SQLValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -189,7 +189,7 @@ AIR302_common_sql.py:85:1: AIR302 `airflow.operators.sql.SQLValueCheckOperator`
86 | ValueCheckOperator()
87 | PrestoValueCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` instead.
AIR302_common_sql.py:86:1: AIR302 `airflow.operators.presto_check_operator.ValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -198,7 +198,7 @@ AIR302_common_sql.py:86:1: AIR302 `airflow.operators.presto_check_operator.Value
| ^^^^^^^^^^^^^^^^^^ AIR302
87 | PrestoValueCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` instead.
AIR302_common_sql.py:87:1: AIR302 `airflow.operators.presto_check_operator.PrestoValueCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -207,7 +207,7 @@ AIR302_common_sql.py:87:1: AIR302 `airflow.operators.presto_check_operator.Prest
87 | PrestoValueCheckOperator()
| ^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLValueCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` instead.
AIR302_common_sql.py:99:1: AIR302 `airflow.operators.sql.BaseSQLOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -218,7 +218,7 @@ AIR302_common_sql.py:99:1: AIR302 `airflow.operators.sql.BaseSQLOperator` is mov
100 | BranchSQLOperator()
101 | SQLTableCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `BaseSQLOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.BaseSQLOperator` instead.
AIR302_common_sql.py:100:1: AIR302 `airflow.operators.sql.BranchSQLOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -228,7 +228,7 @@ AIR302_common_sql.py:100:1: AIR302 `airflow.operators.sql.BranchSQLOperator` is
101 | SQLTableCheckOperator()
102 | SQLColumnCheckOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `BranchSQLOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.BranchSQLOperator` instead.
AIR302_common_sql.py:101:1: AIR302 `airflow.operators.sql.SQLTableCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -239,7 +239,7 @@ AIR302_common_sql.py:101:1: AIR302 `airflow.operators.sql.SQLTableCheckOperator`
102 | SQLColumnCheckOperator()
103 | _convert_to_float_if_possible()
|
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `SQLTableCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.1.0` and use `airflow.providers.common.sql.operators.sql.SQLTableCheckOperator` instead.
AIR302_common_sql.py:102:1: AIR302 `airflow.operators.sql.SQLColumnCheckOperator` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -250,7 +250,7 @@ AIR302_common_sql.py:102:1: AIR302 `airflow.operators.sql.SQLColumnCheckOperator
103 | _convert_to_float_if_possible()
104 | parse_boolean()
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SQLColumnCheckOperator` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.operators.sql.SQLColumnCheckOperator` instead.
AIR302_common_sql.py:103:1: AIR302 `airflow.operators.sql._convert_to_float_if_possible` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -260,7 +260,7 @@ AIR302_common_sql.py:103:1: AIR302 `airflow.operators.sql._convert_to_float_if_p
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
104 | parse_boolean()
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `_convert_to_float_if_possible` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.operators.sql._convert_to_float_if_possible` instead.
AIR302_common_sql.py:104:1: AIR302 `airflow.operators.sql.parse_boolean` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -269,7 +269,7 @@ AIR302_common_sql.py:104:1: AIR302 `airflow.operators.sql.parse_boolean` is move
104 | parse_boolean()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `parse_boolean` from `airflow.providers.common.sql.operators.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.operators.sql.parse_boolean` instead.
AIR302_common_sql.py:109:1: AIR302 `airflow.sensors.sql.SqlSensor` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -278,7 +278,7 @@ AIR302_common_sql.py:109:1: AIR302 `airflow.sensors.sql.SqlSensor` is moved into
109 | SqlSensor()
| ^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SqlSensor` from `airflow.providers.common.sql.sensors.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.sensors.sql.SqlSensor` instead.
AIR302_common_sql.py:114:1: AIR302 `airflow.sensors.sql_sensor.SqlSensor` is moved into `common-sql` provider in Airflow 3.0;
|
@@ -287,4 +287,147 @@ AIR302_common_sql.py:114:1: AIR302 `airflow.sensors.sql_sensor.SqlSensor` is mov
114 | SqlSensor()
| ^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `SqlSensor` from `airflow.providers.common.sql.sensors.sql` instead.
= help: Install `apache-airflow-providers-common-sql>=1.0.0` and use `airflow.providers.common.sql.sensors.sql.SqlSensor` instead.
AIR302_common_sql.py:124:1: AIR302 [*] `airflow.operators.jdbc_operator.JdbcOperator` is moved into `common-sql` provider in Airflow 3.0;
|
122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |
124 | JdbcOperator()
| ^^^^^^^^^^^^ AIR302
125 | MsSqlOperator()
126 | MySqlOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 |-JdbcOperator()
125 |+SQLExecuteQueryOperator()
125 126 | MsSqlOperator()
126 127 | MySqlOperator()
127 128 | OracleOperator()
AIR302_common_sql.py:125:1: AIR302 [*] `airflow.operators.mssql_operator.MsSqlOperator` is moved into `common-sql` provider in Airflow 3.0;
|
124 | JdbcOperator()
125 | MsSqlOperator()
| ^^^^^^^^^^^^^ AIR302
126 | MySqlOperator()
127 | OracleOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 125 | JdbcOperator()
125 |-MsSqlOperator()
126 |+SQLExecuteQueryOperator()
126 127 | MySqlOperator()
127 128 | OracleOperator()
128 129 | PostgresOperator()
AIR302_common_sql.py:126:1: AIR302 [*] `airflow.operators.mysql_operator.MySqlOperator` is moved into `common-sql` provider in Airflow 3.0;
|
124 | JdbcOperator()
125 | MsSqlOperator()
126 | MySqlOperator()
| ^^^^^^^^^^^^^ AIR302
127 | OracleOperator()
128 | PostgresOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 125 | JdbcOperator()
125 126 | MsSqlOperator()
126 |-MySqlOperator()
127 |+SQLExecuteQueryOperator()
127 128 | OracleOperator()
128 129 | PostgresOperator()
129 130 | SqliteOperator()
AIR302_common_sql.py:127:1: AIR302 [*] `airflow.operators.oracle_operator.OracleOperator` is moved into `common-sql` provider in Airflow 3.0;
|
125 | MsSqlOperator()
126 | MySqlOperator()
127 | OracleOperator()
| ^^^^^^^^^^^^^^ AIR302
128 | PostgresOperator()
129 | SqliteOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 125 | JdbcOperator()
125 126 | MsSqlOperator()
126 127 | MySqlOperator()
127 |-OracleOperator()
128 |+SQLExecuteQueryOperator()
128 129 | PostgresOperator()
129 130 | SqliteOperator()
AIR302_common_sql.py:128:1: AIR302 [*] `airflow.operators.postgres_operator.PostgresOperator` is moved into `common-sql` provider in Airflow 3.0;
|
126 | MySqlOperator()
127 | OracleOperator()
128 | PostgresOperator()
| ^^^^^^^^^^^^^^^^ AIR302
129 | SqliteOperator()
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 125 | JdbcOperator()
125 126 | MsSqlOperator()
126 127 | MySqlOperator()
127 128 | OracleOperator()
128 |-PostgresOperator()
129 |+SQLExecuteQueryOperator()
129 130 | SqliteOperator()
AIR302_common_sql.py:129:1: AIR302 [*] `airflow.operators.sqlite_operator.SqliteOperator` is moved into `common-sql` provider in Airflow 3.0;
|
127 | OracleOperator()
128 | PostgresOperator()
129 | SqliteOperator()
| ^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-common-sql>=1.3.0` and use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` instead.
Safe fix
120 120 | from airflow.operators.oracle_operator import OracleOperator
121 121 | from airflow.operators.postgres_operator import PostgresOperator
122 122 | from airflow.operators.sqlite_operator import SqliteOperator
123 |+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
123 124 |
124 125 | JdbcOperator()
125 126 | MsSqlOperator()
126 127 | MySqlOperator()
127 128 | OracleOperator()
128 129 | PostgresOperator()
129 |-SqliteOperator()
130 |+SQLExecuteQueryOperator()
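
The safe fixes above all converge on the same replacement class. A minimal sketch of the resulting DAG code, assuming `apache-airflow-providers-common-sql>=1.3.0` is installed; the connection id and SQL statement are placeholders, not values from the fixture:

```python
# Sketch only: the migrated form that the safe fixes above produce.
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

run_query = SQLExecuteQueryOperator(
    task_id="run_query",
    conn_id="my_db",   # placeholder: any supported database connection
    sql="SELECT 1",    # placeholder statement
)
```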

View File

@@ -8,4 +8,4 @@ AIR302_daskexecutor.py:5:1: AIR302 `airflow.executors.dask_executor.DaskExecutor
5 | DaskExecutor()
| ^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-daskexecutor>=1.0.0` and use `DaskExecutor` from `airflow.providers.daskexecutor.executors.dask_executor` instead.
= help: Install `apache-airflow-providers-daskexecutor>=1.0.0` and use `airflow.providers.daskexecutor.executors.dask_executor.DaskExecutor` instead.
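
For reference, a minimal sketch of the import path this help message names, assuming `apache-airflow-providers-daskexecutor>=1.0.0` is installed:

```python
# Sketch only: the replacement import suggested by the help message.
from airflow.providers.daskexecutor.executors.dask_executor import DaskExecutor

executor = DaskExecutor()  # cluster settings are read from Airflow config
```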

View File

@@ -9,7 +9,7 @@ AIR302_druid.py:12:1: AIR302 `airflow.hooks.druid_hook.DruidDbApiHook` is moved
| ^^^^^^^^^^^^^^ AIR302
13 | DruidHook()
|
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `DruidDbApiHook` from `airflow.providers.apache.druid.hooks.druid` instead.
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `airflow.providers.apache.druid.hooks.druid.DruidDbApiHook` instead.
AIR302_druid.py:13:1: AIR302 `airflow.hooks.druid_hook.DruidHook` is moved into `apache-druid` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_druid.py:13:1: AIR302 `airflow.hooks.druid_hook.DruidHook` is moved into
14 |
15 | HiveToDruidOperator()
|
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `DruidHook` from `airflow.providers.apache.druid.hooks.druid` instead.
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `airflow.providers.apache.druid.hooks.druid.DruidHook` instead.
AIR302_druid.py:15:1: AIR302 `airflow.operators.hive_to_druid.HiveToDruidOperator` is moved into `apache-druid` provider in Airflow 3.0;
|
@@ -29,7 +29,7 @@ AIR302_druid.py:15:1: AIR302 `airflow.operators.hive_to_druid.HiveToDruidOperato
| ^^^^^^^^^^^^^^^^^^^ AIR302
16 | HiveToDruidTransfer()
|
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `HiveToDruidOperator` from `airflow.providers.apache.druid.transfers.hive_to_druid` instead.
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator` instead.
AIR302_druid.py:16:1: AIR302 `airflow.operators.hive_to_druid.HiveToDruidTransfer` is moved into `apache-druid` provider in Airflow 3.0;
|
@@ -37,4 +37,4 @@ AIR302_druid.py:16:1: AIR302 `airflow.operators.hive_to_druid.HiveToDruidTransfe
16 | HiveToDruidTransfer()
| ^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `HiveToDruidOperator` from `airflow.providers.apache.druid.transfers.hive_to_druid` instead.
= help: Install `apache-airflow-providers-apache-druid>=1.0.0` and use `airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator` instead.
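
A minimal sketch of the provider imports these messages point to, assuming `apache-airflow-providers-apache-druid>=1.0.0` is installed:

```python
# Sketch only: updated import locations per the help messages above.
from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook, DruidHook
from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
```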

View File

@@ -10,7 +10,7 @@ AIR302_fab.py:10:1: AIR302 `airflow.api.auth.backend.basic_auth.CLIENT_AUTH` is
11 | init_app()
12 | auth_current_user()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth.CLIENT_AUTH` instead.
AIR302_fab.py:11:1: AIR302 `airflow.api.auth.backend.basic_auth.init_app` is moved into `fab` provider in Airflow 3.0;
|
@@ -20,7 +20,7 @@ AIR302_fab.py:11:1: AIR302 `airflow.api.auth.backend.basic_auth.init_app` is mov
12 | auth_current_user()
13 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth.init_app` instead.
AIR302_fab.py:12:1: AIR302 `airflow.api.auth.backend.basic_auth.auth_current_user` is moved into `fab` provider in Airflow 3.0;
|
@@ -30,7 +30,7 @@ AIR302_fab.py:12:1: AIR302 `airflow.api.auth.backend.basic_auth.auth_current_use
| ^^^^^^^^^^^^^^^^^ AIR302
13 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `auth_current_user` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth.auth_current_user` instead.
AIR302_fab.py:13:1: AIR302 `airflow.api.auth.backend.basic_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
|
@@ -41,7 +41,7 @@ AIR302_fab.py:13:1: AIR302 `airflow.api.auth.backend.basic_auth.requires_authent
14 |
15 | from airflow.api.auth.backend.kerberos_auth import (
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth.requires_authentication` instead.
AIR302_fab.py:23:1: AIR302 `airflow.api.auth.backend.kerberos_auth.log` is moved into `fab` provider in Airflow 3.0;
|
@@ -52,7 +52,7 @@ AIR302_fab.py:23:1: AIR302 `airflow.api.auth.backend.kerberos_auth.log` is moved
24 | CLIENT_AUTH
25 | find_user()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `log` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.log` instead.
AIR302_fab.py:24:1: AIR302 `airflow.api.auth.backend.kerberos_auth.CLIENT_AUTH` is moved into `fab` provider in Airflow 3.0;
|
@@ -62,7 +62,7 @@ AIR302_fab.py:24:1: AIR302 `airflow.api.auth.backend.kerberos_auth.CLIENT_AUTH`
25 | find_user()
26 | init_app()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.CLIENT_AUTH` instead.
AIR302_fab.py:25:1: AIR302 `airflow.api.auth.backend.kerberos_auth.find_user` is moved into `fab` provider in Airflow 3.0;
|
@@ -73,7 +73,7 @@ AIR302_fab.py:25:1: AIR302 `airflow.api.auth.backend.kerberos_auth.find_user` is
26 | init_app()
27 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `find_user` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.find_user` instead.
AIR302_fab.py:26:1: AIR302 `airflow.api.auth.backend.kerberos_auth.init_app` is moved into `fab` provider in Airflow 3.0;
|
@@ -83,7 +83,7 @@ AIR302_fab.py:26:1: AIR302 `airflow.api.auth.backend.kerberos_auth.init_app` is
| ^^^^^^^^ AIR302
27 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.init_app` instead.
AIR302_fab.py:27:1: AIR302 `airflow.api.auth.backend.kerberos_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
|
@@ -94,7 +94,7 @@ AIR302_fab.py:27:1: AIR302 `airflow.api.auth.backend.kerberos_auth.requires_auth
28 |
29 | from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.requires_authentication` instead.
AIR302_fab.py:37:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.log` is moved into `fab` provider in Airflow 3.0;
|
@@ -105,7 +105,7 @@ AIR302_fab.py:37:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_
38 | CLIENT_AUTH
39 | find_user()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `log` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.log` instead.
AIR302_fab.py:38:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.CLIENT_AUTH` is moved into `fab` provider in Airflow 3.0;
|
@@ -115,7 +115,7 @@ AIR302_fab.py:38:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_
39 | find_user()
40 | init_app()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `CLIENT_AUTH` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.CLIENT_AUTH` instead.
AIR302_fab.py:39:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.find_user` is moved into `fab` provider in Airflow 3.0;
|
@@ -126,7 +126,7 @@ AIR302_fab.py:39:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_
40 | init_app()
41 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `find_user` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.find_user` instead.
AIR302_fab.py:40:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.init_app` is moved into `fab` provider in Airflow 3.0;
|
@@ -136,7 +136,7 @@ AIR302_fab.py:40:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_
| ^^^^^^^^ AIR302
41 | requires_authentication()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `init_app` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.init_app` instead.
AIR302_fab.py:41:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_auth.requires_authentication` is moved into `fab` provider in Airflow 3.0;
|
@@ -147,7 +147,7 @@ AIR302_fab.py:41:1: AIR302 `airflow.auth.managers.fab.api.auth.backend.kerberos_
42 |
43 | from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `requires_authentication` from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth.requires_authentication` instead.
AIR302_fab.py:49:1: AIR302 `airflow.auth.managers.fab.fab_auth_manager.FabAuthManager` is moved into `fab` provider in Airflow 3.0;
|
@@ -158,7 +158,7 @@ AIR302_fab.py:49:1: AIR302 `airflow.auth.managers.fab.fab_auth_manager.FabAuthMa
50 | MAX_NUM_DATABASE_USER_SESSIONS
51 | FabAirflowSecurityManagerOverride()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAuthManager` from `airflow.providers.fab.auth_manager.fab_auth_manager` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager` instead.
AIR302_fab.py:50:1: AIR302 `airflow.auth.managers.fab.security_manager.override.MAX_NUM_DATABASE_USER_SESSIONS` is moved into `fab` provider in Airflow 3.0;
|
@@ -167,7 +167,7 @@ AIR302_fab.py:50:1: AIR302 `airflow.auth.managers.fab.security_manager.override.
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
51 | FabAirflowSecurityManagerOverride()
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `MAX_NUM_DATABASE_USER_SESSIONS` from `airflow.providers.fab.auth_manager.security_manager.override` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.security_manager.override.MAX_NUM_DATABASE_USER_SESSIONS` instead.
AIR302_fab.py:51:1: AIR302 `airflow.auth.managers.fab.security_manager.override.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
|
@@ -178,7 +178,7 @@ AIR302_fab.py:51:1: AIR302 `airflow.auth.managers.fab.security_manager.override.
52 |
53 | from airflow.www.security import FabAirflowSecurityManagerOverride
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAirflowSecurityManagerOverride` from `airflow.providers.fab.auth_manager.security_manager.override` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride` instead.
AIR302_fab.py:55:1: AIR302 `airflow.www.security.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
|
@@ -187,4 +187,4 @@ AIR302_fab.py:55:1: AIR302 `airflow.www.security.FabAirflowSecurityManagerOverri
55 | FabAirflowSecurityManagerOverride()
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `FabAirflowSecurityManagerOverride` from `airflow.providers.fab.auth_manager.security_manager.override` instead.
= help: Install `apache-airflow-providers-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride` instead.
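
A minimal sketch of the provider-based replacements for the deprecated FAB paths above, assuming `apache-airflow-providers-fab>=1.0.0` is installed:

```python
# Sketch only: updated import locations per the help messages above.
from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
from airflow.providers.fab.auth_manager.security_manager.override import (
    FabAirflowSecurityManagerOverride,
)
```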

View File

@@ -9,7 +9,7 @@ AIR302_hdfs.py:6:1: AIR302 `airflow.hooks.webhdfs_hook.WebHDFSHook` is moved int
| ^^^^^^^^^^^ AIR302
7 | WebHdfsSensor()
|
= help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `WebHDFSHook` from `airflow.providers.apache.hdfs.hooks.webhdfs` instead.
= help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `airflow.providers.apache.hdfs.hooks.webhdfs.WebHDFSHook` instead.
AIR302_hdfs.py:7:1: AIR302 `airflow.sensors.web_hdfs_sensor.WebHdfsSensor` is moved into `apache-hdfs` provider in Airflow 3.0;
|
@@ -17,4 +17,4 @@ AIR302_hdfs.py:7:1: AIR302 `airflow.sensors.web_hdfs_sensor.WebHdfsSensor` is mo
7 | WebHdfsSensor()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `WebHdfsSensor` from `airflow.providers.apache.hdfs.sensors.web_hdfs` instead.
= help: Install `apache-airflow-providers-apache-hdfs>=1.0.0` and use `airflow.providers.apache.hdfs.sensors.web_hdfs.WebHdfsSensor` instead.
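
A minimal sketch of the updated hdfs imports, assuming `apache-airflow-providers-apache-hdfs>=1.0.0` is installed:

```python
# Sketch only: updated import locations per the help messages above.
from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor
```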

View File

@@ -9,7 +9,7 @@ AIR302_hive.py:36:1: AIR302 `airflow.macros.hive.closest_ds_partition` is moved
| ^^^^^^^^^^^^^^^^^^^^ AIR302
37 | max_partition()
|
= help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `closest_ds_partition` from `airflow.providers.apache.hive.macros.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `airflow.providers.apache.hive.macros.hive.closest_ds_partition` instead.
AIR302_hive.py:37:1: AIR302 `airflow.macros.hive.max_partition` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_hive.py:37:1: AIR302 `airflow.macros.hive.max_partition` is moved into `a
38 |
39 | HiveCliHook()
|
= help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `max_partition` from `airflow.providers.apache.hive.macros.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=5.1.0` and use `airflow.providers.apache.hive.macros.hive.max_partition` instead.
AIR302_hive.py:39:1: AIR302 `airflow.hooks.hive_hooks.HiveCliHook` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -30,7 +30,7 @@ AIR302_hive.py:39:1: AIR302 `airflow.hooks.hive_hooks.HiveCliHook` is moved into
40 | HiveMetastoreHook()
41 | HiveServer2Hook()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveCliHook` from `airflow.providers.apache.hive.hooks.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.hooks.hive.HiveCliHook` instead.
AIR302_hive.py:40:1: AIR302 `airflow.hooks.hive_hooks.HiveMetastoreHook` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -40,7 +40,7 @@ AIR302_hive.py:40:1: AIR302 `airflow.hooks.hive_hooks.HiveMetastoreHook` is move
41 | HiveServer2Hook()
42 | HIVE_QUEUE_PRIORITIES
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveMetastoreHook` from `airflow.providers.apache.hive.hooks.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook` instead.
AIR302_hive.py:41:1: AIR302 `airflow.hooks.hive_hooks.HiveServer2Hook` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -50,7 +50,7 @@ AIR302_hive.py:41:1: AIR302 `airflow.hooks.hive_hooks.HiveServer2Hook` is moved
| ^^^^^^^^^^^^^^^ AIR302
42 | HIVE_QUEUE_PRIORITIES
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveServer2Hook` from `airflow.providers.apache.hive.hooks.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.hooks.hive.HiveServer2Hook` instead.
AIR302_hive.py:42:1: AIR302 `airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -61,7 +61,7 @@ AIR302_hive.py:42:1: AIR302 `airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES` is
43 |
44 | HiveOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HIVE_QUEUE_PRIORITIES` from `airflow.providers.apache.hive.hooks.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES` instead.
AIR302_hive.py:44:1: AIR302 `airflow.operators.hive_operator.HiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -72,7 +72,7 @@ AIR302_hive.py:44:1: AIR302 `airflow.operators.hive_operator.HiveOperator` is mo
45 |
46 | HiveStatsCollectionOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveOperator` from `airflow.providers.apache.hive.operators.hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.operators.hive.HiveOperator` instead.
AIR302_hive.py:46:1: AIR302 `airflow.operators.hive_stats_operator.HiveStatsCollectionOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -83,7 +83,7 @@ AIR302_hive.py:46:1: AIR302 `airflow.operators.hive_stats_operator.HiveStatsColl
47 |
48 | HiveToMySqlOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveStatsCollectionOperator` from `airflow.providers.apache.hive.operators.hive_stats` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator` instead.
AIR302_hive.py:48:1: AIR302 `airflow.operators.hive_to_mysql.HiveToMySqlOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -93,7 +93,7 @@ AIR302_hive.py:48:1: AIR302 `airflow.operators.hive_to_mysql.HiveToMySqlOperator
| ^^^^^^^^^^^^^^^^^^^ AIR302
49 | HiveToMySqlTransfer()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToMySqlOperator` from `airflow.providers.apache.hive.transfers.hive_to_mysql` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator` instead.
AIR302_hive.py:49:1: AIR302 `airflow.operators.hive_to_mysql.HiveToMySqlTransfer` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -103,7 +103,7 @@ AIR302_hive.py:49:1: AIR302 `airflow.operators.hive_to_mysql.HiveToMySqlTransfer
50 |
51 | HiveToSambaOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToMySqlOperator` from `airflow.providers.apache.hive.transfers.hive_to_mysql` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator` instead.
AIR302_hive.py:51:1: AIR302 `airflow.operators.hive_to_samba_operator.HiveToSambaOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -114,7 +114,7 @@ AIR302_hive.py:51:1: AIR302 `airflow.operators.hive_to_samba_operator.HiveToSamb
52 |
53 | MsSqlToHiveOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HiveToSambaOperator` from `airflow.providers.apache.hive.transfers.hive_to_samba` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator` instead.
AIR302_hive.py:53:1: AIR302 `airflow.operators.mssql_to_hive.MsSqlToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -124,7 +124,7 @@ AIR302_hive.py:53:1: AIR302 `airflow.operators.mssql_to_hive.MsSqlToHiveOperator
| ^^^^^^^^^^^^^^^^^^^ AIR302
54 | MsSqlToHiveTransfer()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MsSqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mssql_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator` instead.
AIR302_hive.py:54:1: AIR302 `airflow.operators.mssql_to_hive.MsSqlToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -134,7 +134,7 @@ AIR302_hive.py:54:1: AIR302 `airflow.operators.mssql_to_hive.MsSqlToHiveTransfer
55 |
56 | MySqlToHiveOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MsSqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mssql_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator` instead.
AIR302_hive.py:56:1: AIR302 `airflow.operators.mysql_to_hive.MySqlToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -144,7 +144,7 @@ AIR302_hive.py:56:1: AIR302 `airflow.operators.mysql_to_hive.MySqlToHiveOperator
| ^^^^^^^^^^^^^^^^^^^ AIR302
57 | MySqlToHiveTransfer()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MySqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mysql_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator` instead.
AIR302_hive.py:57:1: AIR302 `airflow.operators.mysql_to_hive.MySqlToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -154,7 +154,7 @@ AIR302_hive.py:57:1: AIR302 `airflow.operators.mysql_to_hive.MySqlToHiveTransfer
58 |
59 | S3ToHiveOperator()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MySqlToHiveOperator` from `airflow.providers.apache.hive.transfers.mysql_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator` instead.
AIR302_hive.py:59:1: AIR302 `airflow.operators.s3_to_hive_operator.S3ToHiveOperator` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -164,7 +164,7 @@ AIR302_hive.py:59:1: AIR302 `airflow.operators.s3_to_hive_operator.S3ToHiveOpera
| ^^^^^^^^^^^^^^^^ AIR302
60 | S3ToHiveTransfer()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `S3ToHiveOperator` from `airflow.providers.apache.hive.transfers.s3_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator` instead.
AIR302_hive.py:60:1: AIR302 `airflow.operators.s3_to_hive_operator.S3ToHiveTransfer` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -174,7 +174,7 @@ AIR302_hive.py:60:1: AIR302 `airflow.operators.s3_to_hive_operator.S3ToHiveTrans
61 |
62 | HivePartitionSensor()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `S3ToHiveOperator` from `airflow.providers.apache.hive.transfers.s3_to_hive` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator` instead.
AIR302_hive.py:62:1: AIR302 `airflow.sensors.hive_partition_sensor.HivePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -185,7 +185,7 @@ AIR302_hive.py:62:1: AIR302 `airflow.sensors.hive_partition_sensor.HivePartition
63 |
64 | MetastorePartitionSensor()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `HivePartitionSensor` from `airflow.providers.apache.hive.sensors.hive_partition` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor` instead.
AIR302_hive.py:64:1: AIR302 `airflow.sensors.metastore_partition_sensor.MetastorePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -196,7 +196,7 @@ AIR302_hive.py:64:1: AIR302 `airflow.sensors.metastore_partition_sensor.Metastor
65 |
66 | NamedHivePartitionSensor()
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `MetastorePartitionSensor` from `airflow.providers.apache.hive.sensors.metastore_partition` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor` instead.
AIR302_hive.py:66:1: AIR302 `airflow.sensors.named_hive_partition_sensor.NamedHivePartitionSensor` is moved into `apache-hive` provider in Airflow 3.0;
|
@@ -205,4 +205,4 @@ AIR302_hive.py:66:1: AIR302 `airflow.sensors.named_hive_partition_sensor.NamedHi
66 | NamedHivePartitionSensor()
| ^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `NamedHivePartitionSensor` from `airflow.providers.apache.hive.sensors.named_hive_partition` instead.
= help: Install `apache-airflow-providers-apache-hive>=1.0.0` and use `airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor` instead.
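
A minimal sketch covering a representative subset of the hive migrations above, assuming `apache-airflow-providers-apache-hive>=5.1.0` is installed (which also satisfies the `>=1.0.0` requirements):

```python
# Sketch only: updated import locations per the help messages above.
from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveServer2Hook
from airflow.providers.apache.hive.macros.hive import closest_ds_partition, max_partition
from airflow.providers.apache.hive.operators.hive import HiveOperator
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
```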

View File

@@ -10,7 +10,7 @@ AIR302_http.py:7:1: AIR302 `airflow.hooks.http_hook.HttpHook` is moved into `htt
8 | SimpleHttpOperator()
9 | HttpSensor()
|
= help: Install `apache-airflow-providers-http>=1.0.0` and use `HttpHook` from `airflow.providers.http.hooks.http` instead.
= help: Install `apache-airflow-providers-http>=1.0.0` and use `airflow.providers.http.hooks.http.HttpHook` instead.
AIR302_http.py:8:1: AIR302 [*] `airflow.operators.http_operator.SimpleHttpOperator` is moved into `http` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_http.py:8:1: AIR302 [*] `airflow.operators.http_operator.SimpleHttpOperat
| ^^^^^^^^^^^^^^^^^^ AIR302
9 | HttpSensor()
|
= help: Install `apache-airflow-providers-http>=5.0.0` and use `HttpOperator` from `airflow.providers.http.operators.http` instead.
= help: Install `apache-airflow-providers-http>=5.0.0` and use `airflow.providers.http.operators.http.HttpOperator` instead.
Safe fix
3 3 | from airflow.hooks.http_hook import HttpHook
@@ -39,4 +39,4 @@ AIR302_http.py:9:1: AIR302 `airflow.sensors.http_sensor.HttpSensor` is moved int
9 | HttpSensor()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-http>=1.0.0` and use `HttpSensor` from `airflow.providers.http.sensors.http` instead.
= help: Install `apache-airflow-providers-http>=1.0.0` and use `airflow.providers.http.sensors.http.HttpSensor` instead.
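
Note that the `SimpleHttpOperator` finding above is a rename as well as a move: the class becomes `HttpOperator`. A minimal sketch, assuming `apache-airflow-providers-http>=5.0.0` is installed; the connection id and endpoint are placeholders:

```python
# Sketch only: the renamed operator from the http provider.
from airflow.providers.http.operators.http import HttpOperator

ping = HttpOperator(
    task_id="ping",
    http_conn_id="my_http",  # placeholder connection
    endpoint="health",       # placeholder endpoint
    method="GET",
)
```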

View File

@@ -9,7 +9,7 @@ AIR302_jdbc.py:8:1: AIR302 `airflow.hooks.jdbc_hook.JdbcHook` is moved into `jdb
| ^^^^^^^^ AIR302
9 | jaydebeapi()
|
= help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `JdbcHook` from `airflow.providers.jdbc.hooks.jdbc` instead.
= help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `airflow.providers.jdbc.hooks.jdbc.JdbcHook` instead.
AIR302_jdbc.py:9:1: AIR302 `airflow.hooks.jdbc_hook.jaydebeapi` is moved into `jdbc` provider in Airflow 3.0;
|
@@ -17,4 +17,4 @@ AIR302_jdbc.py:9:1: AIR302 `airflow.hooks.jdbc_hook.jaydebeapi` is moved into `j
9 | jaydebeapi()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `jaydebeapi` from `airflow.providers.jdbc.hooks.jdbc` instead.
= help: Install `apache-airflow-providers-jdbc>=1.0.0` and use `airflow.providers.jdbc.hooks.jdbc.jaydebeapi` instead.
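
A minimal sketch of the updated jdbc import, assuming `apache-airflow-providers-jdbc>=1.0.0` is installed; the connection id is a placeholder:

```python
# Sketch only: updated import location per the help messages above.
from airflow.providers.jdbc.hooks.jdbc import JdbcHook

hook = JdbcHook(jdbc_conn_id="my_jdbc")  # placeholder connection id
```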

View File

@@ -9,7 +9,7 @@ AIR302_kubernetes.py:29:1: AIR302 `airflow.executors.kubernetes_executor_types.A
| ^^^^^^^^^^^^^^ AIR302
30 | POD_EXECUTOR_DONE_KEY
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `ALL_NAMESPACES` from `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.ALL_NAMESPACES` instead.
AIR302_kubernetes.py:30:1: AIR302 `airflow.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_kubernetes.py:30:1: AIR302 `airflow.executors.kubernetes_executor_types.P
31 |
32 | K8SModel()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `POD_EXECUTOR_DONE_KEY` from `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY` instead.
AIR302_kubernetes.py:32:1: AIR302 `airflow.kubernetes.k8s_model.K8SModel` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -29,7 +29,7 @@ AIR302_kubernetes.py:32:1: AIR302 `airflow.kubernetes.k8s_model.K8SModel` is mov
| ^^^^^^^^ AIR302
33 | append_to_pod()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `K8SModel` from `airflow.providers.cncf.kubernetes.k8s_model` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.k8s_model.K8SModel` instead.
AIR302_kubernetes.py:33:1: AIR302 `airflow.kubernetes.k8s_model.append_to_pod` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -39,7 +39,7 @@ AIR302_kubernetes.py:33:1: AIR302 `airflow.kubernetes.k8s_model.append_to_pod` i
34 |
35 | _disable_verify_ssl()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `append_to_pod` from `airflow.providers.cncf.kubernetes.k8s_model` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.k8s_model.append_to_pod` instead.
AIR302_kubernetes.py:35:1: AIR302 `airflow.kubernetes.kube_client._disable_verify_ssl` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -50,7 +50,7 @@ AIR302_kubernetes.py:35:1: AIR302 `airflow.kubernetes.kube_client._disable_verif
36 | _enable_tcp_keepalive()
37 | get_kube_client()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `_disable_verify_ssl` from `airflow.providers.cncf.kubernetes.kube_client` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kube_client._disable_verify_ssl` instead.
AIR302_kubernetes.py:36:1: AIR302 `airflow.kubernetes.kube_client._enable_tcp_keepalive` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -59,7 +59,7 @@ AIR302_kubernetes.py:36:1: AIR302 `airflow.kubernetes.kube_client._enable_tcp_ke
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
37 | get_kube_client()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `_enable_tcp_keepalive` from `airflow.providers.cncf.kubernetes.kube_client` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kube_client._enable_tcp_keepalive` instead.
AIR302_kubernetes.py:37:1: AIR302 `airflow.kubernetes.kube_client.get_kube_client` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -70,7 +70,7 @@ AIR302_kubernetes.py:37:1: AIR302 `airflow.kubernetes.kube_client.get_kube_clien
38 |
39 | add_pod_suffix()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `get_kube_client` from `airflow.providers.cncf.kubernetes.kube_client` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kube_client.get_kube_client` instead.
AIR302_kubernetes.py:39:1: AIR302 [*] `airflow.kubernetes.kubernetes_helper_functions.add_pod_suffix` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -80,7 +80,7 @@ AIR302_kubernetes.py:39:1: AIR302 [*] `airflow.kubernetes.kubernetes_helper_func
| ^^^^^^^^^^^^^^ AIR302
40 | create_pod_id()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `add_unique_suffix` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_unique_suffix` instead.
Safe fix
25 25 | Port,
@@ -108,7 +108,7 @@ AIR302_kubernetes.py:40:1: AIR302 [*] `airflow.kubernetes.kubernetes_helper_func
41 |
42 | annotations_for_logging_task_metadata()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `create_unique_id` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.create_unique_id` instead.
Safe fix
25 25 | Port,
@@ -137,7 +137,7 @@ AIR302_kubernetes.py:42:1: AIR302 `airflow.kubernetes.kubernetes_helper_function
43 | annotations_to_key()
44 | get_logs_task_metadata()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `annotations_for_logging_task_metadata` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_for_logging_task_metadata` instead.
AIR302_kubernetes.py:43:1: AIR302 `airflow.kubernetes.kubernetes_helper_functions.annotations_to_key` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -147,7 +147,7 @@ AIR302_kubernetes.py:43:1: AIR302 `airflow.kubernetes.kubernetes_helper_function
44 | get_logs_task_metadata()
45 | rand_str()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `annotations_to_key` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_to_key` instead.
AIR302_kubernetes.py:44:1: AIR302 `airflow.kubernetes.kubernetes_helper_functions.get_logs_task_metadata` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -157,7 +157,7 @@ AIR302_kubernetes.py:44:1: AIR302 `airflow.kubernetes.kubernetes_helper_function
| ^^^^^^^^^^^^^^^^^^^^^^ AIR302
45 | rand_str()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `get_logs_task_metadata` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.get_logs_task_metadata` instead.
AIR302_kubernetes.py:45:1: AIR302 `airflow.kubernetes.kubernetes_helper_functions.rand_str` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -168,7 +168,7 @@ AIR302_kubernetes.py:45:1: AIR302 `airflow.kubernetes.kubernetes_helper_function
46 |
47 | Port()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `rand_str` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str` instead.
AIR302_kubernetes.py:47:1: AIR302 [*] `airflow.kubernetes.pod.Port` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -178,7 +178,7 @@ AIR302_kubernetes.py:47:1: AIR302 [*] `airflow.kubernetes.pod.Port` is moved int
| ^^^^ AIR302
48 | Resources()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `V1ContainerPort` from `kubernetes.client.models` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `kubernetes.client.models.V1ContainerPort` instead.
Safe fix
25 25 | Port,
@@ -204,7 +204,7 @@ AIR302_kubernetes.py:48:1: AIR302 [*] `airflow.kubernetes.pod.Resources` is move
48 | Resources()
| ^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `V1ResourceRequirements` from `kubernetes.client.models` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `kubernetes.client.models.V1ResourceRequirements` instead.
Safe fix
25 25 | Port,
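
Unlike most findings in this file, `Port` and `Resources` map to the upstream Kubernetes client models rather than to an Airflow provider module. A minimal sketch, with placeholder field values:

```python
# Sketch only: the upstream kubernetes.client replacements named above.
from kubernetes.client.models import V1ContainerPort, V1ResourceRequirements

port = V1ContainerPort(container_port=8080)  # placeholder port
resources = V1ResourceRequirements(requests={"cpu": "100m", "memory": "128Mi"})
```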
@@ -233,7 +233,7 @@ AIR302_kubernetes.py:64:1: AIR302 `airflow.kubernetes.pod_generator.datetime_to_
65 | extend_object_field()
66 | label_safe_datestring_to_datetime()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `datetime_to_label_safe_datestring` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.datetime_to_label_safe_datestring` instead.
AIR302_kubernetes.py:65:1: AIR302 `airflow.kubernetes.pod_generator.extend_object_field` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -243,7 +243,7 @@ AIR302_kubernetes.py:65:1: AIR302 `airflow.kubernetes.pod_generator.extend_objec
66 | label_safe_datestring_to_datetime()
67 | make_safe_label_value()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `extend_object_field` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.extend_object_field` instead.
AIR302_kubernetes.py:66:1: AIR302 `airflow.kubernetes.pod_generator.label_safe_datestring_to_datetime` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -254,7 +254,7 @@ AIR302_kubernetes.py:66:1: AIR302 `airflow.kubernetes.pod_generator.label_safe_d
67 | make_safe_label_value()
68 | merge_objects()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `label_safe_datestring_to_datetime` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.label_safe_datestring_to_datetime` instead.
AIR302_kubernetes.py:67:1: AIR302 `airflow.kubernetes.pod_generator.make_safe_label_value` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -265,7 +265,7 @@ AIR302_kubernetes.py:67:1: AIR302 `airflow.kubernetes.pod_generator.make_safe_la
68 | merge_objects()
69 | PodGenerator()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `make_safe_label_value` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value` instead.
AIR302_kubernetes.py:68:1: AIR302 `airflow.kubernetes.pod_generator.merge_objects` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -276,7 +276,7 @@ AIR302_kubernetes.py:68:1: AIR302 `airflow.kubernetes.pod_generator.merge_object
69 | PodGenerator()
70 | PodDefaults()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `merge_objects` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.merge_objects` instead.
AIR302_kubernetes.py:69:1: AIR302 `airflow.kubernetes.pod_generator.PodGenerator` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -287,7 +287,7 @@ AIR302_kubernetes.py:69:1: AIR302 `airflow.kubernetes.pod_generator.PodGenerator
70 | PodDefaults()
71 | PodGeneratorDeprecated()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodGenerator` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.PodGenerator` instead.
AIR302_kubernetes.py:70:1: AIR302 `airflow.kubernetes.pod_generator.PodDefaults` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -298,7 +298,7 @@ AIR302_kubernetes.py:70:1: AIR302 `airflow.kubernetes.pod_generator.PodDefaults`
71 | PodGeneratorDeprecated()
72 | add_pod_suffix()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodDefaults` from `airflow.providers.cncf.kubernetes.utils.xcom_sidecar` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.utils.xcom_sidecar.PodDefaults` instead.
AIR302_kubernetes.py:71:1: AIR302 `airflow.kubernetes.pod_generator.PodGeneratorDeprecated` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -309,7 +309,7 @@ AIR302_kubernetes.py:71:1: AIR302 `airflow.kubernetes.pod_generator.PodGenerator
72 | add_pod_suffix()
73 | rand_str()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodGenerator` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.PodGenerator` instead.
AIR302_kubernetes.py:72:1: AIR302 [*] `airflow.kubernetes.pod_generator.add_pod_suffix` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -319,7 +319,7 @@ AIR302_kubernetes.py:72:1: AIR302 [*] `airflow.kubernetes.pod_generator.add_pod_
| ^^^^^^^^^^^^^^ AIR302
73 | rand_str()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `add_unique_suffix` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=10.0.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_unique_suffix` instead.
Safe fix
60 60 | merge_objects,
@@ -346,7 +346,7 @@ AIR302_kubernetes.py:73:1: AIR302 `airflow.kubernetes.pod_generator.rand_str` is
73 | rand_str()
| ^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `rand_str` from `airflow.providers.cncf.kubernetes.kubernetes_helper_functions` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str` instead.
AIR302_kubernetes.py:86:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.PodDefaults` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -357,7 +357,7 @@ AIR302_kubernetes.py:86:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.P
87 | PodGenerator()
88 | make_safe_label_value()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodDefaults` from `airflow.providers.cncf.kubernetes.utils.xcom_sidecar` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.utils.xcom_sidecar.PodDefaults` instead.
AIR302_kubernetes.py:87:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.PodGenerator` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -366,7 +366,7 @@ AIR302_kubernetes.py:87:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.P
| ^^^^^^^^^^^^ AIR302
88 | make_safe_label_value()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodGenerator` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.PodGenerator` instead.
AIR302_kubernetes.py:88:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.make_safe_label_value` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -377,7 +377,7 @@ AIR302_kubernetes.py:88:1: AIR302 `airflow.kubernetes.pod_generator_deprecated.m
89 |
90 | PodLauncher()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `make_safe_label_value` from `airflow.providers.cncf.kubernetes.pod_generator` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value` instead.
AIR302_kubernetes.py:90:1: AIR302 [*] `airflow.kubernetes.pod_launcher.PodLauncher` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -387,7 +387,7 @@ AIR302_kubernetes.py:90:1: AIR302 [*] `airflow.kubernetes.pod_launcher.PodLaunch
| ^^^^^^^^^^^ AIR302
91 | PodStatus()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `PodManager` from `airflow.providers.cncf.kubernetes.utils.pod_manager` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `airflow.providers.cncf.kubernetes.utils.pod_manager.PodManager` instead.
Safe fix
82 82 | PodLauncher,
@@ -411,7 +411,7 @@ AIR302_kubernetes.py:91:1: AIR302 [*] `airflow.kubernetes.pod_launcher.PodStatus
91 | PodStatus()
| ^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `PodPhase` from `airflow.providers.cncf.kubernetes.utils.pod_manager` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `airflow.providers.cncf.kubernetes.utils.pod_manager.PodPhase` instead.
Safe fix
82 82 | PodLauncher,
@@ -439,7 +439,7 @@ AIR302_kubernetes.py:108:1: AIR302 `airflow.kubernetes.pod_launcher_deprecated.P
109 | PodLauncher()
110 | PodStatus()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `PodDefaults` from `airflow.providers.cncf.kubernetes.utils.xcom_sidecar` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.utils.xcom_sidecar.PodDefaults` instead.
AIR302_kubernetes.py:109:1: AIR302 [*] `airflow.kubernetes.pod_launcher_deprecated.PodLauncher` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -449,7 +449,7 @@ AIR302_kubernetes.py:109:1: AIR302 [*] `airflow.kubernetes.pod_launcher_deprecat
110 | PodStatus()
111 | get_kube_client()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `PodManager` from `airflow.providers.cncf.kubernetes.utils.pod_manager` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `airflow.providers.cncf.kubernetes.utils.pod_manager.PodManager` instead.
Safe fix
104 104 | )
@@ -472,7 +472,7 @@ AIR302_kubernetes.py:110:1: AIR302 [*] `airflow.kubernetes.pod_launcher_deprecat
| ^^^^^^^^^ AIR302
111 | get_kube_client()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `PodPhase` from `airflow.providers.cncf.kubernetes.utils.pod_manager` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=3.0.0` and use `airflow.providers.cncf.kubernetes.utils.pod_manager.PodPhase` instead.
Safe fix
104 104 | )
@@ -497,7 +497,7 @@ AIR302_kubernetes.py:111:1: AIR302 `airflow.kubernetes.pod_launcher_deprecated.g
112 |
113 | PodRuntimeInfoEnv()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `get_kube_client` from `airflow.providers.cncf.kubernetes.kube_client` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.kube_client.get_kube_client` instead.
AIR302_kubernetes.py:113:1: AIR302 [*] `airflow.kubernetes.pod_runtime_info_env.PodRuntimeInfoEnv` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -508,7 +508,7 @@ AIR302_kubernetes.py:113:1: AIR302 [*] `airflow.kubernetes.pod_runtime_info_env.
114 | K8SModel()
115 | Secret()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `V1EnvVar` from `kubernetes.client.models` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `kubernetes.client.models.V1EnvVar` instead.
Safe fix
104 104 | )
@@ -535,7 +535,7 @@ AIR302_kubernetes.py:114:1: AIR302 `airflow.kubernetes.secret.K8SModel` is moved
115 | Secret()
116 | Volume()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `K8SModel` from `airflow.providers.cncf.kubernetes.k8s_model` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.k8s_model.K8SModel` instead.
AIR302_kubernetes.py:115:1: AIR302 `airflow.kubernetes.secret.Secret` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -546,7 +546,7 @@ AIR302_kubernetes.py:115:1: AIR302 `airflow.kubernetes.secret.Secret` is moved i
116 | Volume()
117 | VolumeMount()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `Secret` from `airflow.providers.cncf.kubernetes.secret` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `airflow.providers.cncf.kubernetes.secret.Secret` instead.
AIR302_kubernetes.py:116:1: AIR302 [*] `airflow.kubernetes.volume.Volume` is moved into `cncf-kubernetes` provider in Airflow 3.0;
|
@@ -556,7 +556,7 @@ AIR302_kubernetes.py:116:1: AIR302 [*] `airflow.kubernetes.volume.Volume` is mov
| ^^^^^^ AIR302
117 | VolumeMount()
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `V1Volume` from `kubernetes.client.models` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `kubernetes.client.models.V1Volume` instead.
Safe fix
104 104 | )
@@ -581,7 +581,7 @@ AIR302_kubernetes.py:117:1: AIR302 [*] `airflow.kubernetes.volume_mount.VolumeMo
117 | VolumeMount()
| ^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `V1VolumeMount` from `kubernetes.client.models` instead.
= help: Install `apache-airflow-providers-cncf-kubernetes>=7.4.0` and use `kubernetes.client.models.V1VolumeMount` instead.
Safe fix
104 104 | )

View File

@@ -10,7 +10,7 @@ AIR302_mysql.py:9:1: AIR302 `airflow.hooks.mysql_hook.MySqlHook` is moved into `
10 | PrestoToMySqlOperator()
11 | PrestoToMySqlTransfer()
|
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `MySqlHook` from `airflow.providers.mysql.hooks.mysql` instead.
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `airflow.providers.mysql.hooks.mysql.MySqlHook` instead.
AIR302_mysql.py:10:1: AIR302 `airflow.operators.presto_to_mysql.PrestoToMySqlOperator` is moved into `mysql` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_mysql.py:10:1: AIR302 `airflow.operators.presto_to_mysql.PrestoToMySqlOpe
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
11 | PrestoToMySqlTransfer()
|
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `PrestoToMySqlOperator` from `airflow.providers.mysql.transfers.presto_to_mysql` instead.
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator` instead.
AIR302_mysql.py:11:1: AIR302 `airflow.operators.presto_to_mysql.PrestoToMySqlTransfer` is moved into `mysql` provider in Airflow 3.0;
|
@@ -28,4 +28,4 @@ AIR302_mysql.py:11:1: AIR302 `airflow.operators.presto_to_mysql.PrestoToMySqlTra
11 | PrestoToMySqlTransfer()
| ^^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `PrestoToMySqlOperator` from `airflow.providers.mysql.transfers.presto_to_mysql` instead.
= help: Install `apache-airflow-providers-mysql>=1.0.0` and use `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator` instead.
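Note: every AIR302 fix in these snapshots is the same mechanical import migration. A minimal before/after sketch using the `MySqlHook` paths quoted in the help messages above (the bare constructor call mirrors the fixture; real DAGs would pass connection arguments):

```python
# Before: flagged by AIR302 under Airflow 3.0
# from airflow.hooks.mysql_hook import MySqlHook

# After: requires `apache-airflow-providers-mysql>=1.0.0`
from airflow.providers.mysql.hooks.mysql import MySqlHook

hook = MySqlHook()  # call sites stay the same; only the import moves
```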

View File

@@ -8,4 +8,4 @@ AIR302_oracle.py:5:1: AIR302 `airflow.hooks.oracle_hook.OracleHook` is moved int
5 | OracleHook()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-oracle>=1.0.0` and use `OracleHook` from `airflow.providers.oracle.hooks.oracle` instead.
= help: Install `apache-airflow-providers-oracle>=1.0.0` and use `airflow.providers.oracle.hooks.oracle.OracleHook` instead.

View File

@@ -8,4 +8,4 @@ AIR302_papermill.py:5:1: AIR302 `airflow.operators.papermill_operator.PapermillO
5 | PapermillOperator()
| ^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-papermill>=1.0.0` and use `PapermillOperator` from `airflow.providers.papermill.operators.papermill` instead.
= help: Install `apache-airflow-providers-papermill>=1.0.0` and use `airflow.providers.papermill.operators.papermill.PapermillOperator` instead.

View File

@@ -9,7 +9,7 @@ AIR302_pig.py:6:1: AIR302 `airflow.hooks.pig_hook.PigCliHook` is moved into `apa
| ^^^^^^^^^^ AIR302
7 | PigOperator()
|
= help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `PigCliHook` from `airflow.providers.apache.pig.hooks.pig` instead.
= help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `airflow.providers.apache.pig.hooks.pig.PigCliHook` instead.
AIR302_pig.py:7:1: AIR302 `airflow.operators.pig_operator.PigOperator` is moved into `apache-pig` provider in Airflow 3.0;
|
@@ -17,4 +17,4 @@ AIR302_pig.py:7:1: AIR302 `airflow.operators.pig_operator.PigOperator` is moved
7 | PigOperator()
| ^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `PigOperator` from `airflow.providers.apache.pig.operators.pig` instead.
= help: Install `apache-airflow-providers-apache-pig>=1.0.0` and use `airflow.providers.apache.pig.operators.pig.PigOperator` instead.

View File

@@ -9,7 +9,7 @@ AIR302_postgres.py:6:1: AIR302 `airflow.hooks.postgres_hook.PostgresHook` is mov
| ^^^^^^^^^^^^ AIR302
7 | Mapping()
|
= help: Install `apache-airflow-providers-postgres>=1.0.0` and use `PostgresHook` from `airflow.providers.postgres.hooks.postgres` instead.
= help: Install `apache-airflow-providers-postgres>=1.0.0` and use `airflow.providers.postgres.hooks.postgres.PostgresHook` instead.
AIR302_postgres.py:7:1: AIR302 `airflow.operators.postgres_operator.Mapping` is removed in Airflow 3.0
|

View File

@@ -8,4 +8,4 @@ AIR302_presto.py:5:1: AIR302 `airflow.hooks.presto_hook.PrestoHook` is moved int
5 | PrestoHook()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-presto>=1.0.0` and use `PrestoHook` from `airflow.providers.presto.hooks.presto` instead.
= help: Install `apache-airflow-providers-presto>=1.0.0` and use `airflow.providers.presto.hooks.presto.PrestoHook` instead.

View File

@@ -8,4 +8,4 @@ AIR302_samba.py:5:1: AIR302 `airflow.hooks.samba_hook.SambaHook` is moved into `
5 | SambaHook()
| ^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-samba>=1.0.0` and use `SambaHook` from `airflow.providers.samba.hooks.samba` instead.
= help: Install `apache-airflow-providers-samba>=1.0.0` and use `airflow.providers.samba.hooks.samba.SambaHook` instead.

View File

@@ -10,7 +10,7 @@ AIR302_slack.py:6:1: AIR302 `airflow.hooks.slack_hook.SlackHook` is moved into `
7 | SlackAPIOperator()
8 | SlackAPIPostOperator()
|
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackHook` from `airflow.providers.slack.hooks.slack` instead.
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `airflow.providers.slack.hooks.slack.SlackHook` instead.
AIR302_slack.py:7:1: AIR302 `airflow.operators.slack_operator.SlackAPIOperator` is moved into `slack` provider in Airflow 3.0;
|
@@ -19,7 +19,7 @@ AIR302_slack.py:7:1: AIR302 `airflow.operators.slack_operator.SlackAPIOperator`
| ^^^^^^^^^^^^^^^^ AIR302
8 | SlackAPIPostOperator()
|
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackAPIOperator` from `airflow.providers.slack.operators.slack` instead.
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `airflow.providers.slack.operators.slack.SlackAPIOperator` instead.
AIR302_slack.py:8:1: AIR302 `airflow.operators.slack_operator.SlackAPIPostOperator` is moved into `slack` provider in Airflow 3.0;
|
@@ -28,4 +28,4 @@ AIR302_slack.py:8:1: AIR302 `airflow.operators.slack_operator.SlackAPIPostOperat
8 | SlackAPIPostOperator()
| ^^^^^^^^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `SlackAPIPostOperator` from `airflow.providers.slack.operators.slack` instead.
= help: Install `apache-airflow-providers-slack>=1.0.0` and use `airflow.providers.slack.operators.slack.SlackAPIPostOperator` instead.

View File

@@ -10,7 +10,7 @@ AIR302_smtp.py:5:1: AIR302 `airflow.operators.email_operator.EmailOperator` is m
6 |
7 | from airflow.operators.email import EmailOperator
|
= help: Install `apache-airflow-providers-smtp>=1.0.0` and use `EmailOperator` from `airflow.providers.smtp.operators.smtp` instead.
= help: Install `apache-airflow-providers-smtp>=1.0.0` and use `airflow.providers.smtp.operators.smtp.EmailOperator` instead.
AIR302_smtp.py:9:1: AIR302 `airflow.operators.email.EmailOperator` is moved into `smtp` provider in Airflow 3.0;
|
@@ -19,4 +19,4 @@ AIR302_smtp.py:9:1: AIR302 `airflow.operators.email.EmailOperator` is moved into
9 | EmailOperator()
| ^^^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-smtp>=1.0.0` and use `EmailOperator` from `airflow.providers.smtp.operators.smtp` instead.
= help: Install `apache-airflow-providers-smtp>=1.0.0` and use `airflow.providers.smtp.operators.smtp.EmailOperator` instead.

View File

@@ -8,4 +8,4 @@ AIR302_sqlite.py:5:1: AIR302 `airflow.hooks.sqlite_hook.SqliteHook` is moved int
5 | SqliteHook()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-sqlite>=1.0.0` and use `SqliteHook` from `airflow.providers.sqlite.hooks.sqlite` instead.
= help: Install `apache-airflow-providers-sqlite>=1.0.0` and use `airflow.providers.sqlite.hooks.sqlite.SqliteHook` instead.

View File

@@ -10,7 +10,7 @@ AIR302_standard.py:25:1: AIR302 `airflow.operators.bash_operator.BashOperator` i
26 |
27 | TriggerDagRunLink()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BashOperator` from `airflow.providers.standard.operators.bash` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.bash.BashOperator` instead.
AIR302_standard.py:27:1: AIR302 `airflow.operators.dagrun_operator.TriggerDagRunLink` is moved into `standard` provider in Airflow 3.0;
|
@@ -21,7 +21,7 @@ AIR302_standard.py:27:1: AIR302 `airflow.operators.dagrun_operator.TriggerDagRun
28 | TriggerDagRunOperator()
29 | DummyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunLink` from `airflow.providers.standard.operators.trigger_dagrun` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink` instead.
AIR302_standard.py:28:1: AIR302 `airflow.operators.dagrun_operator.TriggerDagRunOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -31,7 +31,7 @@ AIR302_standard.py:28:1: AIR302 `airflow.operators.dagrun_operator.TriggerDagRun
29 | DummyOperator()
30 | EmptyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunOperator` from `airflow.providers.standard.operators.trigger_dagrun` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator` instead.
AIR302_standard.py:29:1: AIR302 `airflow.operators.dummy.DummyOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -41,7 +41,7 @@ AIR302_standard.py:29:1: AIR302 `airflow.operators.dummy.DummyOperator` is moved
| ^^^^^^^^^^^^^ AIR302
30 | EmptyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.empty.EmptyOperator` instead.
AIR302_standard.py:30:1: AIR302 `airflow.operators.dummy.EmptyOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -52,7 +52,7 @@ AIR302_standard.py:30:1: AIR302 `airflow.operators.dummy.EmptyOperator` is moved
31 |
32 | LatestOnlyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.empty.EmptyOperator` instead.
AIR302_standard.py:32:1: AIR302 `airflow.operators.latest_only_operator.LatestOnlyOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -63,7 +63,7 @@ AIR302_standard.py:32:1: AIR302 `airflow.operators.latest_only_operator.LatestOn
33 |
34 | BranchPythonOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `LatestOnlyOperator` from `airflow.providers.standard.operators.latest_only` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.operators.latest_only.LatestOnlyOperator` instead.
AIR302_standard.py:34:1: AIR302 `airflow.operators.python_operator.BranchPythonOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -74,7 +74,7 @@ AIR302_standard.py:34:1: AIR302 `airflow.operators.python_operator.BranchPythonO
35 | PythonOperator()
36 | PythonVirtualenvOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BranchPythonOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.BranchPythonOperator` instead.
AIR302_standard.py:35:1: AIR302 `airflow.operators.python_operator.PythonOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -84,7 +84,7 @@ AIR302_standard.py:35:1: AIR302 `airflow.operators.python_operator.PythonOperato
36 | PythonVirtualenvOperator()
37 | ShortCircuitOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.PythonOperator` instead.
AIR302_standard.py:36:1: AIR302 `airflow.operators.python_operator.PythonVirtualenvOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -94,7 +94,7 @@ AIR302_standard.py:36:1: AIR302 `airflow.operators.python_operator.PythonVirtual
| ^^^^^^^^^^^^^^^^^^^^^^^^ AIR302
37 | ShortCircuitOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonVirtualenvOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.PythonVirtualenvOperator` instead.
AIR302_standard.py:37:1: AIR302 `airflow.operators.python_operator.ShortCircuitOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -105,7 +105,7 @@ AIR302_standard.py:37:1: AIR302 `airflow.operators.python_operator.ShortCircuitO
38 |
39 | ExternalTaskMarker()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `ShortCircuitOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.ShortCircuitOperator` instead.
AIR302_standard.py:39:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTaskMarker` is moved into `standard` provider in Airflow 3.0;
|
@@ -116,7 +116,7 @@ AIR302_standard.py:39:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTa
40 | ExternalTaskSensor()
41 | ExternalTaskSensorLink()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskMarker` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalTaskMarker` instead.
AIR302_standard.py:40:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTaskSensor` is moved into `standard` provider in Airflow 3.0;
|
@@ -125,7 +125,7 @@ AIR302_standard.py:40:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTa
| ^^^^^^^^^^^^^^^^^^ AIR302
41 | ExternalTaskSensorLink()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskSensor` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalTaskSensor` instead.
AIR302_standard.py:41:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTaskSensorLink` is moved into `standard` provider in Airflow 3.0;
|
@@ -136,7 +136,7 @@ AIR302_standard.py:41:1: AIR302 `airflow.sensors.external_task_sensor.ExternalTa
42 |
43 | from airflow.operators.dummy_operator import (
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskSensorLink` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink` instead.
AIR302_standard.py:48:1: AIR302 `airflow.operators.dummy_operator.DummyOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -146,7 +146,7 @@ AIR302_standard.py:48:1: AIR302 `airflow.operators.dummy_operator.DummyOperator`
| ^^^^^^^^^^^^^ AIR302
49 | EmptyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.empty.EmptyOperator` instead.
AIR302_standard.py:49:1: AIR302 `airflow.operators.dummy_operator.EmptyOperator` is moved into `standard` provider in Airflow 3.0;
|
@@ -156,7 +156,7 @@ AIR302_standard.py:49:1: AIR302 `airflow.operators.dummy_operator.EmptyOperator`
50 |
51 | from airflow.hooks.subprocess import SubprocessResult
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.empty.EmptyOperator` instead.
AIR302_standard.py:52:1: AIR302 `airflow.hooks.subprocess.SubprocessResult` is moved into `standard` provider in Airflow 3.0;
|
@@ -166,7 +166,7 @@ AIR302_standard.py:52:1: AIR302 `airflow.hooks.subprocess.SubprocessResult` is m
53 | from airflow.hooks.subprocess import working_directory
54 | working_directory()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `SubprocessResult` from `airflow.providers.standard.hooks.subprocess` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.hooks.subprocess.SubprocessResult` instead.
AIR302_standard.py:54:1: AIR302 `airflow.hooks.subprocess.working_directory` is moved into `standard` provider in Airflow 3.0;
|
@@ -177,7 +177,7 @@ AIR302_standard.py:54:1: AIR302 `airflow.hooks.subprocess.working_directory` is
55 | from airflow.operators.datetime import target_times_as_dates
56 | target_times_as_dates()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `working_directory` from `airflow.providers.standard.hooks.subprocess` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.hooks.subprocess.working_directory` instead.
AIR302_standard.py:56:1: AIR302 `airflow.operators.datetime.target_times_as_dates` is moved into `standard` provider in Airflow 3.0;
|
@@ -188,7 +188,7 @@ AIR302_standard.py:56:1: AIR302 `airflow.operators.datetime.target_times_as_date
57 | from airflow.operators.trigger_dagrun import TriggerDagRunLink
58 | TriggerDagRunLink()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `target_times_as_dates` from `airflow.providers.standard.operators.datetime` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.datetime.target_times_as_dates` instead.
AIR302_standard.py:58:1: AIR302 `airflow.operators.trigger_dagrun.TriggerDagRunLink` is moved into `standard` provider in Airflow 3.0;
|
@@ -199,7 +199,7 @@ AIR302_standard.py:58:1: AIR302 `airflow.operators.trigger_dagrun.TriggerDagRunL
59 | from airflow.sensors.external_task import ExternalTaskSensorLink
60 | ExternalTaskSensorLink()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunLink` from `airflow.providers.standard.operators.trigger_dagrun` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink` instead.
AIR302_standard.py:60:1: AIR302 [*] `airflow.sensors.external_task.ExternalTaskSensorLink` is moved into `standard` provider in Airflow 3.0;
|
@@ -210,7 +210,7 @@ AIR302_standard.py:60:1: AIR302 [*] `airflow.sensors.external_task.ExternalTaskS
61 | from airflow.sensors.time_delta import WaitSensor
62 | WaitSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalDagLink` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalDagLink` instead.
Safe fix
57 57 | from airflow.operators.trigger_dagrun import TriggerDagRunLink
@@ -229,4 +229,4 @@ AIR302_standard.py:62:1: AIR302 `airflow.sensors.time_delta.WaitSensor` is moved
62 | WaitSensor()
| ^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `WaitSensor` from `airflow.providers.standard.sensors.time_delta` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.time_delta.WaitSensor` instead.

View File

@@ -8,4 +8,4 @@ AIR302_zendesk.py:5:1: AIR302 `airflow.hooks.zendesk_hook.ZendeskHook` is moved
5 | ZendeskHook()
| ^^^^^^^^^^^ AIR302
|
= help: Install `apache-airflow-providers-zendesk>=1.0.0` and use `ZendeskHook` from `airflow.providers.zendesk.hooks.zendesk` instead.
= help: Install `apache-airflow-providers-zendesk>=1.0.0` and use `airflow.providers.zendesk.hooks.zendesk.ZendeskHook` instead.

View File

@@ -9,7 +9,7 @@ AIR311_names.py:27:1: AIR311 [*] `airflow.Dataset` is removed in Airflow 3.0; It
28 |
29 | # airflow.datasets
|
= help: Use `Asset` from `airflow.sdk` instead.
= help: Use `airflow.sdk.Asset` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -32,7 +32,7 @@ AIR311_names.py:30:1: AIR311 [*] `airflow.datasets.Dataset` is removed in Airflo
31 | DatasetAlias()
32 | DatasetAll()
|
= help: Use `Asset` from `airflow.sdk` instead.
= help: Use `airflow.sdk.Asset` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -59,7 +59,7 @@ AIR311_names.py:31:1: AIR311 [*] `airflow.datasets.DatasetAlias` is removed in A
32 | DatasetAll()
33 | DatasetAny()
|
= help: Use `AssetAlias` from `airflow.sdk` instead.
= help: Use `airflow.sdk.AssetAlias` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -87,7 +87,7 @@ AIR311_names.py:32:1: AIR311 [*] `airflow.datasets.DatasetAll` is removed in Air
33 | DatasetAny()
34 | Metadata()
|
= help: Use `AssetAll` from `airflow.sdk` instead.
= help: Use `airflow.sdk.AssetAll` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -116,7 +116,7 @@ AIR311_names.py:33:1: AIR311 [*] `airflow.datasets.DatasetAny` is removed in Air
34 | Metadata()
35 | expand_alias_to_datasets()
|
= help: Use `AssetAny` from `airflow.sdk` instead.
= help: Use `airflow.sdk.AssetAny` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -144,7 +144,7 @@ AIR311_names.py:34:1: AIR311 `airflow.datasets.metadata.Metadata` is removed in
| ^^^^^^^^ AIR311
35 | expand_alias_to_datasets()
|
= help: Use `Metadata` from `airflow.sdk` instead.
= help: Use `airflow.sdk.Metadata` instead
AIR311_names.py:35:1: AIR311 [*] `airflow.datasets.expand_alias_to_datasets` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -155,7 +155,7 @@ AIR311_names.py:35:1: AIR311 [*] `airflow.datasets.expand_alias_to_datasets` is
36 |
37 | # airflow.decorators
|
= help: Use `expand_alias_to_assets` from `airflow.sdk` instead.
= help: Use `airflow.sdk.expand_alias_to_assets` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -183,7 +183,7 @@ AIR311_names.py:38:1: AIR311 `airflow.decorators.dag` is removed in Airflow 3.0;
39 | task()
40 | task_group()
|
= help: Use `dag` from `airflow.sdk` instead.
= help: Use `airflow.sdk.dag` instead
AIR311_names.py:39:1: AIR311 `airflow.decorators.task` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -194,7 +194,7 @@ AIR311_names.py:39:1: AIR311 `airflow.decorators.task` is removed in Airflow 3.0
40 | task_group()
41 | setup()
|
= help: Use `task` from `airflow.sdk` instead.
= help: Use `airflow.sdk.task` instead
AIR311_names.py:40:1: AIR311 `airflow.decorators.task_group` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -205,7 +205,7 @@ AIR311_names.py:40:1: AIR311 `airflow.decorators.task_group` is removed in Airfl
41 | setup()
42 | teardown()
|
= help: Use `task_group` from `airflow.sdk` instead.
= help: Use `airflow.sdk.task_group` instead
AIR311_names.py:41:1: AIR311 `airflow.decorators.setup` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -215,7 +215,7 @@ AIR311_names.py:41:1: AIR311 `airflow.decorators.setup` is removed in Airflow 3.
| ^^^^^ AIR311
42 | teardown()
|
= help: Use `setup` from `airflow.sdk` instead.
= help: Use `airflow.sdk.setup` instead
AIR311_names.py:42:1: AIR311 `airflow.decorators.teardown` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -226,7 +226,7 @@ AIR311_names.py:42:1: AIR311 `airflow.decorators.teardown` is removed in Airflow
43 |
44 | # airflow.io
|
= help: Use `teardown` from `airflow.sdk` instead.
= help: Use `airflow.sdk.teardown` instead
AIR311_names.py:45:1: AIR311 `airflow.io.path.ObjectStoragePath` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -235,7 +235,7 @@ AIR311_names.py:45:1: AIR311 `airflow.io.path.ObjectStoragePath` is removed in A
| ^^^^^^^^^^^^^^^^^ AIR311
46 | attach()
|
= help: Use `ObjectStoragePath` from `airflow.sdk` instead.
= help: Use `airflow.sdk.ObjectStoragePath` instead
AIR311_names.py:46:1: AIR311 `airflow.io.storage.attach` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -246,7 +246,7 @@ AIR311_names.py:46:1: AIR311 `airflow.io.storage.attach` is removed in Airflow 3
47 |
48 | # airflow.models
|
= help: Use `attach` from `airflow.sdk.io` instead.
= help: Use `airflow.sdk.io.attach` instead
AIR311_names.py:49:1: AIR311 `airflow.models.Connection` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -256,7 +256,7 @@ AIR311_names.py:49:1: AIR311 `airflow.models.Connection` is removed in Airflow 3
50 | DAGFromModel()
51 | Variable()
|
= help: Use `Connection` from `airflow.sdk` instead.
= help: Use `airflow.sdk.Connection` instead
AIR311_names.py:50:1: AIR311 [*] `airflow.models.DAG` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -266,7 +266,7 @@ AIR311_names.py:50:1: AIR311 [*] `airflow.models.DAG` is removed in Airflow 3.0;
| ^^^^^^^^^^^^ AIR311
51 | Variable()
|
= help: Use `DAG` from `airflow.sdk` instead.
= help: Use `airflow.sdk.DAG` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -295,7 +295,7 @@ AIR311_names.py:51:1: AIR311 `airflow.models.Variable` is removed in Airflow 3.0
52 |
53 | # airflow.models.baseoperator
|
= help: Use `Variable` from `airflow.sdk` instead.
= help: Use `airflow.sdk.Variable` instead
AIR311_names.py:54:1: AIR311 `airflow.models.baseoperator.chain` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -305,7 +305,7 @@ AIR311_names.py:54:1: AIR311 `airflow.models.baseoperator.chain` is removed in A
55 | chain_linear()
56 | cross_downstream()
|
= help: Use `chain` from `airflow.sdk` instead.
= help: Use `airflow.sdk.chain` instead
AIR311_names.py:55:1: AIR311 `airflow.models.baseoperator.chain_linear` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -315,7 +315,7 @@ AIR311_names.py:55:1: AIR311 `airflow.models.baseoperator.chain_linear` is remov
| ^^^^^^^^^^^^ AIR311
56 | cross_downstream()
|
= help: Use `chain_linear` from `airflow.sdk` instead.
= help: Use `airflow.sdk.chain_linear` instead
AIR311_names.py:56:1: AIR311 `airflow.models.baseoperator.cross_downstream` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -326,7 +326,7 @@ AIR311_names.py:56:1: AIR311 `airflow.models.baseoperator.cross_downstream` is r
57 |
58 | # airflow.models.baseoperatorlink
|
= help: Use `cross_downstream` from `airflow.sdk` instead.
= help: Use `airflow.sdk.cross_downstream` instead
AIR311_names.py:59:1: AIR311 `airflow.models.baseoperatorlink.BaseOperatorLink` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -336,7 +336,7 @@ AIR311_names.py:59:1: AIR311 `airflow.models.baseoperatorlink.BaseOperatorLink`
60 |
61 | # airflow.models.dag
|
= help: Use `BaseOperatorLink` from `airflow.sdk.definitions.baseoperatorlink` instead.
= help: Use `airflow.sdk.definitions.baseoperatorlink.BaseOperatorLink` instead
AIR311_names.py:62:1: AIR311 [*] `airflow.models.dag.DAG` is removed in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -346,7 +346,7 @@ AIR311_names.py:62:1: AIR311 [*] `airflow.models.dag.DAG` is removed in Airflow
63 | # airflow.timetables.datasets
64 | DatasetOrTimeSchedule()
|
= help: Use `DAG` from `airflow.sdk` instead.
= help: Use `airflow.sdk.DAG` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -375,7 +375,7 @@ AIR311_names.py:64:1: AIR311 [*] `airflow.timetables.datasets.DatasetOrTimeSched
65 |
66 | # airflow.utils.dag_parsing_context
|
= help: Use `AssetOrTimeSchedule` from `airflow.timetables.assets` instead.
= help: Use `airflow.timetables.assets.AssetOrTimeSchedule` instead
Safe fix
22 22 | from airflow.models.dag import DAG as DAGFromDag
@@ -401,4 +401,4 @@ AIR311_names.py:67:1: AIR311 `airflow.utils.dag_parsing_context.get_parsing_cont
67 | get_parsing_context()
| ^^^^^^^^^^^^^^^^^^^ AIR311
|
= help: Use `get_parsing_context` from `airflow.sdk` instead.
= help: Use `airflow.sdk.get_parsing_context` instead
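Note: the AIR311 fixes above combine the Dataset-to-Asset rename with a move into the task SDK namespace. A sketch of the suggested rewrite (the URI string is illustrative, not taken from the fixture):

```python
# Before: still importable in Airflow 3.0, but slated for removal (AIR311)
# from airflow.datasets import Dataset
# example = Dataset("s3://bucket/key")

# After: the task-SDK replacement named by the fix
from airflow.sdk import Asset

example = Asset("s3://bucket/key")
```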

View File

@@ -10,7 +10,7 @@ AIR312.py:32:1: AIR312 `airflow.hooks.filesystem.FSHook` is deprecated and moved
33 | PackageIndexHook()
34 | SubprocessHook()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `FSHook` from `airflow.providers.standard.hooks.filesystem` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.hooks.filesystem.FSHook` instead.
AIR312.py:33:1: AIR312 `airflow.hooks.package_index.PackageIndexHook` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -20,7 +20,7 @@ AIR312.py:33:1: AIR312 `airflow.hooks.package_index.PackageIndexHook` is depreca
34 | SubprocessHook()
35 | BashOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `PackageIndexHook` from `airflow.providers.standard.hooks.package_index` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.hooks.package_index.PackageIndexHook` instead.
AIR312.py:34:1: AIR312 `airflow.hooks.subprocess.SubprocessHook` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -31,7 +31,7 @@ AIR312.py:34:1: AIR312 `airflow.hooks.subprocess.SubprocessHook` is deprecated a
35 | BashOperator()
36 | BranchDateTimeOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `SubprocessHook` from `airflow.providers.standard.hooks.subprocess` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.hooks.subprocess.SubprocessHook` instead.
AIR312.py:35:1: AIR312 `airflow.operators.bash.BashOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -42,7 +42,7 @@ AIR312.py:35:1: AIR312 `airflow.operators.bash.BashOperator` is deprecated and m
36 | BranchDateTimeOperator()
37 | TriggerDagRunOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BashOperator` from `airflow.providers.standard.operators.bash` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.bash.BashOperator` instead.
AIR312.py:36:1: AIR312 `airflow.operators.datetime.BranchDateTimeOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -53,7 +53,7 @@ AIR312.py:36:1: AIR312 `airflow.operators.datetime.BranchDateTimeOperator` is de
37 | TriggerDagRunOperator()
38 | EmptyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BranchDateTimeOperator` from `airflow.providers.standard.operators.datetime` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.datetime.BranchDateTimeOperator` instead.
AIR312.py:37:1: AIR312 `airflow.operators.trigger_dagrun.TriggerDagRunOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -64,7 +64,7 @@ AIR312.py:37:1: AIR312 `airflow.operators.trigger_dagrun.TriggerDagRunOperator`
38 | EmptyOperator()
39 | LatestOnlyOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `TriggerDagRunOperator` from `airflow.providers.standard.operators.trigger_dagrun` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator` instead.
AIR312.py:38:1: AIR312 `airflow.operators.empty.EmptyOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -75,7 +75,7 @@ AIR312.py:38:1: AIR312 `airflow.operators.empty.EmptyOperator` is deprecated and
39 | LatestOnlyOperator()
40 | (
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `EmptyOperator` from `airflow.providers.standard.operators.empty` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.operators.empty.EmptyOperator` instead.
AIR312.py:39:1: AIR312 `airflow.operators.latest_only.LatestOnlyOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -86,7 +86,7 @@ AIR312.py:39:1: AIR312 `airflow.operators.latest_only.LatestOnlyOperator` is dep
40 | (
41 | BranchPythonOperator(),
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `LatestOnlyOperator` from `airflow.providers.standard.operators.latest_only` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.operators.latest_only.LatestOnlyOperator` instead.
AIR312.py:41:5: AIR312 `airflow.operators.python.BranchPythonOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -97,7 +97,7 @@ AIR312.py:41:5: AIR312 `airflow.operators.python.BranchPythonOperator` is deprec
42 | PythonOperator(),
43 | PythonVirtualenvOperator(),
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BranchPythonOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.BranchPythonOperator` instead.
AIR312.py:42:5: AIR312 `airflow.operators.python.PythonOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -108,7 +108,7 @@ AIR312.py:42:5: AIR312 `airflow.operators.python.PythonOperator` is deprecated a
43 | PythonVirtualenvOperator(),
44 | ShortCircuitOperator(),
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.PythonOperator` instead.
AIR312.py:43:5: AIR312 `airflow.operators.python.PythonVirtualenvOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -119,7 +119,7 @@ AIR312.py:43:5: AIR312 `airflow.operators.python.PythonVirtualenvOperator` is de
44 | ShortCircuitOperator(),
45 | )
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `PythonVirtualenvOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.PythonVirtualenvOperator` instead.
AIR312.py:44:5: AIR312 `airflow.operators.python.ShortCircuitOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -130,7 +130,7 @@ AIR312.py:44:5: AIR312 `airflow.operators.python.ShortCircuitOperator` is deprec
45 | )
46 | BranchDayOfWeekOperator()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `ShortCircuitOperator` from `airflow.providers.standard.operators.python` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.python.ShortCircuitOperator` instead.
AIR312.py:46:1: AIR312 `airflow.operators.weekday.BranchDayOfWeekOperator` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -141,7 +141,7 @@ AIR312.py:46:1: AIR312 `airflow.operators.weekday.BranchDayOfWeekOperator` is de
47 | DateTimeSensor(), DateTimeSensorAsync()
48 | ExternalTaskMarker(), ExternalTaskSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `BranchDayOfWeekOperator` from `airflow.providers.standard.operators.weekday` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.operators.weekday.BranchDayOfWeekOperator` instead.
AIR312.py:47:1: AIR312 `airflow.sensors.date_time.DateTimeSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -152,7 +152,7 @@ AIR312.py:47:1: AIR312 `airflow.sensors.date_time.DateTimeSensor` is deprecated
48 | ExternalTaskMarker(), ExternalTaskSensor()
49 | FileSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `DateTimeSensor` from `airflow.providers.standard.sensors.date_time` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.date_time.DateTimeSensor` instead.
AIR312.py:47:19: AIR312 `airflow.sensors.date_time.DateTimeSensorAsync` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -163,7 +163,7 @@ AIR312.py:47:19: AIR312 `airflow.sensors.date_time.DateTimeSensorAsync` is depre
48 | ExternalTaskMarker(), ExternalTaskSensor()
49 | FileSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `DateTimeSensorAsync` from `airflow.providers.standard.sensors.date_time` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.date_time.DateTimeSensorAsync` instead.
AIR312.py:48:1: AIR312 `airflow.sensors.external_task.ExternalTaskMarker` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -174,7 +174,7 @@ AIR312.py:48:1: AIR312 `airflow.sensors.external_task.ExternalTaskMarker` is dep
49 | FileSensor()
50 | TimeSensor(), TimeSensorAsync()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskMarker` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalTaskMarker` instead.
AIR312.py:48:23: AIR312 `airflow.sensors.external_task.ExternalTaskSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -185,7 +185,7 @@ AIR312.py:48:23: AIR312 `airflow.sensors.external_task.ExternalTaskSensor` is de
49 | FileSensor()
50 | TimeSensor(), TimeSensorAsync()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `ExternalTaskSensor` from `airflow.providers.standard.sensors.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.sensors.external_task.ExternalTaskSensor` instead.
AIR312.py:49:1: AIR312 `airflow.sensors.filesystem.FileSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -196,7 +196,7 @@ AIR312.py:49:1: AIR312 `airflow.sensors.filesystem.FileSensor` is deprecated and
50 | TimeSensor(), TimeSensorAsync()
51 | TimeDeltaSensor(), TimeDeltaSensorAsync()
|
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `FileSensor` from `airflow.providers.standard.sensors.filesystem` instead.
= help: Install `apache-airflow-providers-standard>=0.0.2` and use `airflow.providers.standard.sensors.filesystem.FileSensor` instead.
AIR312.py:50:1: AIR312 `airflow.sensors.time_sensor.TimeSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -207,7 +207,7 @@ AIR312.py:50:1: AIR312 `airflow.sensors.time_sensor.TimeSensor` is deprecated an
51 | TimeDeltaSensor(), TimeDeltaSensorAsync()
52 | DayOfWeekSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `TimeSensor` from `airflow.providers.standard.sensors.time` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.time.TimeSensor` instead.
AIR312.py:50:15: AIR312 `airflow.sensors.time_sensor.TimeSensorAsync` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -218,7 +218,7 @@ AIR312.py:50:15: AIR312 `airflow.sensors.time_sensor.TimeSensorAsync` is depreca
51 | TimeDeltaSensor(), TimeDeltaSensorAsync()
52 | DayOfWeekSensor()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `TimeSensorAsync` from `airflow.providers.standard.sensors.time` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.time.TimeSensorAsync` instead.
AIR312.py:51:1: AIR312 `airflow.sensors.time_delta.TimeDeltaSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -229,7 +229,7 @@ AIR312.py:51:1: AIR312 `airflow.sensors.time_delta.TimeDeltaSensor` is deprecate
52 | DayOfWeekSensor()
53 | DagStateTrigger(), WorkflowTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `TimeDeltaSensor` from `airflow.providers.standard.sensors.time_delta` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.time_delta.TimeDeltaSensor` instead.
AIR312.py:51:20: AIR312 `airflow.sensors.time_delta.TimeDeltaSensorAsync` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -240,7 +240,7 @@ AIR312.py:51:20: AIR312 `airflow.sensors.time_delta.TimeDeltaSensorAsync` is dep
52 | DayOfWeekSensor()
53 | DagStateTrigger(), WorkflowTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `TimeDeltaSensorAsync` from `airflow.providers.standard.sensors.time_delta` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.time_delta.TimeDeltaSensorAsync` instead.
AIR312.py:52:1: AIR312 `airflow.sensors.weekday.DayOfWeekSensor` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -251,7 +251,7 @@ AIR312.py:52:1: AIR312 `airflow.sensors.weekday.DayOfWeekSensor` is deprecated a
53 | DagStateTrigger(), WorkflowTrigger()
54 | FileTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `DayOfWeekSensor` from `airflow.providers.standard.sensors.weekday` instead.
= help: Install `apache-airflow-providers-standard>=0.0.1` and use `airflow.providers.standard.sensors.weekday.DayOfWeekSensor` instead.
AIR312.py:53:1: AIR312 `airflow.triggers.external_task.DagStateTrigger` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -262,7 +262,7 @@ AIR312.py:53:1: AIR312 `airflow.triggers.external_task.DagStateTrigger` is depre
54 | FileTrigger()
55 | DateTimeTrigger(), TimeDeltaTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `DagStateTrigger` from `airflow.providers.standard.triggers.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.triggers.external_task.DagStateTrigger` instead.
AIR312.py:53:20: AIR312 `airflow.triggers.external_task.WorkflowTrigger` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -273,7 +273,7 @@ AIR312.py:53:20: AIR312 `airflow.triggers.external_task.WorkflowTrigger` is depr
54 | FileTrigger()
55 | DateTimeTrigger(), TimeDeltaTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `WorkflowTrigger` from `airflow.providers.standard.triggers.external_task` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.triggers.external_task.WorkflowTrigger` instead.
AIR312.py:54:1: AIR312 `airflow.triggers.file.FileTrigger` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -283,7 +283,7 @@ AIR312.py:54:1: AIR312 `airflow.triggers.file.FileTrigger` is deprecated and mov
| ^^^^^^^^^^^ AIR312
55 | DateTimeTrigger(), TimeDeltaTrigger()
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `FileTrigger` from `airflow.providers.standard.triggers.file` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.triggers.file.FileTrigger` instead.
AIR312.py:55:1: AIR312 `airflow.triggers.temporal.DateTimeTrigger` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -292,7 +292,7 @@ AIR312.py:55:1: AIR312 `airflow.triggers.temporal.DateTimeTrigger` is deprecated
55 | DateTimeTrigger(), TimeDeltaTrigger()
| ^^^^^^^^^^^^^^^ AIR312
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `DateTimeTrigger` from `airflow.providers.standard.triggers.temporal` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.triggers.temporal.DateTimeTrigger` instead.
AIR312.py:55:20: AIR312 `airflow.triggers.temporal.TimeDeltaTrigger` is deprecated and moved into `standard` provider in Airflow 3.0; It still works in Airflow 3.0 but is expected to be removed in a future version.
|
@@ -301,4 +301,4 @@ AIR312.py:55:20: AIR312 `airflow.triggers.temporal.TimeDeltaTrigger` is deprecat
55 | DateTimeTrigger(), TimeDeltaTrigger()
| ^^^^^^^^^^^^^^^^ AIR312
|
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `TimeDeltaTrigger` from `airflow.providers.standard.triggers.temporal` instead.
= help: Install `apache-airflow-providers-standard>=0.0.3` and use `airflow.providers.standard.triggers.temporal.TimeDeltaTrigger` instead.
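Note: AIR312 differs from AIR302 only in that the old locations still work in Airflow 3.0; the suggested change is the same import swap, shown here with the `BashOperator` paths from the messages above (the `task_id`/`bash_command` arguments are illustrative):

```python
# Deprecated location, still functional in Airflow 3.0 (AIR312)
# from airflow.operators.bash import BashOperator

# Preferred location: requires `apache-airflow-providers-standard>=0.0.1`
from airflow.providers.standard.operators.bash import BashOperator

task = BashOperator(task_id="hello", bash_command="echo hello")
```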

View File

@@ -52,20 +52,17 @@ impl Violation for SysVersionCmpStr3 {
 /// ## What it does
 /// Checks for equality comparisons against the major version returned by
-/// `sys.version_info` (e.g., `sys.version_info[0] == 3` or `sys.version_info[0] != 3`).
+/// `sys.version_info` (e.g., `sys.version_info[0] == 3`).
 ///
 /// ## Why is this bad?
 /// Using `sys.version_info[0] == 3` to verify that the major version is
 /// Python 3 or greater will fail if the major version number is ever
 /// incremented (e.g., to Python 4). This is likely unintended, as code
 /// that uses this comparison is likely intended to be run on Python 2,
-/// but would now run on Python 4 too. Similarly, using `sys.version_info[0] != 3`
-/// to check for Python 2 will also fail if the major version number is
-/// incremented.
+/// but would now run on Python 4 too.
 ///
 /// Instead, use `>=` to check if the major version number is 3 or greater,
-/// or `<` to check if the major version number is less than 3, to future-proof
-/// the code.
+/// to future-proof the code.
///
/// ## Example
/// ```python
@@ -91,18 +88,12 @@ impl Violation for SysVersionCmpStr3 {
 /// - [Python documentation: `sys.version`](https://docs.python.org/3/library/sys.html#sys.version)
 /// - [Python documentation: `sys.version_info`](https://docs.python.org/3/library/sys.html#sys.version_info)
 #[derive(ViolationMetadata)]
-pub(crate) struct SysVersionInfo0Eq3 {
-    eq: bool,
-}
+pub(crate) struct SysVersionInfo0Eq3;

 impl Violation for SysVersionInfo0Eq3 {
     #[derive_message_formats]
     fn message(&self) -> String {
-        if self.eq {
-            "`sys.version_info[0] == 3` referenced (python4), use `>=`".to_string()
-        } else {
-            "`sys.version_info[0] != 3` referenced (python4), use `<`".to_string()
-        }
+        "`sys.version_info[0] == 3` referenced (python4), use `>=`".to_string()
     }
 }
@@ -244,7 +235,7 @@ pub(crate) fn compare(checker: &Checker, left: &Expr, ops: &[CmpOp], comparators
 {
     if *i == 0 {
         if let (
-            [operator @ (CmpOp::Eq | CmpOp::NotEq)],
+            [CmpOp::Eq | CmpOp::NotEq],
             [
                 Expr::NumberLiteral(ast::ExprNumberLiteral {
                     value: ast::Number::Int(n),
@@ -255,9 +246,7 @@ pub(crate) fn compare(checker: &Checker, left: &Expr, ops: &[CmpOp], comparators
 {
     if *n == 3 && checker.enabled(Rule::SysVersionInfo0Eq3) {
         checker.report_diagnostic(Diagnostic::new(
-            SysVersionInfo0Eq3 {
-                eq: matches!(*operator, CmpOp::Eq),
-            },
+            SysVersionInfo0Eq3,
             left.range(),
         ));
     }
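Note: a concrete, standard-library-only illustration of the pitfall the docstring above describes:

```python
import sys

# Fragile: True on Python 3, but it would flip back to False on a
# hypothetical Python 4, so a Python-2 fallback branch would run there.
PY3 = sys.version_info[0] == 3

# Future-proof: compare with `>=` (or `<` for a Python-2 check).
PY3 = sys.version_info[0] >= 3
PY2 = sys.version_info[0] < 3
```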

View File

@@ -20,7 +20,7 @@ YTT201.py:8:7: YTT201 `sys.version_info[0] == 3` referenced (python4), use `>=`
10 | PY2 = version_info[0] != 3
|
YTT201.py:9:7: YTT201 `sys.version_info[0] != 3` referenced (python4), use `<`
YTT201.py:9:7: YTT201 `sys.version_info[0] == 3` referenced (python4), use `>=`
|
7 | PY3 = sys.version_info[0] == 3
8 | PY3 = version_info[0] == 3
@@ -29,7 +29,7 @@ YTT201.py:9:7: YTT201 `sys.version_info[0] != 3` referenced (python4), use `<`
10 | PY2 = version_info[0] != 3
|
YTT201.py:10:7: YTT201 `sys.version_info[0] != 3` referenced (python4), use `<`
YTT201.py:10:7: YTT201 `sys.version_info[0] == 3` referenced (python4), use `>=`
|
8 | PY3 = version_info[0] == 3
9 | PY2 = sys.version_info[0] != 3

View File

@@ -62,25 +62,7 @@ pub(crate) fn async_zero_sleep(checker: &Checker, call: &ExprCall) {
         return;
     }
-    let Some(qualified_name) = checker
-        .semantic()
-        .resolve_qualified_name(call.func.as_ref())
-    else {
-        return;
-    };
-    let Some(module) = AsyncModule::try_from(&qualified_name) else {
-        return;
-    };
-    // Determine the correct argument name
-    let arg_name = match module {
-        AsyncModule::Trio => "seconds",
-        AsyncModule::AnyIo => "delay",
-        AsyncModule::AsyncIo => return,
-    };
-    let Some(arg) = call.arguments.find_argument_value(arg_name, 0) else {
+    let Some(arg) = call.arguments.find_argument_value("seconds", 0) else {
         return;
     };
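Note: the deleted `match` encoded the fact that the two backends name the duration parameter differently; for reference, both spellings (runnable only if `trio` and `anyio` are installed):

```python
import anyio
import trio

async def trio_pause() -> None:
    await trio.sleep(seconds=0)  # trio: the parameter is `seconds`

async def anyio_pause() -> None:
    await anyio.sleep(delay=0)  # anyio: the parameter is `delay`

trio.run(trio_pause)
anyio.run(anyio_pause)
```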

View File

@@ -71,25 +71,7 @@ pub(crate) fn long_sleep_not_forever(checker: &Checker, call: &ExprCall) {
         return;
     }
-    let Some(qualified_name) = checker
-        .semantic()
-        .resolve_qualified_name(call.func.as_ref())
-    else {
-        return;
-    };
-    let Some(module) = AsyncModule::try_from(&qualified_name) else {
-        return;
-    };
-    // Determine the correct argument name
-    let arg_name = match module {
-        AsyncModule::Trio => "seconds",
-        AsyncModule::AnyIo => "delay",
-        AsyncModule::AsyncIo => return,
-    };
-    let Some(arg) = call.arguments.find_argument_value(arg_name, 0) else {
+    let Some(arg) = call.arguments.find_argument_value("seconds", 0) else {
         return;
     };

View File

@@ -214,41 +214,3 @@ ASYNC115.py:128:15: ASYNC115 [*] Use `anyio.lowlevel.checkpoint()` instead of `a
129 129 |
130 130 |
131 131 | def func():
ASYNC115.py:156:11: ASYNC115 [*] Use `anyio.lowlevel.checkpoint()` instead of `anyio.sleep(0)`
|
154 | await anyio.sleep(seconds=1) # OK
155 |
156 | await anyio.sleep(delay=0) # ASYNC115
| ^^^^^^^^^^^^^^^^^^^^ ASYNC115
157 | await anyio.sleep(seconds=0) # OK
|
= help: Replace with `anyio.lowlevel.checkpoint()`
Safe fix
153 153 | await anyio.sleep(delay=1) # OK
154 154 | await anyio.sleep(seconds=1) # OK
155 155 |
156 |- await anyio.sleep(delay=0) # ASYNC115
156 |+ await anyio.lowlevel.checkpoint() # ASYNC115
157 157 | await anyio.sleep(seconds=0) # OK
158 158 |
159 159 |
ASYNC115.py:166:11: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
164 | await trio.sleep(delay=1) # OK
165 |
166 | await trio.sleep(seconds=0) # ASYNC115
| ^^^^^^^^^^^^^^^^^^^^^ ASYNC115
167 | await trio.sleep(delay=0) # OK
|
= help: Replace with `trio.lowlevel.checkpoint()`
Safe fix
163 163 | await trio.sleep(seconds=1) # OK
164 164 | await trio.sleep(delay=1) # OK
165 165 |
166 |- await trio.sleep(seconds=0) # ASYNC115
166 |+ await trio.lowlevel.checkpoint() # ASYNC115
167 167 | await trio.sleep(delay=0) # OK

View File

@@ -289,44 +289,3 @@ ASYNC116.py:110:11: ASYNC116 [*] `anyio.sleep()` with >24 hour interval should u
109 110 | # catch from import
110 |- await sleep(86401) # error: 116, "async"
111 |+ await sleep_forever() # error: 116, "async"
111 112 |
112 113 |
113 114 | async def test_anyio_async116_helpers():
ASYNC116.py:119:11: ASYNC116 [*] `anyio.sleep()` with >24 hour interval should usually be `anyio.sleep_forever()`
|
117 | await anyio.sleep(seconds=1) # OK
118 |
119 | await anyio.sleep(delay=86401) # ASYNC116
| ^^^^^^^^^^^^^^^^^^^^^^^^ ASYNC116
120 | await anyio.sleep(seconds=86401) # OK
|
= help: Replace with `anyio.sleep_forever()`
Unsafe fix
116 116 | await anyio.sleep(delay=1) # OK
117 117 | await anyio.sleep(seconds=1) # OK
118 118 |
119 |- await anyio.sleep(delay=86401) # ASYNC116
119 |+ await anyio.sleep_forever() # ASYNC116
120 120 | await anyio.sleep(seconds=86401) # OK
121 121 |
122 122 |
ASYNC116.py:129:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
127 | await trio.sleep(delay=1) # OK
128 |
129 | await trio.sleep(seconds=86401) # ASYNC116
| ^^^^^^^^^^^^^^^^^^^^^^^^^ ASYNC116
130 | await trio.sleep(delay=86401) # OK
|
= help: Replace with `trio.sleep_forever()`
Unsafe fix
126 126 | await trio.sleep(seconds=1) # OK
127 127 | await trio.sleep(delay=1) # OK
128 128 |
129 |- await trio.sleep(seconds=86401) # ASYNC116
129 |+ await trio.sleep_forever() # ASYNC116
130 130 | await trio.sleep(delay=86401) # OK

View File

@@ -1,7 +1,7 @@
use itertools::Itertools;
use ruff_python_ast::{Alias, Stmt};
use ruff_diagnostics::{AlwaysFixableViolation, Applicability, Diagnostic, Fix};
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Fix};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_text_size::Ranged;
@@ -123,19 +123,9 @@ pub(crate) fn unnecessary_future_import(checker: &Checker, stmt: &Stmt, names: &
checker.stylist(),
checker.indexer(),
)?;
let range = edit.range();
let applicability = if checker.comment_ranges().intersects(range) {
Applicability::Unsafe
} else {
Applicability::Safe
};
Ok(
Fix::applicable_edit(edit, applicability).isolate(Checker::isolation(
checker.semantic().current_statement_parent_id(),
)),
)
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
checker.semantic().current_statement_parent_id(),
)))
});
checker.report_diagnostic(diagnostic);
}
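The removed branch illustrates a general ruff pattern: a deletion fix is downgraded to unsafe when its edit range overlaps a comment, since applying it would silently drop that comment. A minimal sketch of the check, assuming the `ruff_text_size::TextRange` and `ruff_diagnostics::Applicability` types used in this file:

```rust
use ruff_diagnostics::Applicability;
use ruff_text_size::TextRange;

/// Sketch: choose a fix's applicability based on comment overlap.
fn fix_applicability(edit_range: TextRange, comment_ranges: &[TextRange]) -> Applicability {
    if comment_ranges
        .iter()
        .any(|comment| comment.intersect(edit_range).is_some())
    {
        // Deleting across a comment could lose information; mark unsafe.
        Applicability::Unsafe
    } else {
        Applicability::Safe
    }
}
```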

View File

@@ -156,7 +156,6 @@ UP010.py:13:5: UP010 [*] Unnecessary `__future__` import `generator_stop` for ta
13 | from __future__ import generator_stop
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP010
14 | from __future__ import invalid_module, generators
15 | from __future__ import generators # comment
|
= help: Remove unnecessary `__future__` import
@@ -166,7 +165,6 @@ UP010.py:13:5: UP010 [*] Unnecessary `__future__` import `generator_stop` for ta
12 12 | if True:
13 |- from __future__ import generator_stop
14 13 | from __future__ import invalid_module, generators
15 14 | from __future__ import generators # comment
UP010.py:14:5: UP010 [*] Unnecessary `__future__` import `generators` for target Python version
|
@@ -174,7 +172,6 @@ UP010.py:14:5: UP010 [*] Unnecessary `__future__` import `generators` for target
13 | from __future__ import generator_stop
14 | from __future__ import invalid_module, generators
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP010
15 | from __future__ import generators # comment
|
= help: Remove unnecessary `__future__` import
@@ -184,19 +181,3 @@ UP010.py:14:5: UP010 [*] Unnecessary `__future__` import `generators` for target
13 13 | from __future__ import generator_stop
14 |- from __future__ import invalid_module, generators
14 |+ from __future__ import invalid_module
15 15 | from __future__ import generators # comment
UP010.py:15:5: UP010 [*] Unnecessary `__future__` import `generators` for target Python version
|
13 | from __future__ import generator_stop
14 | from __future__ import invalid_module, generators
15 | from __future__ import generators # comment
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP010
|
= help: Remove unnecessary `__future__` import
Unsafe fix
12 12 | if True:
13 13 | from __future__ import generator_stop
14 14 | from __future__ import invalid_module, generators
15 |- from __future__ import generators # comment

View File

@@ -41,8 +41,6 @@ getrandom = { workspace = true, features = ["wasm_js"] }
serde = { workspace = true }
serde-wasm-bindgen = { workspace = true }
wasm-bindgen = { workspace = true }
# Not a direct dependency but required to compile for Wasm.
uuid = { workspace = true, features = ["js"] }
[dev-dependencies]
wasm-bindgen-test = { workspace = true }

crates/ty/docs/cli.md (generated)
View File

@@ -43,10 +43,7 @@ ty check [OPTIONS] [PATH]...
<li><code>auto</code>: Display colors if the output goes to an interactive terminal</li>
<li><code>always</code>: Always display colors</li>
<li><code>never</code>: Never display colors</li>
</ul></dd><dt id="ty-check--config"><a href="#ty-check--config"><code>--config</code></a>, <code>-c</code> <i>config-option</i></dt><dd><p>A TOML <code>&lt;KEY&gt; = &lt;VALUE&gt;</code> pair (such as you might find in a <code>ty.toml</code> configuration file)
overriding a specific configuration option.</p>
<p>Overrides of individual settings using this option always take precedence
over all configuration files.</p>
</ul></dd><dt id="ty-check--config"><a href="#ty-check--config"><code>--config</code></a>, <code>-c</code> <i>config-option</i></dt><dd><p>A TOML <code>&lt;KEY&gt; = &lt;VALUE&gt;</code> pair</p>
</dd><dt id="ty-check--error"><a href="#ty-check--error"><code>--error</code></a> <i>rule</i></dt><dd><p>Treat the given rule as having severity 'error'. Can be specified multiple times.</p>
</dd><dt id="ty-check--error-on-warning"><a href="#ty-check--error-on-warning"><code>--error-on-warning</code></a></dt><dd><p>Use exit code 1 if there are any warning-level diagnostics</p>
</dd><dt id="ty-check--exit-zero"><a href="#ty-check--exit-zero"><code>--exit-zero</code></a></dt><dd><p>Always use exit code 0, even when there are error-level diagnostics</p>

View File

@@ -172,9 +172,6 @@ If left unspecified, ty will try to detect common project layouts and initialize
* if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
* otherwise, default to `.` (flat layout)
Additionally, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
it will also be included in the first party search path.
**Default value**: `null`
**Type**: `str`
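For example, under the removed behavior, a project containing `pyproject.toml`, `src/my_pkg/__init__.py`, and `tests/test_foo.py` (with no `tests/__init__.py`) would have resolved its first party search path to `.`, `./src`, and `./tests`.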

View File

@@ -322,11 +322,9 @@ pub(crate) enum TerminalColor {
/// Never display colors.
Never,
}
/// A TOML `<KEY> = <VALUE>` pair
/// (such as you might find in a `ty.toml` configuration file)
/// overriding a specific configuration option.
///
/// Overrides of individual settings using this option always take precedence
/// over all configuration files.
#[derive(Debug, Clone)]
@@ -361,15 +359,7 @@ impl clap::Args for ConfigsArg {
.short('c')
.long("config")
.value_name("CONFIG_OPTION")
.help("A TOML `<KEY> = <VALUE>` pair overriding a specific configuration option.")
.long_help(
"
A TOML `<KEY> = <VALUE>` pair (such as you might find in a `ty.toml` configuration file)
overriding a specific configuration option.
Overrides of individual settings using this option always take precedence
over all configuration files.",
)
.help("A TOML `<KEY> = <VALUE>` pair")
.action(ArgAction::Append),
)
}

View File

@@ -243,14 +243,12 @@ impl MainLoop {
MainLoopMessage::CheckWorkspace => {
let db = db.clone();
let sender = self.sender.clone();
let mut reporter = R::default();
// Spawn a new task that checks the project. This needs to be done in a separate thread
// to prevent blocking the main loop here.
rayon::spawn(move || {
match salsa::Cancelled::catch(|| {
let mut reporter = R::default();
db.check_with_reporter(&mut reporter)
}) {
match db.check_with_reporter(&mut reporter) {
Ok(result) => {
// Send the result back to the main loop for printing.
sender

View File

@@ -1555,83 +1555,6 @@ fn cli_config_args_invalid_option() -> anyhow::Result<()> {
Ok(())
}
/// The `site-packages` directory is used by ty to resolve external imports.
/// ty performs the following checks, in order, to discover the `site-packages` directory:
/// 1) whether the `VIRTUAL_ENV` environment variable is set
/// 2) whether the `CONDA_PREFIX` environment variable is set
/// 3) whether a `.venv` directory exists at the project root
///
/// This test validates the logic around `CONDA_PREFIX`.
///
/// A conda-like directory structure is used: we first run without
/// `CONDA_PREFIX` set and expect a failure, then set `CONDA_PREFIX`
/// to `conda-env` and expect a pass.
///
/// ├── project
/// │ └── test.py
/// └── conda-env
/// └── lib
/// └── python3.13
/// └── site-packages
/// └── package1
/// └── __init__.py
///
/// `test.py` imports `package1`,
/// and the command is run in the `project` directory.
#[test]
fn check_conda_prefix_var_to_resolve_path() -> anyhow::Result<()> {
let conda_package1_path = if cfg!(windows) {
"conda-env/Lib/site-packages/package1/__init__.py"
} else {
"conda-env/lib/python3.13/site-packages/package1/__init__.py"
};
let case = TestCase::with_files([
(
"project/test.py",
r#"
import package1
"#,
),
(
conda_package1_path,
r#"
"#,
),
])?;
assert_cmd_snapshot!(case.command().current_dir(case.root().join("project")), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-import]: Cannot resolve imported module `package1`
--> test.py:2:8
|
2 | import package1
| ^^^^^^^^
|
info: make sure your Python environment is properly configured: https://github.com/astral-sh/ty/blob/main/docs/README.md#python-environment
info: rule `unresolved-import` is enabled by default
Found 1 diagnostic
----- stderr -----
WARN ty is pre-release software and not ready for production use. Expect to encounter bugs, missing features, and fatal errors.
");
// Now run the command with `CONDA_PREFIX` set to `<temp_dir>/conda-env`.
assert_cmd_snapshot!(case.command().current_dir(case.root().join("project")).env("CONDA_PREFIX", case.root().join("conda-env")), @r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
WARN ty is pre-release software and not ready for production use. Expect to encounter bugs, missing features, and fatal errors.
");
Ok(())
}
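The doc comment's resolution order maps directly onto the removed `PythonPath` construction (visible in the `Options` diff further down). A condensed sketch, using only the constructors that appear in this diff:

```rust
// Sketch of the documented order: VIRTUAL_ENV, then CONDA_PREFIX, then
// filesystem discovery rooted at the project.
fn resolve_python_path(project_root: &SystemPath) -> PythonPath {
    std::env::var("VIRTUAL_ENV")
        .ok()
        .map(PythonPath::from_virtual_env_var)
        .or_else(|| {
            std::env::var("CONDA_PREFIX")
                .ok()
                .map(PythonPath::from_conda_prefix_var)
        })
        .unwrap_or_else(|| PythonPath::Discover(project_root.to_path_buf()))
}
```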
struct TestCase {
_temp_dir: TempDir,
_settings_scope: SettingsBindDropGuard,

View File

@@ -1135,7 +1135,7 @@ print(sys.last_exc, os.getegid())
Ok(())
})?;
let diagnostics = case.db.check();
let diagnostics = case.db.check().context("Failed to check project.")?;
assert_eq!(diagnostics.len(), 2);
assert_eq!(
@@ -1160,7 +1160,7 @@ print(sys.last_exc, os.getegid())
})
.expect("Search path settings to be valid");
let diagnostics = case.db.check();
let diagnostics = case.db.check().context("Failed to check project.")?;
assert!(diagnostics.is_empty());
Ok(())
@@ -1763,7 +1763,10 @@ fn changes_to_user_configuration() -> anyhow::Result<()> {
let foo = case
.system_file(case.project_path("foo.py"))
.expect("foo.py to exist");
let diagnostics = case.db().check_file(foo);
let diagnostics = case
.db()
.check_file(foo)
.context("Failed to check project.")?;
assert!(
diagnostics.is_empty(),
@@ -1783,7 +1786,10 @@ fn changes_to_user_configuration() -> anyhow::Result<()> {
case.apply_changes(changes);
let diagnostics = case.db().check_file(foo);
let diagnostics = case
.db()
.check_file(foo)
.context("Failed to check project.")?;
assert!(
diagnostics.len() == 1,

View File

@@ -1,30 +0,0 @@
"""
Make sure that types are inferred for all subexpressions of the following
annotations involving `ty_extensions` `_SpecialForm`s.
This is a regression test for https://github.com/astral-sh/ty/issues/366
"""
from ty_extensions import CallableTypeOf, Intersection, Not, TypeOf
class A: ...
class B: ...
def _(x: Not[A]):
pass
def _(x: Intersection[A], y: Intersection[A, B]):
pass
def _(x: TypeOf[1j]):
pass
def _(x: CallableTypeOf[str]):
pass

View File

@@ -8,8 +8,8 @@ use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use salsa::Event;
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};
use ty_ide::Db as IdeDb;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{Db as SemanticDb, Program};
@@ -76,21 +76,24 @@ impl ProjectDatabase {
}
/// Checks all open files in the project and its dependencies.
pub fn check(&self) -> Vec<Diagnostic> {
pub fn check(&self) -> Result<Vec<Diagnostic>, Cancelled> {
let mut reporter = DummyReporter;
let reporter = AssertUnwindSafe(&mut reporter as &mut dyn Reporter);
self.project().check(self, reporter)
self.with_db(|db| db.project().check(db, reporter))
}
/// Checks all open files in the project and its dependencies, using the given reporter.
pub fn check_with_reporter(&self, reporter: &mut dyn Reporter) -> Vec<Diagnostic> {
pub fn check_with_reporter(
&self,
reporter: &mut dyn Reporter,
) -> Result<Vec<Diagnostic>, Cancelled> {
let reporter = AssertUnwindSafe(reporter);
self.project().check(self, reporter)
self.with_db(|db| db.project().check(db, reporter))
}
#[tracing::instrument(level = "debug", skip(self))]
pub fn check_file(&self, file: File) -> Vec<Diagnostic> {
self.project().check_file(self, file)
pub fn check_file(&self, file: File) -> Result<Vec<Diagnostic>, Cancelled> {
self.with_db(|db| self.project().check_file(db, file))
}
/// Returns a mutable reference to the system.
@@ -104,6 +107,13 @@ impl ProjectDatabase {
Arc::get_mut(&mut self.system)
.expect("ref count should be 1 because `zalsa_mut` drops all other DB references.")
}
pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
where
F: FnOnce(&ProjectDatabase) -> T + std::panic::UnwindSafe,
{
Cancelled::catch(|| f(self))
}
}
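With the `with_db` wrapper, every public checking entry point returns `Result<_, Cancelled>` instead of unwinding when Salsa cancels a query mid-run. A hypothetical caller treats cancellation as a benign outcome:

```rust
// Hypothetical caller: cancellation means the inputs changed while the
// check ran, so the result would be stale anyway; log it and move on.
match db.check() {
    Ok(diagnostics) => println!("found {} diagnostics", diagnostics.len()),
    Err(cancelled) => tracing::debug!("check cancelled: {cancelled}"),
}
```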
impl Upcast<dyn SemanticDb> for ProjectDatabase {

View File

@@ -1029,52 +1029,6 @@ expected `.`, `]`
Ok(())
}
#[test]
fn src_root_with_tests() -> anyhow::Result<()> {
let system = TestSystem::default();
let root = SystemPathBuf::from("/app");
// pytest will find `tests/test_foo.py` and realize it is NOT part of a package
// given that there's no `__init__.py` file in the same folder.
// It will then add `tests` to `sys.path`
// in order to import `test_foo.py` as the module `test_foo`.
system
.memory_file_system()
.write_files_all([
(root.join("src/main.py"), ""),
(root.join("tests/conftest.py"), ""),
(root.join("tests/test_foo.py"), ""),
])
.context("Failed to write files")?;
let metadata = ProjectMetadata::discover(&root, &system)?;
let settings = metadata
.options
.to_program_settings(&root, "my_package", &system);
assert_eq!(
settings.search_paths.src_roots,
vec![root.clone(), root.join("src"), root.join("tests")]
);
// If `tests/__init__.py` is present, it is considered a package and `tests` is not added to `sys.path`.
system
.memory_file_system()
.write_file(root.join("tests/__init__.py"), "")
.context("Failed to write tests/__init__.py")?;
let metadata = ProjectMetadata::discover(&root, &system)?;
let settings = metadata
.options
.to_program_settings(&root, "my_package", &system);
assert_eq!(
settings.search_paths.src_roots,
vec![root.clone(), root.join("src")]
);
Ok(())
}
#[track_caller]
fn assert_error_eq(error: &ProjectDiscoveryError, message: &str) {
assert_eq!(error.to_string().replace('\\', "/"), message);

View File

@@ -135,7 +135,7 @@ impl Options {
} else {
let src = project_root.join("src");
let mut roots = if system.is_directory(&src) {
if system.is_directory(&src) {
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
// This corresponds to the `src-layout`
tracing::debug!(
@@ -154,24 +154,7 @@ impl Options {
// Default to a [flat project structure](https://packaging.python.org/en/latest/discussions/src-layout-vs-flat-layout/).
tracing::debug!("Defaulting `src.root` to `.`");
vec![project_root.to_path_buf()]
};
// Following pytest's test-discovery conventions,
// we also include the `tests` directory if it exists and is not a package.
let tests_dir = project_root.join("tests");
if system.is_directory(&tests_dir)
&& !system.is_file(&tests_dir.join("__init__.py"))
&& !roots.contains(&tests_dir)
{
// If the `tests` directory exists and is not a package, include it as a source root.
tracing::debug!(
"Including `./tests` in `src.root` because a `./tests` directory exists"
);
roots.push(tests_dir);
}
roots
};
let (extra_paths, python, typeshed) = self
@@ -203,11 +186,6 @@ impl Options {
.ok()
.map(PythonPath::from_virtual_env_var)
})
.or_else(|| {
std::env::var("CONDA_PREFIX")
.ok()
.map(PythonPath::from_conda_prefix_var)
})
.unwrap_or_else(|| PythonPath::Discover(project_root.to_path_buf())),
}
}
@@ -409,9 +387,6 @@ pub struct SrcOptions {
/// * if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat)
/// * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
/// * otherwise, default to `.` (flat layout)
///
/// Additionally, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
/// it will also be included in the first party search path.
#[serde(skip_serializing_if = "Option::is_none")]
#[option(
default = r#"null"#,

View File

@@ -305,37 +305,4 @@ def _(c: Callable[[int], int]):
reveal_type(c.__call__) # revealed: (int, /) -> int
```
Unlike other type checkers, we do _not_ allow access to attributes that are only available on
function-like callables:
```py
def f_wrong(c: Callable[[], None]):
# error: [unresolved-attribute] "Type `() -> None` has no attribute `__qualname__`"
c.__qualname__
# error: [unresolved-attribute] "Unresolved attribute `__qualname__` on type `() -> None`."
c.__qualname__ = "my_callable"
```
We do this because, at runtime, calling `f_wrong` with a non-function callable would raise an
`AttributeError`:
```py
class MyCallable:
def __call__(self) -> None:
pass
f_wrong(MyCallable()) # raises `AttributeError` at runtime
```
If users want to write to attributes such as `__qualname__`, they need to check the existence of the
attribute first:
```py
def f_okay(c: Callable[[], None]):
if hasattr(c, "__qualname__"):
c.__qualname__ # okay
c.__qualname__ = "my_callable" # also okay
```
[gradual form]: https://typing.python.org/en/latest/spec/glossary.html#term-gradual-form

View File

@@ -1527,65 +1527,6 @@ def _(ns: argparse.Namespace):
reveal_type(ns.whatever) # revealed: Any
```
## Classes with custom `__getattribute__` methods
If a type provides a custom `__getattribute__`, we use its return type as the type for unknown
attributes. Note that this behavior differs from runtime, where `__getattribute__` is called
unconditionally, even for known attributes. The rationale for doing this is that it allows users to
specify more precise types for specific attributes, such as `x: str` in the example below. This
behavior matches other type checkers such as mypy and pyright.
```py
from typing import Any
class Foo:
x: str
def __getattribute__(self, attr: str) -> Any:
return 42
reveal_type(Foo().x) # revealed: str
reveal_type(Foo().y) # revealed: Any
```
A standard library example for a class with a custom `__getattribute__` method is `SimpleNamespace`:
```py
from types import SimpleNamespace
sn = SimpleNamespace(a="a")
reveal_type(sn.a) # revealed: Any
```
`__getattribute__` takes precedence over `__getattr__`:
```py
class C:
def __getattribute__(self, name: str) -> int:
return 1
def __getattr__(self, name: str) -> str:
return "a"
c = C()
reveal_type(c.x) # revealed: int
```
Like all dunder methods, `__getattribute__` is not looked up on instances:
```py
def external_getattribute(name) -> int:
return 1
class ThisFails:
def __init__(self):
self.__getattribute__ = external_getattribute
# error: [unresolved-attribute]
ThisFails().x
```
## Classes with custom `__setattr__` methods
### Basic
@@ -1757,36 +1698,6 @@ reveal_type(outer.nested.inner.Outer.Nested.Inner.attr) # revealed: int
outer.nested.inner.Outer.Nested.Inner.attr = "a"
```
### Unions of module attributes
`mod1.py`:
```py
global_symbol: str = "a"
```
`mod2.py`:
```py
global_symbol: str = "a"
```
```py
import mod1
import mod2
def _(flag: bool):
if flag:
mod = mod1
else:
mod = mod2
mod.global_symbol = "b"
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to attribute `global_symbol` on type `<module 'mod1'> | <module 'mod2'>`"
mod.global_symbol = 1
```
## Literal types
### Function-literal attributes
@@ -2109,52 +2020,6 @@ reveal_type(Foo.BAR.value) # revealed: @Todo(Attribute access on enum classes)
reveal_type(Foo.__members__) # revealed: @Todo(Attribute access on enum classes)
```
## Errors for unresolved and invalid attribute assignments
```py
class C:
class_attr: int = 1
def __init__(self) -> None:
self.instance_attr: int = 1
c = C()
C.class_attr = 2
c.class_attr = 2
c.instance_attr = 2
C.class_attr = "invalid" # error: [invalid-assignment]
c.class_attr = "invalid" # error: [invalid-assignment]
c.instance_attr = "invalid" # error: [invalid-assignment]
# TODO: The following attribute assignments are flagged by mypy/pyright (Type can
# not be declared in assignment to non-self attribute). We currently do not emit
# any errors and also don't validate the type of the annotation in any form. This
# should probably be changed.
C.class_attr: None = 2
c.class_attr: None = 2
c.instance_attr: None = 2
# TODO: Similar here. We do report `invalid-assignment` errors, but only because
# the value type does not match.
C.class_attr: str = "invalid" # error: [invalid-assignment]
c.class_attr: str = "invalid" # error: [invalid-assignment]
c.instance_attr: str = "invalid" # error: [invalid-assignment]
# For non-existent attributes, we emit `unresolved-attribute` errors:
C.non_existent = 2 # error: [unresolved-attribute]
c.non_existent = 2 # error: [unresolved-attribute]
# TODO: Similar to above, these should either be forbidden or validated.
#
# Make sure that we emit `unresolved-attribute` errors, at least:
C.non_existent: int = 2 # error: [unresolved-attribute]
c.non_existent: int = 2 # error: [unresolved-attribute]
C.non_existent: None = 2 # error: [unresolved-attribute]
c.non_existent: None = 2 # error: [unresolved-attribute]
```
## References
Some of the tests in the *Class and instance variables* section draw inspiration from

View File

@@ -375,7 +375,8 @@ class Foo(Protocol):
def method_member(self) -> bytes:
return b"foo"
reveal_type(get_protocol_members(Foo)) # revealed: frozenset[Literal["method_member", "x", "y", "z"]]
# TODO: actually a frozenset (requires support for legacy generics)
reveal_type(get_protocol_members(Foo)) # revealed: tuple[Literal["method_member"], Literal["x"], Literal["y"], Literal["z"]]
```
Certain special attributes and methods are not considered protocol members at runtime, and should
@@ -393,7 +394,8 @@ class Lumberjack(Protocol):
def __init__(self, x: int) -> None:
self.x = x
reveal_type(get_protocol_members(Lumberjack)) # revealed: frozenset[Literal["x"]]
# TODO: actually a frozenset
reveal_type(get_protocol_members(Lumberjack)) # revealed: tuple[Literal["x"]]
```
A sub-protocol inherits and extends the members of its superclass protocol(s):
@@ -405,11 +407,13 @@ class Bar(Protocol):
class Baz(Bar, Protocol):
ham: memoryview
reveal_type(get_protocol_members(Baz)) # revealed: frozenset[Literal["ham", "spam"]]
# TODO: actually a frozenset
reveal_type(get_protocol_members(Baz)) # revealed: tuple[Literal["ham"], Literal["spam"]]
class Baz2(Bar, Foo, Protocol): ...
# revealed: frozenset[Literal["method_member", "spam", "x", "y", "z"]]
# TODO: actually a frozenset
# revealed: tuple[Literal["method_member"], Literal["spam"], Literal["x"], Literal["y"], Literal["z"]]
reveal_type(get_protocol_members(Baz2))
```
@@ -437,7 +441,8 @@ class Foo(Protocol):
e = 56
def f(self) -> None: ...
reveal_type(get_protocol_members(Foo)) # revealed: frozenset[Literal["d", "e", "f"]]
# TODO: actually a frozenset
reveal_type(get_protocol_members(Foo)) # revealed: tuple[Literal["d"], Literal["e"], Literal["f"]]
```
## Invalid calls to `get_protocol_members()`
@@ -668,7 +673,8 @@ class LotsOfBindings(Protocol):
case l: # TODO: this should error with `[invalid-protocol]` (`l` is not declared)
...
# revealed: frozenset[Literal["Nested", "NestedProtocol", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"]]
# TODO: actually a frozenset
# revealed: tuple[Literal["Nested"], Literal["NestedProtocol"], Literal["a"], Literal["b"], Literal["c"], Literal["d"], Literal["e"], Literal["f"], Literal["g"], Literal["h"], Literal["i"], Literal["j"], Literal["k"], Literal["l"]]
reveal_type(get_protocol_members(LotsOfBindings))
```
@@ -696,7 +702,9 @@ class Foo(Protocol):
# Note: the list of members does not include `a`, `b` or `c`,
# as none of these attributes is declared in the class body.
reveal_type(get_protocol_members(Foo)) # revealed: frozenset[Literal["non_init_method", "x", "y"]]
#
# TODO: actually a frozenset
reveal_type(get_protocol_members(Foo)) # revealed: tuple[Literal["non_init_method"], Literal["x"], Literal["y"]]
```
If a member is declared in a superclass of a protocol class, it is fine for it to be assigned to in
@@ -709,8 +717,9 @@ class Super(Protocol):
class Sub(Super, Protocol):
x = 42 # no error here, since it's declared in the superclass
reveal_type(get_protocol_members(Super)) # revealed: frozenset[Literal["x"]]
reveal_type(get_protocol_members(Sub)) # revealed: frozenset[Literal["x"]]
# TODO: actually frozensets
reveal_type(get_protocol_members(Super)) # revealed: tuple[Literal["x"]]
reveal_type(get_protocol_members(Sub)) # revealed: tuple[Literal["x"]]
```
If a protocol has 0 members, then all other types are assignable to it, and all fully static types

View File

@@ -202,10 +202,6 @@ impl PythonPath {
Self::SysPrefix(path.into(), SysPrefixPathOrigin::VirtualEnvVar)
}
pub fn from_conda_prefix_var(path: impl Into<SystemPathBuf>) -> Self {
Self::Resolve(path.into(), SysPrefixPathOrigin::CondaPrefixVar)
}
pub fn from_cli_flag(path: SystemPathBuf) -> Self {
Self::Resolve(path, SysPrefixPathOrigin::PythonCliFlag)
}

View File

@@ -580,7 +580,6 @@ impl fmt::Display for SysPrefixPath {
pub enum SysPrefixPathOrigin {
PythonCliFlag,
VirtualEnvVar,
CondaPrefixVar,
Derived,
LocalVenv,
}
@@ -591,7 +590,7 @@ impl SysPrefixPathOrigin {
pub(crate) fn must_be_virtual_env(self) -> bool {
match self {
Self::LocalVenv | Self::VirtualEnvVar => true,
Self::PythonCliFlag | Self::Derived | Self::CondaPrefixVar => false,
Self::PythonCliFlag | Self::Derived => false,
}
}
}
@@ -601,7 +600,6 @@ impl Display for SysPrefixPathOrigin {
match self {
Self::PythonCliFlag => f.write_str("`--python` argument"),
Self::VirtualEnvVar => f.write_str("`VIRTUAL_ENV` environment variable"),
Self::CondaPrefixVar => f.write_str("`CONDA_PREFIX` environment variable"),
Self::Derived => f.write_str("derived `sys.prefix` path"),
Self::LocalVenv => f.write_str("local virtual environment"),
}

View File

@@ -2567,13 +2567,9 @@ impl<'db> Type<'db> {
// `Type::NominalInstance(type)` is equivalent to looking up the name in the
// MRO of the class `object`.
Type::NominalInstance(instance) if instance.class.is_known(db, KnownClass::Type) => {
if policy.mro_no_object_fallback() {
Some(Symbol::Unbound.into())
} else {
KnownClass::Object
.to_class_literal(db)
.find_name_in_mro_with_policy(db, name, policy)
}
KnownClass::Object
.to_class_literal(db)
.find_name_in_mro_with_policy(db, name, policy)
}
Type::FunctionLiteral(_)
@@ -3200,28 +3196,6 @@ impl<'db> Type<'db> {
.into()
};
let custom_getattribute_result = || {
// Avoid cycles when looking up `__getattribute__`
if "__getattribute__" == name.as_str() {
return Symbol::Unbound.into();
}
// Typeshed has a `__getattribute__` method defined on `builtins.object` so we
// explicitly hide it here using `MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK`.
self.try_call_dunder_with_policy(
db,
"__getattribute__",
&mut CallArgumentTypes::positional([Type::StringLiteral(
StringLiteralType::new(db, Box::from(name.as_str())),
)]),
MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK,
)
.map(|outcome| Symbol::bound(outcome.return_type(db)))
// TODO: Handle call errors here.
.unwrap_or(Symbol::Unbound)
.into()
};
match result {
member @ SymbolAndQualifiers {
symbol: Symbol::Type(_, Boundness::Bound),
@@ -3230,13 +3204,11 @@ impl<'db> Type<'db> {
member @ SymbolAndQualifiers {
symbol: Symbol::Type(_, Boundness::PossiblyUnbound),
qualifiers: _,
} => member
.or_fall_back_to(db, custom_getattribute_result)
.or_fall_back_to(db, custom_getattr_result),
} => member.or_fall_back_to(db, custom_getattr_result),
SymbolAndQualifiers {
symbol: Symbol::Unbound,
qualifiers: _,
} => custom_getattribute_result().or_fall_back_to(db, custom_getattr_result),
} => custom_getattr_result(),
}
}
@@ -4414,7 +4386,7 @@ impl<'db> Type<'db> {
db,
name,
&mut argument_types,
MemberLookupPolicy::default(),
MemberLookupPolicy::NO_INSTANCE_FALLBACK,
)
}
@@ -4422,9 +4394,6 @@ impl<'db> Type<'db> {
/// particular, this allows specifying `MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK` to avoid
/// looking up dunder methods on `object`, which is needed for functions like `__init__`,
/// `__new__`, or `__setattr__`.
///
/// Note that `NO_INSTANCE_FALLBACK` is always added to the policy, since implicit calls to
/// dunder methods never access instance members.
fn try_call_dunder_with_policy(
self,
db: &'db dyn Db,
@@ -4432,14 +4401,8 @@ impl<'db> Type<'db> {
argument_types: &mut CallArgumentTypes<'_, 'db>,
policy: MemberLookupPolicy,
) -> Result<Bindings<'db>, CallDunderError<'db>> {
// Implicit calls to dunder methods never access instance members, so we pass
// `NO_INSTANCE_FALLBACK` here in addition to other policies:
match self
.member_lookup_with_policy(
db,
name.into(),
policy | MemberLookupPolicy::NO_INSTANCE_FALLBACK,
)
.member_lookup_with_policy(db, name.into(), policy)
.symbol
{
Symbol::Type(dunder_callable, boundness) => {
@@ -7240,10 +7203,9 @@ impl<'db> FunctionType<'db> {
// However, our representation of a function literal includes any specialization that
// should be applied to the signature. Different specializations of the same function
// literal are only assignable to each other if they result in assignable signatures.
self.body_scope(db) == other.body_scope(db)
&& self
.into_callable_type(db)
.is_assignable_to(db, other.into_callable_type(db))
self.into_callable_type(db)
.is_assignable_to(db, other.into_callable_type(db))
}
fn is_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool {

View File

@@ -667,15 +667,15 @@ impl<'db> Bindings<'db> {
Some(KnownFunction::GetProtocolMembers) => {
if let [Some(Type::ClassLiteral(class))] = overload.parameter_types() {
if let Some(protocol_class) = class.into_protocol_class(db) {
let member_names = protocol_class
.interface(db)
.members(db)
.map(|member| Type::string_literal(db, member.name()));
let specialization = UnionType::from_elements(db, member_names);
overload.set_return_type(
KnownClass::FrozenSet
.to_specialized_instance(db, [specialization]),
);
// TODO: actually a frozenset at runtime (requires support for legacy generic classes)
overload.set_return_type(Type::Tuple(TupleType::new(
db,
protocol_class
.interface(db)
.members(db)
.map(|member| Type::string_literal(db, member.name()))
.collect::<Box<[Type<'db>]>>(),
)));
}
}
}

View File

@@ -3231,7 +3231,8 @@ impl<'db> TypeInferenceBuilder<'db> {
)),
value_ty,
]),
MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK,
MemberLookupPolicy::NO_INSTANCE_FALLBACK
| MemberLookupPolicy::MRO_NO_OBJECT_FALLBACK,
);
match result {
@@ -3405,31 +3406,24 @@ impl<'db> TypeInferenceBuilder<'db> {
Type::ModuleLiteral(module) => {
if let Symbol::Type(attr_ty, _) = module.static_member(db, attribute) {
let assignable = value_ty.is_assignable_to(db, attr_ty);
if assignable {
true
} else {
if emit_diagnostics {
report_invalid_attribute_assignment(
&self.context,
target.into(),
attr_ty,
value_ty,
attribute,
);
}
false
if !assignable {
report_invalid_attribute_assignment(
&self.context,
target.into(),
attr_ty,
value_ty,
attribute,
);
}
false
} else {
if emit_diagnostics {
if let Some(builder) =
self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target)
{
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`.",
attribute,
object_ty.display(db)
));
}
if let Some(builder) = self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target) {
builder.into_diagnostic(format_args!(
"Unresolved attribute `{}` on type `{}`.",
attribute,
object_ty.display(db)
));
}
false
@@ -3549,15 +3543,11 @@ impl<'db> TypeInferenceBuilder<'db> {
} = assignment;
let annotated =
self.infer_annotation_expression(annotation, DeferredExpressionState::None);
self.infer_optional_expression(value.as_deref());
// If we have an annotated assignment like `self.attr: int = 1`, we still need to
// do type inference on the `self.attr` target to get types for all sub-expressions.
if let Some(value) = value {
let value_ty = self.infer_expression(value);
self.infer_target(target, value, |_builder, _value_expr| value_ty);
} else {
self.infer_expression(target);
}
self.infer_expression(target);
// But here we explicitly overwrite the type for the overall `self.attr` node with
// the annotated type. We do not use `store_expression_type` here, because it checks
@@ -8652,16 +8642,11 @@ impl<'db> TypeInferenceBuilder<'db> {
element => Either::Right(std::iter::once(element)),
};
let ty = elements
elements
.fold(IntersectionBuilder::new(db), |builder, element| {
builder.add_positive(self.infer_type_expression(element))
})
.build();
if matches!(arguments_slice, ast::Expr::Tuple(_)) {
self.store_expression_type(arguments_slice, ty);
}
ty
.build()
}
KnownInstanceType::TypeOf => match arguments_slice {
ast::Expr::Tuple(_) => {

View File

@@ -26,7 +26,6 @@ jod-thread = { workspace = true }
lsp-server = { workspace = true }
lsp-types = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }

View File

@@ -1,5 +1,10 @@
//! Scheduling, I/O, and API endpoints.
use std::num::NonZeroUsize;
// The new `PanicHookInfo` name requires MSRV >= 1.82
#[expect(deprecated)]
use std::panic::PanicInfo;
use lsp_server::Message;
use lsp_types::{
ClientCapabilities, DiagnosticOptions, DiagnosticServerCapabilities,
@@ -8,8 +13,6 @@ use lsp_types::{
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TypeDefinitionProviderCapability, Url,
};
use std::num::NonZeroUsize;
use std::panic::PanicHookInfo;
use self::connection::{Connection, ConnectionInitializer};
use self::schedule::event_loop_thread;
@@ -122,7 +125,9 @@ impl Server {
}
pub(crate) fn run(self) -> crate::Result<()> {
type PanicHook = Box<dyn Fn(&PanicHookInfo<'_>) + 'static + Sync + Send>;
// The new `PanicHookInfo` name requires MSRV >= 1.82
#[expect(deprecated)]
type PanicHook = Box<dyn Fn(&PanicInfo<'_>) + 'static + Sync + Send>;
struct RestorePanicHook {
hook: Option<PanicHook>,
}
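`RestorePanicHook` reads as an RAII guard; a sketch of the `Drop` implementation it implies (an assumption, since the body falls outside this hunk):

```rust
impl Drop for RestorePanicHook {
    fn drop(&mut self) {
        // Re-install whatever hook was active before the server replaced it.
        if let Some(hook) = self.hook.take() {
            std::panic::set_hook(hook);
        }
    }
}
```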

View File

@@ -3,41 +3,42 @@ use crate::session::Session;
use crate::system::{AnySystemPath, url_to_any_system_path};
use anyhow::anyhow;
use lsp_server as server;
use lsp_server::RequestId;
use lsp_types::notification::Notification;
use ruff_db::panic::PanicError;
use std::panic::UnwindSafe;
mod diagnostics;
mod notifications;
mod requests;
mod traits;
use notifications as notification;
use requests as request;
use self::traits::{NotificationHandler, RequestHandler};
use super::{Result, client::Responder, schedule::BackgroundSchedule};
pub(super) fn request(req: server::Request) -> Task {
pub(super) fn request<'a>(req: server::Request) -> Task<'a> {
let id = req.id.clone();
match req.method.as_str() {
requests::DocumentDiagnosticRequestHandler::METHOD => background_request_task::<
requests::DocumentDiagnosticRequestHandler,
request::DocumentDiagnosticRequestHandler::METHOD => background_request_task::<
request::DocumentDiagnosticRequestHandler,
>(
req, BackgroundSchedule::Worker
),
requests::GotoTypeDefinitionRequestHandler::METHOD => background_request_task::<
requests::GotoTypeDefinitionRequestHandler,
request::GotoTypeDefinitionRequestHandler::METHOD => background_request_task::<
request::GotoTypeDefinitionRequestHandler,
>(
req, BackgroundSchedule::Worker
),
requests::HoverRequestHandler::METHOD => background_request_task::<
requests::HoverRequestHandler,
request::HoverRequestHandler::METHOD => {
background_request_task::<request::HoverRequestHandler>(req, BackgroundSchedule::Worker)
}
request::InlayHintRequestHandler::METHOD => background_request_task::<
request::InlayHintRequestHandler,
>(req, BackgroundSchedule::Worker),
requests::InlayHintRequestHandler::METHOD => background_request_task::<
requests::InlayHintRequestHandler,
>(req, BackgroundSchedule::Worker),
requests::CompletionRequestHandler::METHOD => background_request_task::<
requests::CompletionRequestHandler,
request::CompletionRequestHandler::METHOD => background_request_task::<
request::CompletionRequestHandler,
>(
req, BackgroundSchedule::LatencySensitive
),
@@ -61,25 +62,25 @@ pub(super) fn request(req: server::Request) -> Task {
})
}
pub(super) fn notification(notif: server::Notification) -> Task {
pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
match notif.method.as_str() {
notifications::DidCloseTextDocumentHandler::METHOD => {
local_notification_task::<notifications::DidCloseTextDocumentHandler>(notif)
notification::DidCloseTextDocumentHandler::METHOD => {
local_notification_task::<notification::DidCloseTextDocumentHandler>(notif)
}
notifications::DidOpenTextDocumentHandler::METHOD => {
local_notification_task::<notifications::DidOpenTextDocumentHandler>(notif)
notification::DidOpenTextDocumentHandler::METHOD => {
local_notification_task::<notification::DidOpenTextDocumentHandler>(notif)
}
notifications::DidChangeTextDocumentHandler::METHOD => {
local_notification_task::<notifications::DidChangeTextDocumentHandler>(notif)
notification::DidChangeTextDocumentHandler::METHOD => {
local_notification_task::<notification::DidChangeTextDocumentHandler>(notif)
}
notifications::DidOpenNotebookHandler::METHOD => {
local_notification_task::<notifications::DidOpenNotebookHandler>(notif)
notification::DidOpenNotebookHandler::METHOD => {
local_notification_task::<notification::DidOpenNotebookHandler>(notif)
}
notifications::DidCloseNotebookHandler::METHOD => {
local_notification_task::<notifications::DidCloseNotebookHandler>(notif)
notification::DidCloseNotebookHandler::METHOD => {
local_notification_task::<notification::DidCloseNotebookHandler>(notif)
}
notifications::DidChangeWatchedFiles::METHOD => {
local_notification_task::<notifications::DidChangeWatchedFiles>(notif)
notification::DidChangeWatchedFiles::METHOD => {
local_notification_task::<notification::DidChangeWatchedFiles>(notif)
}
lsp_types::notification::SetTrace::METHOD => {
tracing::trace!("Ignoring `setTrace` notification");
@@ -100,25 +101,25 @@ pub(super) fn notification(notif: server::Notification) -> Task {
})
}
fn _local_request_task<R: traits::SyncRequestHandler>(req: server::Request) -> super::Result<Task>
where
<<R as RequestHandler>::RequestType as lsp_types::request::Request>::Params: UnwindSafe,
{
fn _local_request_task<'a, R: traits::SyncRequestHandler>(
req: server::Request,
) -> super::Result<Task<'a>> {
let (id, params) = cast_request::<R>(req)?;
Ok(Task::local(|session, notifier, requester, responder| {
let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered();
let _span = tracing::trace_span!("request", %id, method = R::METHOD).entered();
let result = R::run(session, notifier, requester, params);
respond::<R>(id, result, &responder);
}))
}
fn background_request_task<R: traits::BackgroundDocumentRequestHandler>(
// TODO(micha): Calls to `db` could panic if the db gets mutated while this task is running.
// We should either wrap `R::run_with_snapshot` with a salsa catch cancellation handler or
// use `SemanticModel` instead of passing `db`, which uses a `Result` for all its methods
// that propagate cancellations.
fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
req: server::Request,
schedule: BackgroundSchedule,
) -> super::Result<Task>
where
<<R as RequestHandler>::RequestType as lsp_types::request::Request>::Params: UnwindSafe,
{
) -> super::Result<Task<'a>> {
let (id, params) = cast_request::<R>(req)?;
Ok(Task::background(schedule, move |session: &Session| {
let url = R::document_url(&params).into_owned();
@@ -127,7 +128,6 @@ where
tracing::warn!("Ignoring request for invalid `{url}`");
return Box::new(|_, _| {});
};
let db = match &path {
AnySystemPath::System(path) => match session.project_db_for_path(path.as_std_path()) {
Some(db) => db.clone(),
@@ -142,55 +142,19 @@ where
};
Box::new(move |notifier, responder| {
let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered();
let result = ruff_db::panic::catch_unwind(|| {
R::run_with_snapshot(&db, snapshot, notifier, params)
});
if let Some(response) = request_result_to_response(&id, &responder, result) {
respond::<R>(id, response, &responder);
}
let _span = tracing::trace_span!("request", %id, method = R::METHOD).entered();
let result = R::run_with_snapshot(&db, snapshot, notifier, params);
respond::<R>(id, result, &responder);
})
}))
}
fn request_result_to_response<R>(
id: &RequestId,
responder: &Responder,
result: std::result::Result<Result<R>, PanicError>,
) -> Option<Result<R>> {
match result {
Ok(response) => Some(response),
Err(error) => {
if error.payload.downcast_ref::<salsa::Cancelled>().is_some() {
// Request was cancelled by Salsa. TODO: Retry
respond_silent_error(
id.clone(),
responder,
Error {
code: lsp_server::ErrorCode::ContentModified,
error: anyhow!("content modified"),
},
);
None
} else {
let message = format!("request handler {error}");
Some(Err(Error {
code: lsp_server::ErrorCode::InternalError,
error: anyhow!(message),
}))
}
}
}
}
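`ErrorCode::ContentModified` is the LSP's dedicated signal that the underlying content changed while the request was in flight; conforming clients discard or retry such responses rather than surfacing an error, which is why the cancelled branch responds silently instead of reporting a failure.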
fn local_notification_task<N: traits::SyncNotificationHandler>(
fn local_notification_task<'a, N: traits::SyncNotificationHandler>(
notif: server::Notification,
) -> super::Result<Task> {
) -> super::Result<Task<'a>> {
let (id, params) = cast_notification::<N>(notif)?;
Ok(Task::local(move |session, notifier, requester, _| {
let _span = tracing::debug_span!("notification", method = N::METHOD).entered();
let _span = tracing::trace_span!("notification", method = N::METHOD).entered();
if let Err(err) = N::run(session, notifier, requester, params) {
tracing::error!("An error occurred while running {id}: {err}");
show_err_msg!("ty encountered a problem. Check the logs for more details.");
@@ -199,15 +163,10 @@ fn local_notification_task<N: traits::SyncNotificationHandler>(
}
#[expect(dead_code)]
fn background_notification_thread<N>(
fn background_notification_thread<'a, N: traits::BackgroundDocumentNotificationHandler>(
req: server::Notification,
schedule: BackgroundSchedule,
) -> super::Result<Task>
where
N: traits::BackgroundDocumentNotificationHandler,
<<N as NotificationHandler>::NotificationType as lsp_types::notification::Notification>::Params:
UnwindSafe,
{
) -> super::Result<Task<'a>> {
let (id, params) = cast_notification::<N>(req)?;
Ok(Task::background(schedule, move |session: &Session| {
let url = N::document_url(&params);
@@ -218,20 +177,8 @@ where
return Box::new(|_, _| {});
};
Box::new(move |notifier, _| {
let _span = tracing::debug_span!("notification", method = N::METHOD).entered();
let result = match ruff_db::panic::catch_unwind(|| {
N::run_with_snapshot(snapshot, notifier, params)
}) {
Ok(result) => result,
Err(panic) => {
tracing::error!("An error occurred while running {id}: {panic}");
show_err_msg!("ty encountered a panic. Check the logs for more details.");
return;
}
};
if let Err(err) = result {
let _span = tracing::trace_span!("notification", method = N::METHOD).entered();
if let Err(err) = N::run_with_snapshot(snapshot, notifier, params) {
tracing::error!("An error occurred while running {id}: {err}");
show_err_msg!("ty encountered a problem. Check the logs for more details.");
}
@@ -251,7 +198,6 @@ fn cast_request<Req>(
)>
where
Req: traits::RequestHandler,
<<Req as RequestHandler>::RequestType as lsp_types::request::Request>::Params: UnwindSafe,
{
request
.extract(Req::METHOD)
@@ -286,14 +232,6 @@ fn respond<Req>(
}
}
/// Sends back an error response to the server using a [`Responder`] without showing a warning
/// to the user.
fn respond_silent_error(id: server::RequestId, responder: &Responder, error: Error) {
if let Err(err) = responder.respond::<()>(id, Err(error)) {
tracing::error!("Failed to send response: {err}");
}
}
/// Tries to cast a serialized request from the server into
/// a parameter type for a specific request handler.
fn cast_notification<N>(
@@ -302,9 +240,7 @@ fn cast_notification<N>(
(
&'static str,
<<N as traits::NotificationHandler>::NotificationType as lsp_types::notification::Notification>::Params,
)> where
N: traits::NotificationHandler,
{
)> where N: traits::NotificationHandler {
Ok((
N::METHOD,
notification

View File

@@ -1,18 +1,6 @@
use lsp_server::ErrorCode;
use lsp_types::notification::PublishDiagnostics;
use lsp_types::{
Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, NumberOrString,
PublishDiagnosticsParams, Range, Url,
};
use lsp_types::{PublishDiagnosticsParams, Url, notification::PublishDiagnostics};
use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic};
use ruff_db::files::FileRange;
use ruff_db::source::{line_index, source_text};
use ty_project::{Db, ProjectDatabase};
use crate::DocumentSnapshot;
use crate::PositionEncoding;
use crate::document::{FileRangeExt, ToRangeExt};
use crate::server::Result;
use crate::server::client::Notifier;
@@ -28,146 +16,3 @@ pub(super) fn clear_diagnostics(uri: &Url, notifier: &Notifier) -> Result<()> {
.with_failure_code(ErrorCode::InternalError)?;
Ok(())
}
pub(super) fn compute_diagnostics(
db: &ProjectDatabase,
snapshot: &DocumentSnapshot,
) -> Vec<Diagnostic> {
let Some(file) = snapshot.file(db) else {
tracing::info!(
"No file found for snapshot for `{}`",
snapshot.query().file_url()
);
return vec![];
};
let diagnostics = db.check_file(file);
diagnostics
.as_slice()
.iter()
.map(|message| to_lsp_diagnostic(db, message, snapshot.encoding()))
.collect()
}
/// Converts the tool specific [`Diagnostic`][ruff_db::diagnostic::Diagnostic] to an LSP
/// [`Diagnostic`].
fn to_lsp_diagnostic(
db: &dyn Db,
diagnostic: &ruff_db::diagnostic::Diagnostic,
encoding: PositionEncoding,
) -> Diagnostic {
let range = if let Some(span) = diagnostic.primary_span() {
let file = span.expect_ty_file();
let index = line_index(db.upcast(), file);
let source = source_text(db.upcast(), file);
span.range()
.map(|range| range.to_lsp_range(&source, &index, encoding))
.unwrap_or_default()
} else {
Range::default()
};
let severity = match diagnostic.severity() {
Severity::Info => DiagnosticSeverity::INFORMATION,
Severity::Warning => DiagnosticSeverity::WARNING,
Severity::Error | Severity::Fatal => DiagnosticSeverity::ERROR,
};
let tags = diagnostic
.primary_tags()
.map(|tags| {
tags.iter()
.map(|tag| match tag {
ruff_db::diagnostic::DiagnosticTag::Unnecessary => DiagnosticTag::UNNECESSARY,
ruff_db::diagnostic::DiagnosticTag::Deprecated => DiagnosticTag::DEPRECATED,
})
.collect::<Vec<DiagnosticTag>>()
})
.filter(|mapped_tags| !mapped_tags.is_empty());
let code_description = diagnostic
.id()
.is_lint()
.then(|| {
Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&format!("https://ty.dev/rules#{}", diagnostic.id()))
.ok()?,
})
})
.flatten();
let mut related_information = Vec::new();
related_information.extend(
diagnostic
.secondary_annotations()
.filter_map(|annotation| annotation_to_related_information(db, annotation, encoding)),
);
for sub_diagnostic in diagnostic.sub_diagnostics() {
related_information.extend(sub_diagnostic_to_related_information(
db,
sub_diagnostic,
encoding,
));
related_information.extend(
sub_diagnostic
.annotations()
.iter()
.filter_map(|annotation| {
annotation_to_related_information(db, annotation, encoding)
}),
);
}
Diagnostic {
range,
severity: Some(severity),
tags,
code: Some(NumberOrString::String(diagnostic.id().to_string())),
code_description,
source: Some("ty".into()),
message: diagnostic.concise_message().to_string(),
related_information: Some(related_information),
data: None,
}
}
/// Converts an [`Annotation`] to a [`DiagnosticRelatedInformation`].
fn annotation_to_related_information(
db: &dyn Db,
annotation: &Annotation,
encoding: PositionEncoding,
) -> Option<DiagnosticRelatedInformation> {
let span = annotation.get_span();
let annotation_message = annotation.get_message()?;
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
Some(DiagnosticRelatedInformation {
location,
message: annotation_message.to_string(),
})
}
/// Converts a [`SubDiagnostic`] to a [`DiagnosticRelatedInformation`].
fn sub_diagnostic_to_related_information(
db: &dyn Db,
diagnostic: &SubDiagnostic,
encoding: PositionEncoding,
) -> Option<DiagnosticRelatedInformation> {
let primary_annotation = diagnostic.primary_annotation()?;
let span = primary_annotation.get_span();
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
Some(DiagnosticRelatedInformation {
location,
message: diagnostic.concise_message().to_string(),
})
}

View File

@@ -2,15 +2,20 @@ use std::borrow::Cow;
use lsp_types::request::DocumentDiagnosticRequest;
use lsp_types::{
DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
FullDocumentDiagnosticReport, RelatedFullDocumentDiagnosticReport,
Diagnostic, DiagnosticSeverity, DiagnosticTag, DocumentDiagnosticParams,
DocumentDiagnosticReport, DocumentDiagnosticReportResult, FullDocumentDiagnosticReport,
NumberOrString, Range, RelatedFullDocumentDiagnosticReport,
};
use crate::server::api::diagnostics::compute_diagnostics;
use crate::PositionEncoding;
use crate::document::{FileRangeExt, ToRangeExt};
use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler};
use crate::server::{Result, client::Notifier};
use crate::session::DocumentSnapshot;
use ty_project::ProjectDatabase;
use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic};
use ruff_db::files::FileRange;
use ruff_db::source::{line_index, source_text};
use ty_project::{Db, ProjectDatabase};
pub(crate) struct DocumentDiagnosticRequestHandler;
@@ -29,7 +34,7 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
_notifier: Notifier,
_params: DocumentDiagnosticParams,
) -> Result<DocumentDiagnosticReportResult> {
let diagnostics = compute_diagnostics(db, &snapshot);
let diagnostics = compute_diagnostics(&snapshot, db);
Ok(DocumentDiagnosticReportResult::Report(
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
@@ -42,3 +47,145 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
))
}
}
fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &ProjectDatabase) -> Vec<Diagnostic> {
let Some(file) = snapshot.file(db) else {
tracing::info!(
"No file found for snapshot for `{}`",
snapshot.query().file_url()
);
return vec![];
};
let diagnostics = match db.check_file(file) {
Ok(diagnostics) => diagnostics,
Err(cancelled) => {
tracing::info!("Diagnostics computation {cancelled}");
return vec![];
}
};
diagnostics
.as_slice()
.iter()
.map(|message| to_lsp_diagnostic(db, message, snapshot.encoding()))
.collect()
}
fn to_lsp_diagnostic(
db: &dyn Db,
diagnostic: &ruff_db::diagnostic::Diagnostic,
encoding: crate::PositionEncoding,
) -> Diagnostic {
let range = if let Some(span) = diagnostic.primary_span() {
let file = span.expect_ty_file();
let index = line_index(db.upcast(), file);
let source = source_text(db.upcast(), file);
span.range()
.map(|range| range.to_lsp_range(&source, &index, encoding))
.unwrap_or_default()
} else {
Range::default()
};
let severity = match diagnostic.severity() {
Severity::Info => DiagnosticSeverity::INFORMATION,
Severity::Warning => DiagnosticSeverity::WARNING,
Severity::Error | Severity::Fatal => DiagnosticSeverity::ERROR,
};
let tags = diagnostic
.primary_tags()
.map(|tags| {
tags.iter()
.map(|tag| match tag {
ruff_db::diagnostic::DiagnosticTag::Unnecessary => DiagnosticTag::UNNECESSARY,
ruff_db::diagnostic::DiagnosticTag::Deprecated => DiagnosticTag::DEPRECATED,
})
.collect::<Vec<DiagnosticTag>>()
})
.filter(|mapped_tags| !mapped_tags.is_empty());
let code_description = diagnostic
.id()
.is_lint()
.then(|| {
Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&format!("https://ty.dev/rules#{}", diagnostic.id()))
.ok()?,
})
})
.flatten();
let mut related_information = Vec::new();
related_information.extend(
diagnostic
.secondary_annotations()
.filter_map(|annotation| annotation_to_related_information(db, annotation, encoding)),
);
for sub_diagnostic in diagnostic.sub_diagnostics() {
related_information.extend(sub_diagnostic_to_related_information(
db,
sub_diagnostic,
encoding,
));
related_information.extend(
sub_diagnostic
.annotations()
.iter()
.filter_map(|annotation| {
annotation_to_related_information(db, annotation, encoding)
}),
);
}
Diagnostic {
range,
severity: Some(severity),
tags,
code: Some(NumberOrString::String(diagnostic.id().to_string())),
code_description,
source: Some("ty".into()),
message: diagnostic.concise_message().to_string(),
related_information: Some(related_information),
data: None,
}
}
fn annotation_to_related_information(
db: &dyn Db,
annotation: &Annotation,
encoding: PositionEncoding,
) -> Option<lsp_types::DiagnosticRelatedInformation> {
let span = annotation.get_span();
let annotation_message = annotation.get_message()?;
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
Some(lsp_types::DiagnosticRelatedInformation {
location,
message: annotation_message.to_string(),
})
}
fn sub_diagnostic_to_related_information(
db: &dyn Db,
diagnostic: &SubDiagnostic,
encoding: PositionEncoding,
) -> Option<lsp_types::DiagnosticRelatedInformation> {
let primary_annotation = diagnostic.primary_annotation()?;
let span = primary_annotation.get_span();
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
Some(lsp_types::DiagnosticRelatedInformation {
location,
message: diagnostic.concise_message().to_string(),
})
}
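Note that both `Severity::Error` and `Severity::Fatal` collapse into `DiagnosticSeverity::ERROR` in the mapping above, since the LSP diagnostic model defines no level beyond error.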

View File

@@ -6,12 +6,12 @@ use serde_json::Value;
use super::{ClientSender, schedule::Task};
type ResponseBuilder = Box<dyn FnOnce(lsp_server::Response) -> Task>;
type ResponseBuilder<'s> = Box<dyn FnOnce(lsp_server::Response) -> Task<'s>>;
pub(crate) struct Client {
pub(crate) struct Client<'s> {
notifier: Notifier,
responder: Responder,
pub(super) requester: Requester,
pub(super) requester: Requester<'s>,
}
#[derive(Clone)]
@@ -20,13 +20,13 @@ pub(crate) struct Notifier(ClientSender);
#[derive(Clone)]
pub(crate) struct Responder(ClientSender);
pub(crate) struct Requester {
pub(crate) struct Requester<'s> {
sender: ClientSender,
next_request_id: i32,
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder>,
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
}
impl Client {
impl Client<'_> {
pub(super) fn new(sender: ClientSender) -> Self {
Self {
notifier: Notifier(sender.clone()),
@@ -91,14 +91,14 @@ impl Responder {
}
}
impl Requester {
impl<'s> Requester<'s> {
/// Sends a request of kind `R` to the client, with associated parameters.
/// The task provided by `response_handler` will be dispatched as soon as the response
/// comes back from the client.
pub(crate) fn request<R>(
&mut self,
params: R::Params,
response_handler: impl Fn(R::Result) -> Task + 'static,
response_handler: impl Fn(R::Result) -> Task<'s> + 'static,
) -> crate::Result<()>
where
R: lsp_types::request::Request,
@@ -155,7 +155,7 @@ impl Requester {
Ok(())
}
pub(crate) fn pop_response_task(&mut self, response: lsp_server::Response) -> Task {
pub(crate) fn pop_response_task(&mut self, response: lsp_server::Response) -> Task<'s> {
if let Some(handler) = self.response_handlers.remove(&response.id) {
handler(response)
} else {
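For context, a hypothetical call site for `Requester::request` under the new `'s` lifetime (a sketch only, not code from this change; `WorkDoneProgressCreate` is just a convenient request whose result type is `()`):

requester.request::<lsp_types::request::WorkDoneProgressCreate>(
    lsp_types::WorkDoneProgressCreateParams {
        token: lsp_types::ProgressToken::String("check".into()),
    },
    // The handler no longer needs to be 'static: `ResponseBuilder<'s>` lets it
    // borrow state that lives as long as the scheduler.
    |()| {
        Task::local(|_session, _notifier, _requester, _responder| {
            // Runs on the main loop thread once the client's response arrives.
        })
    },
)?;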

View File

@@ -34,7 +34,7 @@ pub(crate) fn event_loop_thread(
pub(crate) struct Scheduler<'s> {
session: &'s mut Session,
client: Client,
client: Client<'s>,
fmt_pool: thread::Pool,
background_pool: thread::Pool,
}
@@ -60,7 +60,7 @@ impl<'s> Scheduler<'s> {
pub(super) fn request<R>(
&mut self,
params: R::Params,
response_handler: impl Fn(R::Result) -> Task + 'static,
response_handler: impl Fn(R::Result) -> Task<'s> + 'static,
) -> crate::Result<()>
where
R: lsp_types::request::Request,
@@ -69,13 +69,13 @@ impl<'s> Scheduler<'s> {
}
/// Creates a task to handle a response from the client.
pub(super) fn response(&mut self, response: lsp_server::Response) -> Task {
pub(super) fn response(&mut self, response: lsp_server::Response) -> Task<'s> {
self.client.requester.pop_response_task(response)
}
/// Dispatches a `task` by either running it as a blocking function or
/// executing it on a background thread pool.
pub(super) fn dispatch(&mut self, task: task::Task) {
pub(super) fn dispatch(&mut self, task: task::Task<'s>) {
match task {
Task::Sync(SyncTask { func }) => {
let notifier = self.client.notifier();

View File

@@ -6,11 +6,11 @@ use crate::{
session::Session,
};
type LocalFn = Box<dyn FnOnce(&mut Session, Notifier, &mut Requester, Responder)>;
type LocalFn<'s> = Box<dyn FnOnce(&mut Session, Notifier, &mut Requester, Responder) + 's>;
type BackgroundFn = Box<dyn FnOnce(Notifier, Responder) + Send + 'static>;
type BackgroundFnBuilder = Box<dyn FnOnce(&Session) -> BackgroundFn>;
type BackgroundFnBuilder<'s> = Box<dyn FnOnce(&Session) -> BackgroundFn + 's>;
/// Describes how the task should be run.
#[derive(Clone, Copy, Debug, Default)]
@@ -36,9 +36,9 @@ pub(in crate::server) enum BackgroundSchedule {
/// while local tasks have exclusive access and can modify it as they please. Keep in mind that
/// local tasks will **block** the main event loop, so only use local tasks if you **need**
/// mutable state access or you need the absolute lowest latency possible.
pub(in crate::server) enum Task {
Background(BackgroundTaskBuilder),
Sync(SyncTask),
pub(in crate::server) enum Task<'s> {
Background(BackgroundTaskBuilder<'s>),
Sync(SyncTask<'s>),
}
// The reason why this isn't just a 'static background closure
@@ -49,31 +49,30 @@ pub(in crate::server) enum Task {
// that the inner closure can capture. This builder closure has a lifetime linked to the scheduler.
// When the task is dispatched, the scheduler runs the synchronous builder, which takes the session
// as a reference, to create the inner 'static closure. That closure is then moved to a background task pool.
pub(in crate::server) struct BackgroundTaskBuilder {
pub(in crate::server) struct BackgroundTaskBuilder<'s> {
pub(super) schedule: BackgroundSchedule,
pub(super) builder: BackgroundFnBuilder,
pub(super) builder: BackgroundFnBuilder<'s>,
}
pub(in crate::server) struct SyncTask {
pub(super) func: LocalFn,
pub(in crate::server) struct SyncTask<'s> {
pub(super) func: LocalFn<'s>,
}
impl Task {
impl<'s> Task<'s> {
/// Creates a new background task.
pub(crate) fn background<F>(schedule: BackgroundSchedule, func: F) -> Self
where
F: FnOnce(&Session) -> Box<dyn FnOnce(Notifier, Responder) + Send + 'static> + 'static,
{
pub(crate) fn background(
schedule: BackgroundSchedule,
func: impl FnOnce(&Session) -> Box<dyn FnOnce(Notifier, Responder) + Send + 'static> + 's,
) -> Self {
Self::Background(BackgroundTaskBuilder {
schedule,
builder: Box::new(func),
})
}
/// Creates a new local task.
pub(crate) fn local<F>(func: F) -> Self
where
F: FnOnce(&mut Session, Notifier, &mut Requester, Responder) + 'static,
{
pub(crate) fn local(
func: impl FnOnce(&mut Session, Notifier, &mut Requester, Responder) + 's,
) -> Self {
Self::Sync(SyncTask {
func: Box::new(func),
})
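The two-stage pattern described in the comment above can be shown in isolation. A self-contained toy (illustrative names, not this crate's API): the outer builder closure may borrow the session, which is why it only needs the new `'s` bound, while the closure it returns owns its data and is the only thing that crosses the thread boundary.

struct Session {
    documents: Vec<String>,
}

// The inner job must be Send + 'static because it moves to a worker thread.
type BackgroundFn = Box<dyn FnOnce() + Send + 'static>;

fn main() {
    let session = Session { documents: vec!["main.py".into()] };

    // Stage 1: the builder borrows the session and snapshots what it needs.
    let builder = |session: &Session| -> BackgroundFn {
        let snapshot = session.documents.clone();
        // Stage 2: the returned closure owns `snapshot`, so it is 'static.
        Box::new(move || println!("checking {snapshot:?}"))
    };

    // The scheduler runs the builder synchronously (the session borrow ends here)...
    let job = builder(&session);
    // ...and only the owned, 'static job is handed to a background thread.
    std::thread::spawn(job).join().unwrap();
}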

View File

@@ -13,8 +13,6 @@
//! The thread pool is implemented entirely using
//! the threading utilities in [`crate::server::schedule::thread`].
use crossbeam::channel::{Receiver, Sender};
use std::panic::AssertUnwindSafe;
use std::{
num::NonZeroUsize,
sync::{
@@ -23,6 +21,8 @@ use std::{
},
};
use crossbeam::channel::{Receiver, Sender};
use super::{Builder, JoinHandle, ThreadPriority};
pub(crate) struct Pool {
@@ -51,7 +51,8 @@ impl Pool {
let threads = usize::from(threads);
let (job_sender, job_receiver) = crossbeam::channel::bounded(std::cmp::max(threads * 2, 4));
// Channel buffer capacity is between 2 and 4, depending on the pool size.
let (job_sender, job_receiver) = crossbeam::channel::bounded(std::cmp::min(threads * 2, 4));
let extant_tasks = Arc::new(AtomicUsize::new(0));
let mut handles = Vec::with_capacity(threads);
@@ -70,33 +71,7 @@ impl Pool {
current_priority = job.requested_priority;
}
extant_tasks.fetch_add(1, Ordering::SeqCst);
// SAFETY: it's safe to assume that `job.f` is unwind safe because we always
// abort the process if it panics.
// Panicking here ensures that we don't swallow errors and is the same as
// what rayon does.
// Any recovery should be implemented outside the thread pool (e.g. when
// dispatching requests/notifications etc).
if let Err(error) = std::panic::catch_unwind(AssertUnwindSafe(job.f)) {
if let Some(msg) = error.downcast_ref::<String>() {
tracing::error!("Worker thread panicked with: {msg}; aborting");
} else if let Some(msg) = error.downcast_ref::<&str>() {
tracing::error!("Worker thread panicked with: {msg}; aborting");
} else if let Some(cancelled) =
error.downcast_ref::<salsa::Cancelled>()
{
tracing::error!(
"Worker thread got cancelled: {cancelled}; aborting"
);
} else {
tracing::error!(
"Worker thread panicked with: {error:?}; aborting"
);
}
std::process::abort();
}
(job.f)();
extant_tasks.fetch_sub(1, Ordering::SeqCst);
}
}
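The hunk above removes the abort-on-panic wrapper and calls `job.f` directly. For reference, the removed pattern in isolation (a minimal sketch: the real code also special-cases `salsa::Cancelled` and logs through `tracing` rather than `eprintln!`):

use std::panic::{AssertUnwindSafe, catch_unwind};

fn run_job(job: Box<dyn FnOnce() + Send>) {
    // AssertUnwindSafe is justified because we abort on panic, so no code
    // can observe potentially broken invariants afterwards.
    if let Err(payload) = catch_unwind(AssertUnwindSafe(job)) {
        // Panic payloads are `Box<dyn Any + Send>`; try the common cases.
        if let Some(msg) = payload.downcast_ref::<String>() {
            eprintln!("worker thread panicked with: {msg}; aborting");
        } else if let Some(msg) = payload.downcast_ref::<&str>() {
            eprintln!("worker thread panicked with: {msg}; aborting");
        } else {
            eprintln!("worker thread panicked; aborting");
        }
        // Aborting (as rayon does) ensures panics are never silently swallowed.
        std::process::abort();
    }
}

fn main() {
    run_job(Box::new(|| println!("ok")));
}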

View File

@@ -40,8 +40,6 @@ log = { workspace = true }
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
getrandom = { workspace = true, features = ["wasm_js"] }
serde-wasm-bindgen = { workspace = true }
# Not a direct dependency but required to compile for Wasm.
uuid = { workspace = true, features = ["js"] }
wasm-bindgen = { workspace = true }

View File

@@ -1,90 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
process::Command,
};
fn main() {
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.join("..")
.join("..");
commit_info(&workspace_root);
}
/// Retrieve commit information from the Git repository.
fn commit_info(workspace_root: &Path) {
// If not in a git repository, do not attempt to retrieve commit information
let git_dir = workspace_root.join(".git");
if !git_dir.exists() {
return;
}
if let Some(git_head_path) = git_head(&git_dir) {
println!("cargo:rerun-if-changed={}", git_head_path.display());
let git_head_contents = fs::read_to_string(git_head_path);
if let Ok(git_head_contents) = git_head_contents {
// The contents are either a commit hash or a reference, in one of these formats:
// - "<commit>" when the head is detached
// - "ref: <ref>" when working on a branch
// If it's a commit, watching the HEAD file for changes is sufficient.
// If it's a ref, we also need to watch the file for that ref so that a new commit triggers a rebuild.
let mut git_ref_parts = git_head_contents.split_whitespace();
git_ref_parts.next();
if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
println!("cargo:rerun-if-changed={}", git_ref_path.display());
}
}
}
let output = match Command::new("git")
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--abbrev=9")
.arg("--format=%H %h %cd %(describe:tags)")
.current_dir(workspace_root)
.output()
{
Ok(output) if output.status.success() => output,
_ => return,
};
let stdout = String::from_utf8(output.stdout).unwrap();
let mut parts = stdout.split_whitespace();
let mut next = || parts.next().unwrap();
let _commit_hash = next();
println!("cargo::rustc-env=TY_WASM_COMMIT_SHORT_HASH={}", next());
}
fn git_head(git_dir: &Path) -> Option<PathBuf> {
// The typical case is a standard git repository.
let git_head_path = git_dir.join("HEAD");
if git_head_path.exists() {
return Some(git_head_path);
}
if !git_dir.is_file() {
return None;
}
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
// then try to read it as a worktree. If it's
// a worktree, then its contents will look like this, e.g.:
//
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
//
// And the HEAD file we want to watch will be at:
//
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
let contents = fs::read_to_string(git_dir).ok()?;
let (label, worktree_path) = contents.split_once(':')?;
if label != "gitdir" {
return None;
}
let worktree_path = worktree_path.trim();
Some(PathBuf::from(worktree_path))
}
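Since this build script is deleted by the change, here is a tiny standalone illustration (assumed inputs) of the HEAD-parsing logic above, showing why splitting on whitespace handles both the detached and on-branch formats:

fn main() {
    for contents in ["4f9f7c8a1b2c3d4e5f60718293a4b5c6d7e8f901", "ref: refs/heads/main"] {
        let mut parts = contents.split_whitespace();
        parts.next(); // either the bare commit hash or the literal "ref:"
        match parts.next() {
            // On a branch: also watch the ref's file so new commits rebuild.
            Some(git_ref) => println!("watch .git/HEAD and .git/{git_ref}"),
            // Detached HEAD: the hash lives in HEAD itself; watching it suffices.
            None => println!("watch .git/HEAD only"),
        }
    }
}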

View File

@@ -23,14 +23,6 @@ use ty_project::{Db, ProjectDatabase};
use ty_python_semantic::Program;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
pub fn version() -> String {
option_env!("TY_WASM_COMMIT_SHORT_HASH")
.or_else(|| option_env!("CARGO_PKG_VERSION"))
.unwrap_or("unknown")
.to_string()
}
#[wasm_bindgen(start)]
pub fn run() {
use log::Level;
@@ -187,14 +179,14 @@ impl Workspace {
/// Checks a single file.
#[wasm_bindgen(js_name = "checkFile")]
pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<Diagnostic>, Error> {
let result = self.db.check_file(file_id.file);
let result = self.db.check_file(file_id.file).map_err(into_error)?;
Ok(result.into_iter().map(Diagnostic::wrap).collect())
}
/// Checks all open files.
pub fn check(&self) -> Result<Vec<Diagnostic>, Error> {
let result = self.db.check();
let result = self.db.check().map_err(into_error)?;
Ok(result.into_iter().map(Diagnostic::wrap).collect())
}

View File

@@ -1,5 +1,5 @@
PyYAML==6.0.2
ruff==0.11.11
ruff==0.11.10
mkdocs==1.6.1
mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.git@39da7a5e761410349e9a1b8abf593b0cdd5453ff
mkdocs-redirects==1.2.2

View File

@@ -1,5 +1,5 @@
PyYAML==6.0.2
ruff==0.11.11
ruff==0.11.10
mkdocs==1.6.1
mkdocs-material==9.5.38
mkdocs-redirects==1.2.2

View File

@@ -32,22 +32,6 @@
"node": ">=18.0.0"
}
},
"node_modules/@cloudflare/unenv-preset": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.3.2.tgz",
"integrity": "sha512-MtUgNl+QkQyhQvv5bbWP+BpBC1N0me4CHHuP2H4ktmOMKdB/6kkz/lo+zqiA4mEazb4y+1cwyNjVrQ2DWeE4mg==",
"dev": true,
"license": "MIT OR Apache-2.0",
"peerDependencies": {
"unenv": "2.0.0-rc.17",
"workerd": "^1.20250508.0"
},
"peerDependenciesMeta": {
"workerd": {
"optional": true
}
}
},
"node_modules/@cloudflare/workerd-darwin-64": {
"version": "1.20250508.0",
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20250508.0.tgz",
@@ -134,9 +118,9 @@
}
},
"node_modules/@cloudflare/workers-types": {
"version": "4.20250525.0",
"resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20250525.0.tgz",
"integrity": "sha512-3loeNVJkcDLb9giarUIHmDgvh+/4RtH+R/rHn4BCmME1qKdu73n/hvECYhH8BabCZplF8zQy1wok1MKwXEWC/A==",
"version": "4.20250519.0",
"resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20250519.0.tgz",
"integrity": "sha512-glS4kimqTyxVuNNPCoOzE/BmQ3w+HVQuPwYdPAQrUDaj2AKS8Y3O5w85TVFTKZGhCtwfNs8h4iohldjEs/P5Vw==",
"dev": true,
"license": "MIT OR Apache-2.0"
},
@@ -1106,6 +1090,7 @@
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"color-convert": "^2.0.1",
"color-string": "^1.9.0"
@@ -1120,6 +1105,7 @@
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"color-name": "~1.1.4"
},
@@ -1132,7 +1118,8 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true,
"license": "MIT"
"license": "MIT",
"optional": true
},
"node_modules/color-string": {
"version": "1.9.1",
@@ -1140,6 +1127,7 @@
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
@@ -1187,6 +1175,7 @@
"integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
"dev": true,
"license": "Apache-2.0",
"optional": true,
"engines": {
"node": ">=8"
}
@@ -1327,7 +1316,8 @@
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
"dev": true,
"license": "MIT"
"license": "MIT",
"optional": true
},
"node_modules/is-stream": {
"version": "3.0.0",
@@ -1383,9 +1373,9 @@
}
},
"node_modules/miniflare": {
"version": "4.20250508.3",
"resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20250508.3.tgz",
"integrity": "sha512-43oTmZ0CCmUcieetI5YDDyX0IiwhOcVIWzdBBCqWXK3F1XgQwg4d3fTqRyJnCmHIoaYx9CE1kTEKZC1UahPQhA==",
"version": "4.20250508.2",
"resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20250508.2.tgz",
"integrity": "sha512-+2XoHLSbY49LNQgZoAJRX+SyUwC767Cz46pgx4T/j1YGKSrMzAxCOk59b12QoFNnN50Gtd9HkT3ukZn2nzrIVw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1394,7 +1384,6 @@
"acorn-walk": "8.3.2",
"exit-hook": "2.2.1",
"glob-to-regexp": "0.4.1",
"sharp": "^0.33.5",
"stoppable": "1.1.0",
"undici": "^5.28.5",
"workerd": "1.20250508.0",
@@ -1538,6 +1527,7 @@
"dev": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"optional": true,
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.3",
@@ -1601,6 +1591,7 @@
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"is-arrayish": "^0.3.1"
}
@@ -1668,9 +1659,9 @@
}
},
"node_modules/ufo": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz",
"integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==",
"version": "1.5.4",
"resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz",
"integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==",
"dev": true,
"license": "MIT"
},
@@ -1687,20 +1678,6 @@
"node": ">=14.0"
}
},
"node_modules/unenv": {
"version": "2.0.0-rc.17",
"resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.17.tgz",
"integrity": "sha512-B06u0wXkEd+o5gOCMl/ZHl5cfpYbDZKAT+HWTL+Hws6jWu7dCiqBBXXXzMFcFVJb8D4ytAnYmxJA83uwOQRSsg==",
"dev": true,
"license": "MIT",
"dependencies": {
"defu": "^6.1.4",
"exsolve": "^1.0.4",
"ohash": "^2.0.11",
"pathe": "^2.0.3",
"ufo": "^1.6.1"
}
},
"node_modules/uuid": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
@@ -1761,19 +1738,19 @@
}
},
"node_modules/wrangler": {
"version": "4.16.1",
"resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.16.1.tgz",
"integrity": "sha512-YiLdWXcaQva2K/bqokpsZbySPmoT8TJFyJPsQPZumnkgimM9+/g/yoXArByA+pf+xU8jhw7ybQ8X1yBGXv731g==",
"version": "4.15.2",
"resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.15.2.tgz",
"integrity": "sha512-Rv7zP61DOVzIS3af+/1UzJkRVpqu6VDRi6uIVMiwD1LkXG5Ov08tv94jgnE9bSjVf0paQg3dl0E89h+wQ0x/Bw==",
"dev": true,
"license": "MIT OR Apache-2.0",
"dependencies": {
"@cloudflare/kv-asset-handler": "0.4.0",
"@cloudflare/unenv-preset": "2.3.2",
"@cloudflare/unenv-preset": "2.3.1",
"blake3-wasm": "2.1.5",
"esbuild": "0.25.4",
"miniflare": "4.20250508.3",
"miniflare": "4.20250508.2",
"path-to-regexp": "6.3.0",
"unenv": "2.0.0-rc.17",
"unenv": "2.0.0-rc.15",
"workerd": "1.20250508.0"
},
"bin": {
@@ -1796,6 +1773,36 @@
}
}
},
"node_modules/wrangler/node_modules/@cloudflare/unenv-preset": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.3.1.tgz",
"integrity": "sha512-Xq57Qd+ADpt6hibcVBO0uLG9zzRgyRhfCUgBT9s+g3+3Ivg5zDyVgLFy40ES1VdNcu8rPNSivm9A+kGP5IVaPg==",
"dev": true,
"license": "MIT OR Apache-2.0",
"peerDependencies": {
"unenv": "2.0.0-rc.15",
"workerd": "^1.20250320.0"
},
"peerDependenciesMeta": {
"workerd": {
"optional": true
}
}
},
"node_modules/wrangler/node_modules/unenv": {
"version": "2.0.0-rc.15",
"resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.15.tgz",
"integrity": "sha512-J/rEIZU8w6FOfLNz/hNKsnY+fFHWnu9MH4yRbSZF3xbbGHovcetXPs7sD+9p8L6CeNC//I9bhRYAOsBt2u7/OA==",
"dev": true,
"license": "MIT",
"dependencies": {
"defu": "^6.1.4",
"exsolve": "^1.0.4",
"ohash": "^2.0.11",
"pathe": "^2.0.3",
"ufo": "^1.5.4"
}
},
"node_modules/ws": {
"version": "8.18.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",

View File

@@ -813,13 +813,6 @@
"node": ">= 8"
}
},
"node_modules/@rolldown/pluginutils": {
"version": "1.0.0-beta.9",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.9.tgz",
"integrity": "sha512-e9MeMtVWo186sgvFFJOPGy7/d2j2mZhLJIdVW0C/xDluuOvymEATqz6zKsP0ZmXGzQtqlyjz5sC1sYQUoJG98w==",
"dev": true,
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.36.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.36.0.tgz",
@@ -1755,9 +1748,9 @@
"license": "MIT"
},
"node_modules/@types/react": {
"version": "19.1.5",
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.5.tgz",
"integrity": "sha512-piErsCVVbpMMT2r7wbawdZsq4xMvIAhQuac2gedQHysu1TZYEigE6pnFfgZT+/jQnrRuF5r+SHzuehFjfRjr4g==",
"version": "19.1.4",
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.4.tgz",
"integrity": "sha512-EB1yiiYdvySuIITtD5lhW4yPyJ31RkJkkDw794LaQYrxCSaQV/47y5o1FMC4zF9ZyjUjzJMZwbovEnT5yHTW6g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2004,14 +1997,13 @@
}
},
"node_modules/@vitejs/plugin-react-swc": {
"version": "3.10.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.10.0.tgz",
"integrity": "sha512-ZmkdHw3wo/o/Rk05YsXZs/DJAfY2CdQ5DUAjoWji+PEr+hYADdGMCGgEAILbiKj+CjspBTuTACBcWDrmC8AUfw==",
"version": "3.9.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.9.0.tgz",
"integrity": "sha512-jYFUSXhwMCYsh/aQTgSGLIN3Foz5wMbH9ahb0Zva//UzwZYbMiZd7oT3AU9jHT9DLswYDswsRwPU9jVF3yA48Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@rolldown/pluginutils": "1.0.0-beta.9",
"@swc/core": "^1.11.22"
"@swc/core": "^1.11.21"
},
"peerDependencies": {
"vite": "^4 || ^5 || ^6"

View File

@@ -88,7 +88,7 @@ export default function Chrome() {
edit={revision}
theme={theme}
tool="ruff"
version={`v${ruffVersion}`}
version={ruffVersion}
onChangeTheme={setTheme}
onShare={handleShare}
onReset={handleResetClicked}

View File

@@ -42,7 +42,7 @@ export default function Header({
<div className="flex items-center min-w-0 gap-4 mx-2">
{version ? (
<div className="hidden sm:flex">
<VersionTag>{version}</VersionTag>
<VersionTag>v{version}</VersionTag>
</div>
) : null}
<Divider />

View File

@@ -4,7 +4,6 @@ import { ReactNode } from "react";
export default function VersionTag({ children }: { children: ReactNode }) {
return (
<div
title="Version"
className={classNames(
"text-gray-500",
"text-xs",

View File

@@ -20,7 +20,7 @@ export const SETTINGS_FILE_NAME = "ty.json";
export default function Playground() {
const [theme, setTheme] = useTheme();
const [version, setVersion] = useState<string | null>(null);
const [version, setVersion] = useState<string>("0.0.0");
const [error, setError] = useState<string | null>(null);
const workspacePromiseRef = useRef<Promise<Workspace> | null>(null);
const [workspace, setWorkspace] = useState<Workspace | null>(null);
@@ -198,7 +198,7 @@ export const DEFAULT_SETTINGS = JSON.stringify(
"python-version": "3.13",
},
rules: {
"undefined-reveal": "ignore",
"division-by-zero": "error",
},
},
null,
@@ -453,7 +453,6 @@ export interface InitializedPlayground {
async function startPlayground(): Promise<InitializedPlayground> {
const ty = await import("../ty_wasm");
await ty.default();
const version = ty.version();
const monaco = await loader.init();
setupMonaco(monaco, {
@@ -467,7 +466,7 @@ async function startPlayground(): Promise<InitializedPlayground> {
const workspace = restored ?? DEFAULT_WORKSPACE;
return {
version,
version: "0.0.0",
workspace,
};
}

View File

@@ -54,6 +54,7 @@ SECTIONS: list[Section] = [
Section("Contributing", "contributing.md", generated=True),
]
LINK_REWRITES: dict[str, str] = {
"https://docs.astral.sh/ruff/": "index.md",
"https://docs.astral.sh/ruff/configuration/": "configuration.md",
@@ -61,8 +62,8 @@ LINK_REWRITES: dict[str, str] = {
"configuration.md#config-file-discovery"
),
"https://docs.astral.sh/ruff/contributing/": "contributing.md",
"https://docs.astral.sh/ruff/editors": "editors/index.md",
"https://docs.astral.sh/ruff/editors/setup": "editors/setup.md",
"https://docs.astral.sh/ruff/integrations/": "integrations.md",
"https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8": (
"faq.md#how-does-ruffs-linter-compare-to-flake8"
),

ty.schema.json generated
View File

@@ -859,7 +859,7 @@
"type": "object",
"properties": {
"root": {
"description": "The root of the project, used for finding first-party modules.\n\nIf left unspecified, ty will try to detect common project layouts and initialize `src.root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat) * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path * otherwise, default to `.` (flat layout)\n\nBesides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file), it will also be included in the first party search path.",
"description": "The root of the project, used for finding first-party modules.\n\nIf left unspecified, ty will try to detect common project layouts and initialize `src.root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat) * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path * otherwise, default to `.` (flat layout)",
"type": [
"string",
"null"