Skip to content

Commit fdf7b59

Browse files
committed
Replace flake8, isort, pyupgrade with ruff
https://github.com/charliermarsh/ruff/ is a faster replacement for most of the linting tools we use, and is starting to be picked up by several other projects. Even projects like pandas have adopted Ruff (pandas-dev/pandas#50160). This PR replaces flake8, isort, and pyupgrade.
1 parent a54e6ec commit fdf7b59

File tree

13 files changed

+58
-59
lines changed

13 files changed

+58
-59
lines changed

.flake8

-10
This file was deleted.

.isort.cfg

-8
This file was deleted.

.pre-commit-config.yaml

+5-26
Original file line numberDiff line numberDiff line change
@@ -70,34 +70,13 @@ repos:
7070
alias: black
7171
additional_dependencies: [black>=22.10.0]
7272

73-
- repo: https://github.com/PyCQA/flake8
74-
rev: 6.0.0
73+
- repo: https://github.com/charliermarsh/ruff-pre-commit
74+
rev: 'v0.0.219'
7575
hooks:
76-
- id: flake8
77-
additional_dependencies:
78-
- flake8-builtins
79-
- flake8-comprehensions
80-
- flake8-colors
81-
- flake8-assertive
82-
- flake8-typing-imports
83-
- flake8-logging-format
84-
85-
- repo: https://github.com/PyCQA/isort
86-
rev: 5.11.4
87-
hooks:
88-
- id: isort
89-
name: Run isort
90-
# Exclude auto-generated example files from being changed
91-
exclude: ^sql-cli/include/base/.airflow/dags
76+
- id: ruff
9277
args:
93-
# These options are duplicated to known_first_party in .isort.cfg,
94-
# Please keep these in sync for now. (See comments there for details.)
95-
- --profile=black
96-
- -l=110
97-
- --combine-as
98-
- -p=astro
99-
- -p=tests
100-
- -p=sql_cli
78+
- --config=./ruff.toml
79+
10180
- repo: https://github.com/codespell-project/codespell
10281
rev: v2.2.2
10382
hooks:

python-sdk/src/astro/databases/databricks/load_file/load_file_job.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
log = logging.getLogger(__file__)
2929

3030

31-
def load_file_to_delta(
31+
def load_file_to_delta( # noqa: C901
3232
input_file: File,
3333
delta_table: BaseTable,
3434
databricks_job_name: str,

python-sdk/src/astro/sql/operators/data_validations/ColumnCheckOperator.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -138,9 +138,9 @@ def process_checks(self):
138138
passed_tests.extend(_get_success_checks(checks, column))
139139

140140
if len(failed_tests) > 0:
141-
raise AirflowException(f"The following tests have failed:" f"\n{''.join(failed_tests)}")
141+
raise AirflowException(f"The following tests have failed: \n{''.join(failed_tests)}")
142142
if len(passed_tests) > 0:
143-
print(f"The following tests have passed:" f"\n{''.join(passed_tests)}")
143+
print(f"The following tests have passed: \n{''.join(passed_tests)}")
144144

145145

146146
def _get_failed_checks(checks, col=None):

python-sdk/src/astro/sql/operators/export_to_file.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def get_openlineage_facets_on_complete(self, task_instance): # skipcq: PYL-W061
105105
)
106106
]
107107
output_uri = (
108-
f"{self.output_file.openlineage_dataset_namespace}" f"{self.output_file.openlineage_dataset_name}"
108+
f"{self.output_file.openlineage_dataset_namespace}{self.output_file.openlineage_dataset_name}"
109109
)
110110
output_dataset = [
111111
OpenlineageDataset(

python-sdk/src/astro/sql/operators/raw_sql.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,12 @@
55
from typing import Any, Callable
66

77
try:
8-
from airflow.decorators.base import TaskDecorator, task_decorator_factory
8+
from airflow.decorators.base import TaskDecorator
99
except ImportError:
10-
from airflow.decorators.base import task_decorator_factory
1110
from airflow.decorators import _TaskDecorator as TaskDecorator
1211

1312
import airflow
13+
from airflow.decorators.base import task_decorator_factory
1414

1515
if airflow.__version__ >= "2.3":
1616
from sqlalchemy.engine.row import LegacyRow as SQLAlcRow

python-sdk/src/astro/sql/operators/transform.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,11 @@
44
from typing import Any, Callable
55

66
try:
7-
from airflow.decorators.base import TaskDecorator, task_decorator_factory
7+
from airflow.decorators.base import TaskDecorator
88
except ImportError:
9-
from airflow.decorators.base import task_decorator_factory
109
from airflow.decorators import _TaskDecorator as TaskDecorator
1110

12-
from airflow.decorators.base import get_unique_task_id
11+
from airflow.decorators.base import get_unique_task_id, task_decorator_factory
1312
from airflow.models.xcom_arg import XComArg
1413
from sqlalchemy.sql.functions import Function
1514

python-sdk/src/astro/sql/operators/upstream_task_mixin.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,5 +23,5 @@ def __init__(self, **kwargs):
2323
self.set_upstream(task)
2424
else:
2525
raise AirflowException(
26-
"Cannot upstream a non-task, please only use XcomArg or operators for this" " parameter"
26+
"Cannot upstream a non-task, please only use XcomArg or operators for this parameter"
2727
)

python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -83,9 +83,9 @@
8383
task_id="load_five_gb",
8484
bucket="astro-sdk",
8585
source_objects=[
86-
("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + str(i) + ".ndjson")
86+
f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000{str(i)}.ndjson"
8787
if i >= 10
88-
else ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + "0" + str(i) + ".ndjson")
88+
else f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-00000000000{str(i)}.ndjson"
8989
for i in range(20)
9090
],
9191
destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",

python-sdk/tests/files/locations/test_location_base.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def test_get_class_name_method_valid_name():
4040
"""Test valid case of implicit naming dependency among the module name and class name for dynamic imports"""
4141

4242
class Test: # skipcq: PY-D0002
43-
__name__ = "test.some"
43+
__name__ = "test.some" # noqa: A003
4444

4545
class TestLocation: # skipcq: PY-D0002
4646
pass
@@ -96,7 +96,7 @@ def test_get_class_name_method_invalid_name():
9696
"""Test invalid case of implicit naming dependency among the module name and class name for dynamic imports"""
9797

9898
class Test: # skipcq: PY-D0002
99-
__name__ = "test.some"
99+
__name__ = "test.some" # noqa: A003
100100

101101
class SomethingElseLocation: # skipcq: PY-D0002
102102
pass

python-sdk/tests_integration/sql/operators/test_snowflake_merge_func.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,8 @@ def test_is_valid_snow_identifier(self): # skipcq PYL-R0201
141141
]
142142
invalid_strings = [
143143
"$invalid",
144-
"Infvalid\x00" "Invalid Name",
144+
"Infvalid\x00",
145+
"Invalid Name",
145146
'"Invalid " Name"',
146147
'"Also Invalid Name""',
147148
]

ruff.toml

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
line-length = 120
2+
3+
# Enable Pyflakes `E` and `F` codes by default.
4+
extend-select = [
5+
"W", # pycodestyle warnings
6+
"I", # isort
7+
"C90", # Complexity
8+
# "B", # flake8-bugbear
9+
"C", # flake8-comprehensions
10+
# "ANN", # flake8-annotations
11+
"ISC", # flake8-implicit-str-concat
12+
"T10", # flake8-debugger
13+
"A", # flake8-builtins
14+
"UP", # pyupgrade
15+
]
16+
extend-ignore = ["A002"]
17+
18+
# Exclude a variety of commonly ignored directories.
19+
extend-exclude = [
20+
"__pycache__",
21+
"docs/source/conf.py",
22+
]
23+
24+
target-version = "py37"
25+
fix = true
26+
27+
[per-file-ignores]
28+
"python-sdk/src/astro/sql/__init__.py" = ["F401"]
29+
"python-sdk/src/astro/lineage/__init__.py" = ["F401"]
30+
"python-sdk/src/astro/sql/table.py" = ["F401"]
31+
32+
33+
[mccabe]
34+
max-complexity = 6
35+
36+
[isort]
37+
combine-as-imports = true
38+
known-first-party = ["astro", "tests", "sql_cli"]

0 commit comments

Comments
 (0)