Commit be5eed5

D205 Support - Providers - Final Pass (apache#33303)
Parent: 54de496 · Commit: be5eed5

File tree: 11 files changed (+18 / -6 lines)

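For context: pydocstyle rule D205 ("1 blank line required between summary line and description") is what this pass enforces. Each hunk below either inserts a blank line after a docstring's summary or shortens the summary to a single line. A minimal before/after sketch (function names are made up for illustration):

```python
# Before: flagged by D205 because the description starts on the line
# immediately after the summary, with no blank line in between.
def cleanup_tmp_dir_before(tmp_dir: str) -> None:
    """
    Delete the temporary directory after finishing work with it.
    Uses ``rmtree`` to remove it recursively.
    """


# After: compliant, one blank line separates the summary from the description.
def cleanup_tmp_dir_after(tmp_dir: str) -> None:
    """
    Delete the temporary directory after finishing work with it.

    Uses ``rmtree`` to remove it recursively.
    """
```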

airflow/providers/apache/beam/hooks/beam.py

Lines changed: 2 additions & 0 deletions
@@ -388,6 +388,7 @@ def start_go_pipeline_with_binary(
 class BeamAsyncHook(BeamHook):
     """
     Asynchronous hook for Apache Beam.
+
     :param runner: Runner type.
     """
@@ -411,6 +412,7 @@ async def _create_tmp_dir(prefix: str) -> str:
     async def _cleanup_tmp_dir(tmp_dir: str) -> None:
         """
         Helper method to delete temporary directory after finishing work with it.
+
         It uses the `rmtree` method to recursively remove the temporary directory.
         """
         shutil.rmtree(tmp_dir)

airflow/providers/apache/beam/operators/beam.py

Lines changed: 1 addition & 0 deletions
@@ -402,6 +402,7 @@ async def execute_async(self, context: Context):
     def execute_complete(self, context: Context, event: dict[str, Any]):
         """
         Callback for when the trigger fires - returns immediately.
+
         Relies on trigger to throw an exception, otherwise it assumes execution was
         successful.
         """

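The completion callback touched by this hunk (and by the s3_to_gcs hunk further down) follows the usual deferrable-operator shape: the trigger reports its outcome in the event payload and the callback raises on anything other than success. A rough sketch, not the operator's actual code, with the event keys ("status", "message") assumed:

```python
from airflow.exceptions import AirflowException


def execute_complete_sketch(context, event: dict) -> None:
    # The trigger passes its result back as a plain dict; treat any
    # reported error as a task failure, otherwise return immediately.
    if event.get("status") == "error":
        raise AirflowException(event.get("message", "Trigger reported an error"))
```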
airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py

Lines changed: 1 addition & 0 deletions
@@ -182,6 +182,7 @@ def _list_pods(self, query_kwargs):
     def _make_safe_label_value(self, input_value: str | datetime) -> str:
         """
         Normalize a provided label to be of valid length and characters.
+
         See airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value for more details.
         """
         # airflow.providers.cncf.kubernetes is an expensive import, locally import it here to
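For readers unfamiliar with the constraint being worked around: Kubernetes label values are limited to 63 characters and a restricted character set. A sketch of what normalization along these lines typically looks like; this is illustrative only, the real logic lives in pod_generator.make_safe_label_value:

```python
import hashlib
import re

MAX_LABEL_LEN = 63  # Kubernetes limit for label values


def make_safe_label_value_sketch(value: str) -> str:
    # Replace anything outside the allowed label characters.
    safe = re.sub(r"[^A-Za-z0-9._-]", "-", value)
    # If the result is too long (or empty), truncate and append a short
    # hash so distinct inputs still map to distinct labels.
    if not safe or len(safe) > MAX_LABEL_LEN:
        digest = hashlib.md5(value.encode()).hexdigest()[:8]
        safe = safe[: MAX_LABEL_LEN - 9] + "-" + digest
    return safe
```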

airflow/providers/cncf/kubernetes/utils/k8s_hashlib_wrapper.py

Lines changed: 1 addition & 2 deletions
@@ -31,8 +31,7 @@

 def md5(__string: ReadableBuffer = b"") -> hashlib._Hash:
     """
-    Safely allows calling the ``hashlib.md5`` function when ``usedforsecurity`` is disabled in
-    the configuration.
+    Safely allows calling the ``hashlib.md5`` function when ``usedforsecurity`` is disabled in configuration.

     :param __string: The data to hash. Default to empty str byte.
     :return: The hashed value.
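A minimal sketch of how a wrapper like this can behave, assuming the ``usedforsecurity`` keyword (available on Python 3.9+) is what gets toggled; this is not the provider's exact implementation:

```python
import hashlib
import sys


def md5_sketch(data: bytes = b"") -> "hashlib._Hash":
    # On Python 3.9+ mark the digest as not security-relevant so it is
    # permitted even when the interpreter restricts MD5 for security use.
    if sys.version_info >= (3, 9):
        return hashlib.md5(data, usedforsecurity=False)
    return hashlib.md5(data)
```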

airflow/providers/elasticsearch/log/es_response.py

Lines changed: 1 addition & 0 deletions
@@ -66,6 +66,7 @@ def to_dict(self):
 class Hit(AttributeDict):
     """
     The Hit class is used to manage and access elements in a document.
+
     It inherits from the AttributeDict class and provides
     attribute-like access to its elements, similar to a dictionary.
     """

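The attribute-style access described in that docstring is the familiar pattern of routing missing attribute lookups to dictionary keys. A self-contained sketch of the idea, not the provider's AttributeDict:

```python
class AttributeDictSketch:
    """Expose the keys of a plain dict as attributes."""

    def __init__(self, data: dict):
        self._data = data

    def __getattr__(self, name: str):
        # Called only when normal attribute lookup fails, so _data itself
        # is never routed through here.
        try:
            return self._data[name]
        except KeyError:
            raise AttributeError(name) from None

    def __getitem__(self, key):
        return self._data[key]


hit = AttributeDictSketch({"log_id": "abc", "message": "task started"})
print(hit.message)      # attribute-style access -> "task started"
print(hit["message"])   # dict-style access still works
```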
airflow/providers/elasticsearch/log/es_task_handler.py

Lines changed: 3 additions & 2 deletions
@@ -136,6 +136,7 @@ def __init__(
     def format_url(host: str) -> str:
         """
         Formats the given host string to ensure it starts with 'http'.
+
         Checks if the host string represents a valid URL.

         :param host: The host string to format and check.
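Roughly what a helper with that contract might do; a sketch based only on the docstring above, not the handler's actual code:

```python
from urllib.parse import urlparse


def format_url_sketch(host: str) -> str:
    # Ensure the host starts with an explicit http scheme.
    if not host.startswith("http"):
        host = f"http://{host}"
    # Reject strings that still do not parse as a usable URL.
    if not urlparse(host).netloc:
        raise ValueError(f"'{host}' is not a valid URL")
    return host
```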
@@ -444,6 +445,7 @@ def supports_external_link(self) -> bool:
     def _resolve_nested(self, hit: dict[Any, Any], parent_class=None) -> type[Hit]:
         """
         Resolves nested hits from Elasticsearch by iteratively navigating the `_nested` field.
+
         The result is used to fetch the appropriate document class to handle the hit.

         This method can be used with nested Elasticsearch fields which are structured

@@ -468,8 +470,7 @@ def _resolve_nested(self, hit: dict[Any, Any], parent_class=None) -> type[Hit]:

     def _get_result(self, hit: dict[Any, Any], parent_class=None) -> Hit:
         """
-        This method processes a hit (i.e., a result) from an Elasticsearch response and transforms it into an
-        appropriate class instance.
+        Process a hit (i.e., a result) from an Elasticsearch response and transform it into a class instance.

         The transformation depends on the contents of the hit. If the document in hit contains a nested field,
         the '_resolve_nested' method is used to determine the appropriate class (based on the nested path).
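For orientation, nested hits in an Elasticsearch response carry a `_nested` entry describing the field (and possibly further nesting) that the inner hit came from. A hypothetical sketch of walking that chain to pick a document class; the field names and the mapping dict are assumptions, not the handler's real API:

```python
def resolve_nested_sketch(hit: dict, doc_classes: dict) -> type:
    # Build the dotted path of nested fields, e.g. "comments.replies".
    parts = []
    nested = hit.get("_nested")
    while nested:
        parts.append(nested["field"])
        nested = nested.get("_nested")
    nested_path = ".".join(parts)
    # Fall back to a generic class when no specific one is registered.
    return doc_classes.get(nested_path, dict)
```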

airflow/providers/ftp/operators/ftp.py

Lines changed: 3 additions & 1 deletion
@@ -139,7 +139,9 @@ def execute(self, context: Any) -> str | list[str] | None:

     def get_openlineage_facets_on_start(self):
         """
-        Returns OpenLineage datasets with following naming structure:
+        Returns OpenLineage datasets.
+
+        Dataset will have the following structure:
         input: file://hostname/path
         output: file://<conn.host>:<conn.port>/path.
         """

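The naming structure in that docstring boils down to a `file://` URI built from the connection's host, optional port, and the file path. A small illustration using a hypothetical helper (names assumed):

```python
def openlineage_dataset_name_sketch(host: str, port: int | None, path: str) -> str:
    # file://<host>[:<port>]<path>, matching the structure in the docstring.
    authority = f"{host}:{port}" if port else host
    return f"file://{authority}{path}"


print(openlineage_dataset_name_sketch("ftp.example.com", 21, "/data/in.csv"))
# -> file://ftp.example.com:21/data/in.csv
```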
airflow/providers/google/cloud/transfers/gcs_to_gcs.py

Lines changed: 1 addition & 0 deletions
@@ -556,6 +556,7 @@ def _copy_single_object(self, hook, source_object, destination_object):
     def get_openlineage_facets_on_complete(self, task_instance):
         """
         Implementing _on_complete because execute method does preprocessing on internals.
+
         This means we won't have to normalize self.source_object and self.source_objects,
         destination bucket and so on.
         """

airflow/providers/google/cloud/transfers/s3_to_gcs.py

Lines changed: 1 addition & 0 deletions
@@ -334,6 +334,7 @@ def submit_transfer_jobs(self, files: list[str], gcs_hook: GCSHook, s3_hook: S3H
     def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
         """
         Callback for when the trigger fires - returns immediately.
+
         Relies on trigger to throw an exception, otherwise it assumes execution was
         successful.
         """

airflow/providers/redis/log/redis_task_handler.py

Lines changed: 1 addition & 0 deletions
@@ -33,6 +33,7 @@
 class RedisTaskHandler(FileTaskHandler, LoggingMixin):
     """
     RedisTaskHandler is a Python log handler that handles and reads task instance logs.
+
     It extends airflow FileTaskHandler and uploads to and reads from Redis.

     :param base_log_folder:
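To make the "uploads to and reads from Redis" part concrete, the underlying operations are list pushes and range reads. A sketch with redis-py, with the connection details and key layout assumed rather than taken from the handler:

```python
import redis

client = redis.Redis(host="localhost", port=6379)  # assumed connection details
log_key = "dag_id.task_id.run_id.1"                 # hypothetical key layout

# Append one rendered log line, then read the full log back.
client.rpush(log_key, b"INFO - task started")
lines = client.lrange(log_key, 0, -1)
print(b"\n".join(lines).decode())
```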

airflow/providers/sftp/operators/sftp.py

Lines changed: 3 additions & 1 deletion
@@ -194,7 +194,9 @@ def execute(self, context: Any) -> str | list[str] | None:

     def get_openlineage_facets_on_start(self):
         """
-        This returns OpenLineage datasets in format:
+        Returns OpenLineage datasets.
+
+        Dataset will have the following structure:
         input: file://<local_host>/path
         output: file://<remote_host>:<remote_port>/path.
         """
