Align Black and blacken-docs configs (#24785)
BasPH authored Jul 1, 2022
1 parent e8b49d7 commit 46ac083
Showing 20 changed files with 47 additions and 118 deletions.
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -160,6 +160,13 @@ repos:
     hooks:
       - id: blacken-docs
         name: Run black on python code blocks in documentation files
+        args:
+          - --line-length=110
+          - --target-version=py37
+          - --target-version=py38
+          - --target-version=py39
+          - --target-version=py310
+          - --skip-string-normalization
         alias: black
         additional_dependencies: [black==22.3.0]
   - repo: https://github.com/pre-commit/pre-commit-hooks
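The args added above mirror the Black settings Airflow itself uses, so ``blacken-docs`` formats Python code blocks in documentation the same way as the codebase. As a rough illustration of the two less obvious flags (a hypothetical snippet, not part of this commit): with ``--skip-string-normalization`` Black keeps existing quote styles, and with ``--line-length=110`` statements of up to 110 characters stay on one line, which is why the multi-line calls in the files below collapse into single lines.

```python
# Hypothetical example assuming Black 22.3.0 with the args above.
name = 'kept-single-quoted'  # --skip-string-normalization: not rewritten to double quotes


def make_task(task_id: str, bash_command: str, retries: int = 0, do_xcom_push: bool = False) -> dict:
    # This ~100-character signature fits at line length 110; Black's default 88 would wrap it.
    return {"task_id": task_id, "bash_command": bash_command, "retries": retries, "push": do_xcom_push}
```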
4 changes: 1 addition & 3 deletions RELEASE_NOTES.rst
@@ -6874,9 +6874,7 @@ New signature:

 .. code-block:: python

-    def wait_for_transfer_job(
-        self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)
-    ):
+    def wait_for_transfer_job(self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)):
         ...

 The behavior of ``wait_for_transfer_job`` has changed:
8 changes: 2 additions & 6 deletions airflow/providers/amazon/aws/hooks/batch_waiters.py
@@ -71,16 +71,12 @@ class BatchWaitersHook(BatchClientHook):
         # and the details of the config on that waiter can be further modified without any
         # accidental impact on the generation of new waiters from the defined waiter_model, e.g.
         waiters.get_waiter("JobExists").config.delay  # -> 5
-        waiter = waiters.get_waiter(
-            "JobExists"
-        )  # -> botocore.waiter.Batch.Waiter.JobExists object
+        waiter = waiters.get_waiter("JobExists")  # -> botocore.waiter.Batch.Waiter.JobExists object
         waiter.config.delay = 10
         waiters.get_waiter("JobExists").config.delay  # -> 5 as defined by waiter_model

         # To use a specific waiter, update the config and call the `wait()` method for jobId, e.g.
-        waiter = waiters.get_waiter(
-            "JobExists"
-        )  # -> botocore.waiter.Batch.Waiter.JobExists object
+        waiter = waiters.get_waiter("JobExists")  # -> botocore.waiter.Batch.Waiter.JobExists object
         waiter.config.delay = random.uniform(1, 10)  # seconds
         waiter.config.max_attempts = 10
         waiter.wait(jobs=[jobId])
3 changes: 1 addition & 2 deletions airflow/providers/google/CHANGELOG.rst
@@ -897,8 +897,7 @@ now the snake_case convention is used.
     set_acl_permission = GCSBucketCreateAclEntryOperator(
         task_id="gcs-set-acl-permission",
         bucket=BUCKET_NAME,
-        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
-        ".split(':', 2)[1] }}",
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity'].split(':', 2)[1] }}",
         role="OWNER",
     )
7 changes: 1 addition & 6 deletions airflow/providers/oracle/hooks/oracle.py
@@ -77,12 +77,7 @@ def get_conn(self) -> oracledb.Connection:
         .. code-block:: python

-            {
-                "dsn": (
-                    "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)"
-                    "(HOST=host)(PORT=1521))(CONNECT_DATA=(SID=sid)))"
-                )
-            }
+            {"dsn": ("(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=host)(PORT=1521))(CONNECT_DATA=(SID=sid)))")}

         see more param detail in `oracledb.connect
         <https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.connect>`_
8 changes: 2 additions & 6 deletions docs/apache-airflow-providers-amazon/connections/aws.rst
@@ -313,17 +313,13 @@ Example
         def federated(self):
             return "federation" in self.extra_config

-        def _create_basic_session(
-            self, session_kwargs: Dict[str, Any]
-        ) -> boto3.session.Session:
+        def _create_basic_session(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
             if self.federated:
                 return self._create_federated_session(session_kwargs)
             else:
                 return super()._create_basic_session(session_kwargs)

-        def _create_federated_session(
-            self, session_kwargs: Dict[str, Any]
-        ) -> boto3.session.Session:
+        def _create_federated_session(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
             username = self.extra_config["federation"]["username"]
             region_name = self._get_region_name()
             self.log.debug(
8 changes: 2 additions & 6 deletions docs/apache-airflow/best-practices.rst
@@ -292,12 +292,8 @@ It's easier to grab the concept with an example. Let's say that we have the foll
         start_date=datetime(2021, 1, 1),
         catchup=False,
     ) as dag:
-        failing_task = BashOperator(
-            task_id="failing_task", bash_command="exit 1", retries=0
-        )
-        passing_task = BashOperator(
-            task_id="passing_task", bash_command="echo passing_task"
-        )
+        failing_task = BashOperator(task_id="failing_task", bash_command="exit 1", retries=0)
+        passing_task = BashOperator(task_id="passing_task", bash_command="echo passing_task")
         teardown = BashOperator(
             task_id="teardown",
             bash_command="echo teardown",
4 changes: 1 addition & 3 deletions docs/apache-airflow/concepts/dags.rst
@@ -413,9 +413,7 @@ You can also combine this with the :ref:`concepts:depends-on-past` functionality
     )

     run_this_first = EmptyOperator(task_id="run_this_first", dag=dag)
-    branching = BranchPythonOperator(
-        task_id="branching", dag=dag, python_callable=lambda: "branch_a"
-    )
+    branching = BranchPythonOperator(task_id="branching", dag=dag, python_callable=lambda: "branch_a")

     branch_a = EmptyOperator(task_id="branch_a", dag=dag)
     follow_branch_a = EmptyOperator(task_id="follow_branch_a", dag=dag)
12 changes: 3 additions & 9 deletions docs/apache-airflow/concepts/dynamic-task-mapping.rst
@@ -173,9 +173,7 @@ It is possible to use ``partial`` and ``expand`` with classic style operators as

 .. code-block:: python

-    BashOperator.partial(task_id="bash", do_xcom_push=False).expand(
-        bash_command=["echo 1", "echo 2"]
-    )
+    BashOperator.partial(task_id="bash", do_xcom_push=False).expand(bash_command=["echo 1", "echo 2"])

 .. note:: Only keyword arguments are allowed to be passed to ``partial()``.

@@ -224,9 +222,7 @@ In this example you have a regular data delivery to an S3 bucket and want to app
     def total(lines):
         return sum(lines)

-    counts = count_lines.partial(aws_conn_id="aws_default", bucket=files.bucket).expand(
-        file=XComArg(files)
-    )
+    counts = count_lines.partial(aws_conn_id="aws_default", bucket=files.bucket).expand(file=XComArg(files))

     total(lines=counts)

 What data types can be expanded?

@@ -299,9 +295,7 @@ There are two limits that you can place on a task:
         return x + 1

-    BashOperator.partial(task_id="my_task", max_active_tis_per_dag=16).expand(
-        bash_command=commands
-    )
+    BashOperator.partial(task_id="my_task", max_active_tis_per_dag=16).expand(bash_command=commands)

 Automatically skipping zero-length maps
 =======================================
8 changes: 2 additions & 6 deletions docs/apache-airflow/executor/kubernetes.rst
@@ -172,12 +172,8 @@ Here is an example of a task with both features:
         tags=["example3"],
     ) as dag:
         executor_config_template = {
-            "pod_template_file": os.path.join(
-                AIRFLOW_HOME, "pod_templates/basic_template.yaml"
-            ),
-            "pod_override": k8s.V1Pod(
-                metadata=k8s.V1ObjectMeta(labels={"release": "stable"})
-            ),
+            "pod_template_file": os.path.join(AIRFLOW_HOME, "pod_templates/basic_template.yaml"),
+            "pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(labels={"release": "stable"})),
         }

         @task(executor_config=executor_config_template)
16 changes: 4 additions & 12 deletions docs/apache-airflow/howto/connection.rst
@@ -250,9 +250,7 @@ Here's an example:
             from wtforms import StringField

             return {
-                "workspace": StringField(
-                    lazy_gettext("Workspace"), widget=BS3TextFieldWidget()
-                ),
+                "workspace": StringField(lazy_gettext("Workspace"), widget=BS3TextFieldWidget()),
                 "project": StringField(lazy_gettext("Project"), widget=BS3TextFieldWidget()),
             }

@@ -413,9 +411,7 @@ You can verify a URI is parsed correctly like so:
     >>> from airflow.models.connection import Connection

-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-password@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-password@my-host:5432/my-schema?param1=val1&param2=val2")
     >>> print(c.login)
     my-login
     >>> print(c.password)

@@ -437,17 +433,13 @@ For example if your password has a ``/``, this fails:

 .. code-block:: pycon

-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-pa/ssword@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-pa/ssword@my-host:5432/my-schema?param1=val1&param2=val2")
     ValueError: invalid literal for int() with base 10: 'my-pa'

 To fix this, you can encode with :func:`~urllib.parse.quote_plus`:

 .. code-block:: pycon

-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-pa%2Fssword@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-pa%2Fssword@my-host:5432/my-schema?param1=val1&param2=val2")
     >>> print(c.password)
     my-pa/ssword
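For reference, the percent-encoded password in the last hunk can be produced with ``urllib.parse.quote_plus`` directly; a minimal sketch reusing the example values from above:

```python
from urllib.parse import quote_plus

# Percent-encode the password so reserved characters such as "/" survive URI parsing.
password = quote_plus("my-pa/ssword")  # -> 'my-pa%2Fssword'
uri = f"my-conn-type://my-login:{password}@my-host:5432/my-schema?param1=val1&param2=val2"
print(uri)
```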
4 changes: 1 addition & 3 deletions docs/apache-airflow/howto/custom-operator.rst
@@ -169,9 +169,7 @@ You can use the template as follows:
 .. code-block:: python

     with dag:
-        hello_task = HelloOperator(
-            task_id="task_id_1", dag=dag, name="{{ task_instance.task_id }}"
-        )
+        hello_task = HelloOperator(task_id="task_id_1", dag=dag, name="{{ task_instance.task_id }}")

 In this example, Jinja looks for the ``name`` parameter and substitutes ``{{ task_instance.task_id }}`` with
 ``task_id_1``.
4 changes: 1 addition & 3 deletions docs/apache-airflow/howto/customize-ui.rst
@@ -166,8 +166,6 @@ information, see `String Formatting in the MarkupSafe docs <https://markupsafe.p
 .. code-block:: python

     DASHBOARD_UIALERTS = [
-        UIAlert(
-            'Visit <a href="https://airflow.apache.org">airflow.apache.org</a>', html=True
-        ),
+        UIAlert('Visit <a href="https://airflow.apache.org">airflow.apache.org</a>', html=True),
         UIAlert(Markup("Welcome <em>%s</em>") % ("John & Jane Doe",)),
     ]
10 changes: 4 additions & 6 deletions docs/apache-airflow/howto/define_extra_link.rst
@@ -93,12 +93,10 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope
     operators = [GCSToS3Operator]

     def get_link(self, operator, *, ti_key):
-        return (
-            "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format(
-                dag_id=operator.dag_id,
-                task_id=operator.task_id,
-                run_id=ti_key.run_id,
-            )
+        return "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format(
+            dag_id=operator.dag_id,
+            task_id=operator.task_id,
+            run_id=ti_key.run_id,
         )
8 changes: 2 additions & 6 deletions docs/apache-airflow/lineage.rst
@@ -50,19 +50,15 @@ works.
     )

     f_final = File(url="/tmp/final")
-    run_this_last = EmptyOperator(
-        task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final
-    )
+    run_this_last = EmptyOperator(task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final)

     f_in = File(url="/tmp/whole_directory/")
     outlets = []
     for file in FILE_CATEGORIES:
         f_out = File(url="/tmp/{}/{{{{ data_interval_start }}}}".format(file))
         outlets.append(f_out)

-    run_this = BashOperator(
-        task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets
-    )
+    run_this = BashOperator(task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets)
     run_this.set_downstream(run_this_last)

 Inlets can be a (list of) upstream task ids or statically defined as an attr annotated object
4 changes: 1 addition & 3 deletions docs/apache-airflow/plugins.rst
@@ -302,9 +302,7 @@ will automatically load the registered plugins from the entrypoint list.
     setup(
         name="my-package",
         # ...
-        entry_points={
-            "airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"]
-        },
+        entry_points={"airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"]},
     )

 Automatic reloading webserver
8 changes: 2 additions & 6 deletions docs/apache-airflow/security/webserver.rst
@@ -150,9 +150,7 @@ Here is an example of what you might have in your webserver_config.py:
     AUTH_TYPE = AUTH_OAUTH
     AUTH_ROLES_SYNC_AT_LOGIN = True  # Checks roles on every login
-    AUTH_USER_REGISTRATION = (
-        True  # allow users who are not already in the FAB DB to register
-    )
+    AUTH_USER_REGISTRATION = True  # allow users who are not already in the FAB DB to register

     # Make sure to replace this with the path to your security manager class
     FAB_SECURITY_MANAGER_CLASS = "your_module.your_security_manager_class"

     AUTH_ROLES_MAPPING = {

@@ -219,9 +217,7 @@ webserver_config.py itself if you wish.
         # In this example, the oauth provider == 'github'.
         # If you ever want to support other providers, see how it is done here:
         # https://github.com/dpgaspar/Flask-AppBuilder/blob/master/flask_appbuilder/security/manager.py#L550
-        def get_oauth_user_info(
-            self, provider: str, resp: Any
-        ) -> Dict[str, Union[str, List[str]]]:
+        def get_oauth_user_info(self, provider: str, resp: Any) -> Dict[str, Union[str, List[str]]]:
             # Creates the user info payload from Github.
             # The user previously allowed your app to act on their behalf,
28 changes: 7 additions & 21 deletions docs/apache-airflow/upgrading-from-1-10/index.rst
@@ -182,9 +182,7 @@ Whereas previously a user would import each individual class to build the pod as
     volume_config = {"persistentVolumeClaim": {"claimName": "test-volume"}}
     volume = Volume(name="test-volume", configs=volume_config)
-    volume_mount = VolumeMount(
-        "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True
-    )
+    volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True)

     port = Port("http", 80)
     secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn")

@@ -224,9 +222,7 @@ Now the user can use the ``kubernetes.client.models`` class as a single point of
     volume = k8s.V1Volume(
         name="test-volume",
-        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(
-            claim_name="test-volume"
-        ),
+        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"),
     )

     port = k8s.V1ContainerPort(name="http", container_port=80)

@@ -602,9 +598,7 @@ Before:
     from airflow.kubernetes.volume_mount import VolumeMount

-    volume_mount = VolumeMount(
-        "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True
-    )
+    volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True)

     k = KubernetesPodOperator(
         namespace="default",
         image="ubuntu:16.04",

@@ -660,9 +654,7 @@ After:
     volume = k8s.V1Volume(
         name="test-volume",
-        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(
-            claim_name="test-volume"
-        ),
+        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"),
     )

     k = KubernetesPodOperator(
         namespace="default",

@@ -737,9 +729,7 @@ After:
     env_vars = [
         k8s.V1EnvVar(
             name="ENV3",
-            value_from=k8s.V1EnvVarSource(
-                field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP")
-            ),
+            value_from=k8s.V1EnvVarSource(field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP")),
         )
     ]

@@ -777,9 +767,7 @@ After:
     from kubernetes.client import models as k8s

     configmap = "test-configmap"
-    env_from = [
-        k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap))
-    ]
+    env_from = [k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap))]

     k = KubernetesPodOperator(
         namespace="default",

@@ -1149,9 +1137,7 @@ non-RBAC UI (``flask-admin`` based UI), update it to use ``flask_appbuilder_view
     v = TestView(category="Test Plugin", name="Test View")

-    ml = MenuLink(
-        category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/"
-    )
+    ml = MenuLink(category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/")

     class AirflowTestPlugin(AirflowPlugin):
4 changes: 1 addition & 3 deletions tests/dags_corrupted/README.md
@@ -25,9 +25,7 @@ Python interpreter from loading this file.
 To access a DAG in this folder, use the following code inside a unit test.

 ```python
-TEST_DAG_FOLDER = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)), "dags_corrupted"
-)
+TEST_DAG_FOLDER = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dags_corrupted")

 dagbag = DagBag(dag_folder=TEST_DAG_FOLDER)
 dag = dagbag.get_dag(dag_id)