# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
[build-system]
# Build dependencies should be pinned - including all transitive dependencies. This way we can ensure
# reproducibility of the build and make sure that future releases of any of the dependencies will not
# break the build of released airflow sources.
# The dependencies can be automatically upgraded by running:
# pre-commit run --hook-stage manual update-build-dependencies --all-files
requires = [
"GitPython==3.1.43",
"gitdb==4.0.11",
"hatchling==1.25.0",
"packaging==24.2",
"pathspec==0.12.1",
"pluggy==1.5.0",
"smmap==5.0.1",
"tomli==2.0.2; python_version < '3.11'",
"trove-classifiers==2024.10.21.16",
]
build-backend = "hatchling.build"
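# Note (illustrative, general PEP 517 behaviour rather than anything Airflow-specific): the pinned
# requirements above are what build frontends such as `python -m build` or `pip wheel .` install into
# the isolated build environment before invoking the `hatchling.build` backend.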
[project]
name = "apache-airflow"
description = "Programmatically author, schedule and monitor data pipelines"
readme = { file = "generated/PYPI_README.md", content-type = "text/markdown" }
license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt"]
requires-python = "~=3.9,<3.13"
authors = [
{ name = "Apache Software Foundation", email = "[email protected]" },
]
maintainers = [
{ name = "Apache Software Foundation", email="[email protected]" },
]
keywords = [ "airflow", "orchestration", "workflow", "dag", "pipelines", "automation", "data" ]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Web Environment",
"Framework :: Apache Airflow",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: System :: Monitoring",
]
dynamic = ["version", "optional-dependencies", "dependencies"]
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# !!! YOU MIGHT BE SURPRISED NOT SEEING THE DEPENDENCIES AS `project.dependencies` !!!!!!!!!
# !!! AND EXTRAS AS `project.optional-dependencies` !!!!!!!!!
# !!! THEY ARE marked as `dynamic` GENERATED by `hatch_build.py` !!!!!!!!!
# !!! SEE COMMENTS BELOW TO FIND WHERE DEPENDENCIES ARE MAINTAINED !!!!!!!!!
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#
# !!!!!! Those extras are defined in `hatch_build.py` and should be maintained there !!!!!!!
#
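# A rough sketch of how such dynamic fields are typically filled in by a hatchling metadata hook
# (illustrative only - the class name and values are made up; see `hatch_build.py` for the real code):
#
#   from hatchling.metadata.plugin.interface import MetadataHookInterface
#
#   class CustomMetadataHook(MetadataHookInterface):
#       def update(self, metadata: dict) -> None:
#           # compute and inject the dynamic fields declared above
#           metadata["dependencies"] = [...]
#           metadata["optional-dependencies"] = {...}
#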
# Those extras are available as regular core airflow extras - they install optional features of Airflow.
#
# START CORE EXTRAS HERE
#
# aiobotocore, apache-atlas, apache-webhdfs, async, cgroups, cloudpickle, github-enterprise, google-
# auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, rabbitmq, s3fs, sentry, statsd, uv
#
# END CORE EXTRAS HERE
#
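# As an illustrative example (assuming a released Airflow package), a core extra is installed like any
# other pip extra, e.g. `pip install "apache-airflow[graphviz,kerberos]"`.
#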
# The ``devel`` extras are not available in the released packages. They are only available when you install
# Airflow from sources in an ``editable`` installation - i.e. the one you usually use when contributing to
# Airflow. They provide tools such as ``pytest`` and ``mypy`` for general-purpose development and testing.
#
# START DEVEL EXTRAS HERE
#
# devel, devel-all-dbs, devel-ci, devel-debuggers, devel-devscripts, devel-duckdb, devel-hadoop,
# devel-mypy, devel-sentry, devel-static-checks, devel-tests
#
# END DEVEL EXTRAS HERE
#
# Those extras are bundles dynamically generated from other extras.
#
# START BUNDLE EXTRAS HERE
#
# all, all-core, all-dbs, devel-all, devel-ci
#
# END BUNDLE EXTRAS HERE
#
# The ``doc`` extras are not available in the released packages. They are only available when you install
# Airflow from sources in an ``editable`` installation - i.e. the one you usually use when contributing to
# Airflow. They provide tools needed when you want to build Airflow documentation (note that you also need
# the ``devel`` extras installed for airflow and providers in order to build documentation for airflow and
# provider packages respectively). The ``doc`` extra is enough to build regular documentation, while
# ``doc-gen`` is needed to generate the ER diagram describing our database.
#
# START DOC EXTRAS HERE
#
# doc, doc-gen
#
# END DOC EXTRAS HERE
#
# The `deprecated` extras are deprecated extras from Airflow 1 that will be removed in future versions.
#
# START DEPRECATED EXTRAS HERE
#
# atlas, aws, azure, cassandra, crypto, druid, gcp, gcp-api, hdfs, hive, kubernetes, mssql, pinot, s3,
# spark, webhdfs, winrm
#
# END DEPRECATED EXTRAS HERE
#
# !!!!!! Those providers are defined in the `airflow/providers/<provider>/provider.yaml` files !!!!!!!
#
# Those extras are available as regular Airflow extras; they install provider packages in standard builds,
# or the dependencies necessary to enable the feature in an editable build.
# START PROVIDER EXTRAS HERE
#
# airbyte, alibaba, amazon, apache.beam, apache.cassandra, apache.drill, apache.druid, apache.flink,
# apache.hdfs, apache.hive, apache.iceberg, apache.impala, apache.kafka, apache.kylin, apache.livy,
# apache.pig, apache.pinot, apache.spark, apprise, arangodb, asana, atlassian.jira, celery, cloudant,
# cncf.kubernetes, cohere, common.compat, common.io, common.sql, databricks, datadog, dbt.cloud,
# dingding, discord, docker, edge, elasticsearch, exasol, fab, facebook, ftp, github, google, grpc,
# hashicorp, http, imap, influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp,
# microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie,
# oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce,
# samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, standard, tableau,
# telegram, teradata, trino, vertica, weaviate, yandex, ydb, zendesk
#
# END PROVIDER EXTRAS HERE
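#
# As an illustrative example (assuming a released Airflow package), installing a provider extra such as
# `pip install "apache-airflow[amazon,google]"` pulls in the corresponding provider distributions
# (`apache-airflow-providers-amazon` and `apache-airflow-providers-google`).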
[project.scripts]
airflow = "airflow.__main__:main"
[project.urls]
"Bug Tracker" = "https://github.com/apache/airflow/issues"
Documentation = "https://airflow.apache.org/docs/"
Downloads = "https://archive.apache.org/dist/airflow/"
Homepage = "https://airflow.apache.org/"
"Release Notes" = "https://airflow.apache.org/docs/apache-airflow/stable/release_notes.html"
"Slack Chat" = "https://s.apache.org/airflow-slack"
"Source Code" = "https://github.com/apache/airflow"
Twitter = "https://twitter.com/ApacheAirflow"
YouTube = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
[tool.hatch.version]
path = "airflow/__init__.py"
[tool.hatch.build.targets.wheel.hooks.custom]
path = "./hatch_build.py"
[tool.hatch.build.hooks.custom]
path = "./hatch_build.py"
[tool.hatch.build.targets.custom]
path = "./hatch_build.py"
[tool.hatch.build.targets.sdist]
include = [
"/airflow",
"/airflow/git_version"
]
exclude = [
"/airflow/www/node_modules/"
]
artifacts = [
"/airflow/www/static/dist/",
"/airflow/git_version",
"/generated/",
]
[tool.hatch.build.targets.wheel]
include = [
"/airflow",
]
artifacts = [
"/airflow/www/static/dist/",
"/airflow/git_version"
]
## black settings ##
[tool.black]
line-length = 110
target-version = ['py38', 'py39', 'py310', 'py311', 'py312']
## ruff settings ##
[tool.ruff]
target-version = "py38"
line-length = 110
extend-exclude = [
".eggs",
"*/_vendor/*",
"tests/dags/test_imports.py",
]
namespace-packages = ["airflow/providers"]
[tool.ruff.lint]
typing-modules = ["airflow.typing_compat"]
extend-select = [
# Enable entire ruff rule section
"I", # Missing required import (auto-fixable)
"UP", # Pyupgrade
"ASYNC", # subset of flake8-async rules
"ISC", # Checks for implicit literal string concatenation (auto-fixable)
"TCH", # Rules around TYPE_CHECKING blocks
"G", # flake8-logging-format rules
"LOG", # flake8-logging rules, most of them autofixable
"PT", # flake8-pytest-style rules
"TID25", # flake8-tidy-imports rules
"E", # pycodestyle rules
"W", # pycodestyle rules
# Per rule enables
"RUF006", # Checks for asyncio dangling task
"RUF015", # Checks for unnecessary iterable allocation for first element
"RUF019", # Checks for unnecessary key check
"RUF100", # Unused noqa (auto-fixable)
# We ignore more pydocstyle than we enable, so be more selective at what we enable
"D1",
"D2",
"D213", # Conflicts with D212. Both can not be enabled.
"D3",
"D400",
"D401",
"D402",
"D403",
"D412",
"D419",
"PGH004", # Use specific rule codes when using noqa
"PGH005", # Invalid unittest.mock.Mock methods/attributes/properties
"S101", # Checks use `assert` outside the test cases, test cases should be added into the exclusions
"B004", # Checks for use of hasattr(x, "__call__") and replaces it with callable(x)
"B006", # Checks for uses of mutable objects as function argument defaults.
"B007", # Checks for unused variables in the loop
"B017", # Checks for pytest.raises context managers that catch Exception or BaseException.
"B019", # Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
"B028", # No explicit stacklevel keyword argument found
"TRY002", # Prohibit use of `raise Exception`, use specific exceptions instead.
]
ignore = [
"D100", # Unwanted; Docstring at the top of every file.
"D102", # TODO: Missing docstring in public method
"D103", # TODO: Missing docstring in public function
"D104", # Unwanted; Docstring at the top of every `__init__.py` file.
"D105", # Unwanted; See https://lists.apache.org/thread/8jbg1dd2lr2cfydtqbjxsd6pb6q2wkc3
"D107", # Unwanted; Docstring in every constructor is unnecessary if the class has a docstring.
"D203",
"D212", # Conflicts with D213. Both can not be enabled.
"E731", # Do not assign a lambda expression, use a def
"TCH003", # Do not move imports from stdlib to TYPE_CHECKING block
"PT004", # Fixture does not return anything, add leading underscore
"PT006", # Wrong type of names in @pytest.mark.parametrize
"PT007", # Wrong type of values in @pytest.mark.parametrize
"PT013", # silly rule prohibiting e.g. `from pytest import param`
"PT011", # pytest.raises() is too broad, set the match parameter
"PT019", # fixture without value is injected as parameter, use @pytest.mark.usefixtures instead
# The rules below are explicitly disabled because they could conflict with Ruff's formatter,
# as recommended by https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
# (except the ISC rules)
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"E501", # Formatted code may exceed the line length, leading to line-too-long (E501) errors.
"ASYNC110", # TODO: Use `anyio.Event` instead of awaiting `anyio.sleep` in a `while` loop
]
unfixable = [
# PT022 replaces an empty `yield` with an empty `return`. Might be fixed in combination with PLR1711.
# In addition, it can't do anything about invalid typing annotations, which are guarded by mypy.
"PT022",
]
[tool.ruff.format]
docstring-code-format = true
[tool.ruff.lint.isort]
required-imports = ["from __future__ import annotations"]
combine-as-imports = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
"testing"
]
# Make sure we put the "dev" imports at the end, not as a third-party module
[tool.ruff.lint.isort.sections]
testing = ["dev", "providers.tests", "task_sdk.tests", "tests_common", "tests"]
[tool.ruff.lint.extend-per-file-ignores]
"airflow/__init__.py" = ["F401", "TCH004", "I002"]
"airflow/models/__init__.py" = ["F401", "TCH004"]
"airflow/models/sqla_models.py" = ["F401"]
"providers/src/airflow/providers/__init__.py" = ["I002"]
# The exclusion for test_python.py is needed because adding __future__.annotations breaks runtime checks
# that are needed for the test to work
"tests/decorators/test_python.py" = ["I002"]
# The Pydantic representations of SQLAlchemy models are not parsed well by Pydantic
# when __future__.annotations is used, so we need to skip upgrading them.
# Pydantic also requires the models to be imported during execution.
"airflow/serialization/pydantic/*.py" = ["I002", "UP007", "TCH001"]
# Ruff fails to detect that types and functions used in `Annotated[...]` syntax are required at runtime.
# Annotated is central for FastAPI dependency injection, skipping rules for FastAPI folders.
"airflow/api_fastapi/*" = ["TCH001", "TCH002"]
"tests/api_fastapi/*" = ["TCH001", "TCH002"]
# Ignore pydoc style from these
"*.pyi" = ["D"]
"scripts/*" = ["D", "PT"] # In addition ignore pytest specific rules
"docs/*" = ["D"]
"provider_packages/*" = ["D"]
"*/example_dags/*" = ["D"]
"chart/*" = ["D"]
"dev/*" = ["D"]
# In addition, ignore in tests
# TID253: Banned top level imports, e.g. pandas, numpy
# S101: Use `assert`
# TRY002: Use `raise Exception`
"dev/perf/*" = ["TID253"]
"dev/check_files.py" = ["S101"]
"tests_common/*" = ["S101", "TRY002"]
"dev/breeze/tests/*" = ["TID253", "S101", "TRY002"]
"tests/*" = ["D", "TID253", "S101", "TRY002"]
"docker_tests/*" = ["D", "TID253", "S101", "TRY002"]
"kubernetes_tests/*" = ["D", "TID253", "S101", "TRY002"]
"helm_tests/*" = ["D", "TID253", "S101", "TRY002"]
# All of the modules which have an extra license header (i.e. that we copy from another project) need to
# ignore E402 -- module level import not at top of file
"scripts/ci/pre_commit/*.py" = ["E402"]
"airflow/api/auth/backend/kerberos_auth.py" = ["E402"]
"airflow/security/kerberos.py" = ["E402"]
"airflow/security/utils.py" = ["E402"]
# All the modules which do not follow B028 yet: https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/
"helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"]
# The test compat module uses banned imports to allow testing against older airflow versions
"tests_common/test_utils/compat.py" = ["TID251", "F401"]
[tool.ruff.lint.flake8-tidy-imports]
# Disallow all relative imports.
ban-relative-imports = "all"
# Ban certain modules from being imported at module level, instead requiring
# that they're imported lazily (e.g., within a function definition).
banned-module-level-imports = ["numpy", "pandas"]
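# As an illustration of what TID253 enforces here, a module-level `import pandas` would be flagged;
# the import should instead happen lazily inside the function that needs it, e.g. (hypothetical helper):
#
#   def _to_dataframe(rows):
#       import pandas as pd  # deferred import, allowed by TID253
#       return pd.DataFrame(rows)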
[tool.ruff.lint.flake8-tidy-imports.banned-api]
# Direct imports from the airflow package modules that are banned or constrained
"airflow.AirflowException".msg = "Use airflow.exceptions.AirflowException instead."
"airflow.Dataset".msg = "Use airflow.datasets.Dataset instead."
# Deprecated imports
"airflow.models.baseoperator.BaseOperatorLink".msg = "Use airflow.models.baseoperatorlink.BaseOperatorLink"
"airflow.models.errors.ImportError".msg = "Use airflow.models.errors.ParseImportError"
"airflow.models.ImportError".msg = "Use airflow.models.errors.ParseImportError"
# Deprecated in Python 3.11, pending removal in Python 3.15: https://github.com/python/cpython/issues/90817
# The deprecation warning in Python 3.11 also recommends using locale.getencoding,
# but that function is only available in Python 3.11+
"locale.getdefaultlocale".msg = "Use locale.setlocale() and locale.getlocale() instead."
# Deprecated in Python 3.12: https://github.com/python/cpython/issues/103857
"datetime.datetime.utcnow".msg = "Use airflow.utils.timezone.utcnow or datetime.datetime.now(tz=datetime.timezone.utc)"
"datetime.datetime.utcfromtimestamp".msg = "Use airflow.utils.timezone.from_timestamp or datetime.datetime.fromtimestamp(tz=datetime.timezone.utc)"
# Deprecated in Python 3.12: https://github.com/python/cpython/issues/94309
"typing.Hashable".msg = "Use collections.abc.Hashable"
"typing.Sized".msg = "Use collections.abc.Sized"
# Uses deprecated in Python 3.12 `datetime.datetime.utcfromtimestamp`
"pendulum.from_timestamp".msg = "Use airflow.utils.timezone.from_timestamp"
# Flask deprecations, worthwhile to keep it until we migrate to Flask 3.0+
"flask._app_ctx_stack".msg = "Deprecated in Flask 2.2, removed in Flask 3.0"
"flask._request_ctx_stack".msg = "Deprecated in Flask 2.2, removed in Flask 3.0"
"flask.escape".msg = "Use markupsafe.escape instead. Deprecated in Flask 2.3, removed in Flask 3.0"
"flask.Markup".msg = "Use markupsafe.Markup instead. Deprecated in Flask 2.3, removed in Flask 3.0"
"flask.signals_available".msg = "Signals are always available. Deprecated in Flask 2.3, removed in Flask 3.0"
# Use of the root logger, usually by mistake / IDE autosuggestion
# If for some reason the root logger is required, it can be obtained via logging.getLogger("root")
"logging.debug".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.info".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.warning".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.error".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.exception".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.fatal".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.critical".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
"logging.log".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger"
# unittest related restrictions
"unittest.TestCase".msg = "Use pytest compatible classes: https://docs.pytest.org/en/stable/getting-started.html#group-multiple-tests-in-a-class"
"unittest.skip".msg = "Use `pytest.mark.skip` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks"
"unittest.skipIf".msg = "Use `pytest.mark.skipif` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks"
"unittest.skipUnless".msg = "Use `pytest.mark.skipif` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks"
"unittest.expectedFailure".msg = "Use `pytest.mark.xfail` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks"
# Moved in SQLAlchemy 2.0
"sqlalchemy.ext.declarative.declarative_base".msg = "Use `sqlalchemy.orm.declarative_base`. Moved in SQLAlchemy 2.0"
"sqlalchemy.ext.declarative.as_declarative".msg = "Use `sqlalchemy.orm.as_declarative`. Moved in SQLAlchemy 2.0"
"sqlalchemy.ext.declarative.has_inherited_table".msg = "Use `sqlalchemy.orm.has_inherited_table`. Moved in SQLAlchemy 2.0"
"sqlalchemy.ext.declarative.synonym_for".msg = "Use `sqlalchemy.orm.synonym_for`. Moved in SQLAlchemy 2.0"
[tool.ruff.lint.flake8-type-checking]
exempt-modules = ["typing", "typing_extensions"]
[tool.ruff.lint.flake8-pytest-style]
mark-parentheses = false
fixture-parentheses = false
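# With the two settings above, bare decorators are preferred when there are no arguments,
# e.g. `@pytest.fixture` rather than `@pytest.fixture()` (illustrative example).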
## pytest settings ##
[tool.pytest.ini_options]
addopts = [
"--tb=short",
"-rasl",
"--verbosity=2",
# Disable `flaky` plugin for pytest. This plugin conflicts with `rerunfailures` because both provide the same marker.
"-p", "no:flaky",
# Disable `nose` builtin plugin for pytest. This feature is deprecated in 7.2 and will be removed in pytest>=8
"-p", "no:nose",
# Disable support of a legacy `LocalPath` in favor of stdlib `pathlib.Path`.
"-p", "no:legacypath",
# Disable the warnings summary, because we use our own warnings summary.
"--disable-warnings",
"--asyncio-mode=strict",
]
norecursedirs = [
".eggs",
"airflow",
"tests/_internals",
"tests/dags_with_system_exit",
"tests/dags_corrupted",
"tests/dags",
"providers/tests/system/google/cloud/dataproc/resources",
"providers/tests/system/google/cloud/gcs/resources",
]
log_level = "INFO"
filterwarnings = [
"error::pytest.PytestCollectionWarning",
"error::pytest.PytestReturnNotNoneWarning",
# Avoid building cartesian product which might impact performance
"error:SELECT statement has a cartesian product between FROM:sqlalchemy.exc.SAWarning:airflow",
'error:Coercing Subquery object into a select\(\) for use in IN\(\):sqlalchemy.exc.SAWarning:airflow',
'error:Class.*will not make use of SQL compilation caching',
"ignore::DeprecationWarning:flask_appbuilder.filemanager",
"ignore::DeprecationWarning:flask_appbuilder.widgets",
# FAB does not support SQLAlchemy 2
"ignore::sqlalchemy.exc.MovedIn20Warning:flask_appbuilder",
# https://github.com/dpgaspar/Flask-AppBuilder/issues/2194
"ignore::DeprecationWarning:marshmallow_sqlalchemy.convert",
# https://github.com/dpgaspar/Flask-AppBuilder/pull/1940
"ignore::DeprecationWarning:flask_sqlalchemy",
# https://github.com/dpgaspar/Flask-AppBuilder/pull/1903
"ignore::DeprecationWarning:apispec.utils",
# Connexion 2 uses various deprecated objects; this should be resolved when moving to Connexion 3
# https://github.com/spec-first/connexion/pull/1536
'ignore::DeprecationWarning:connexion.spec',
'ignore:jsonschema\.RefResolver:DeprecationWarning:connexion.json_schema',
'ignore:jsonschema\.exceptions\.RefResolutionError:DeprecationWarning:connexion.json_schema',
'ignore:Accessing jsonschema\.draft4_format_checker:DeprecationWarning:connexion.decorators.validation',
]
# We cannot add warnings from the airflow package into `filterwarnings`,
# because doing so would import airflow before we set up the test environment, which breaks the tests.
# Instead, we use a separate parameter and dynamically add these into the `filterwarnings` marker.
forbidden_warnings = [
"airflow.exceptions.RemovedInAirflow3Warning",
"airflow.utils.context.AirflowContextDeprecationWarning",
"airflow.exceptions.AirflowProviderDeprecationWarning",
]
python_files = [
"test_*.py",
"example_*.py",
]
testpaths = [
"tests",
]
asyncio_default_fixture_loop_scope = "function"
# Keep temporary directories (created by `tmp_path`) only for the 2 most recent runs, and only for failed tests.
tmp_path_retention_count = "2"
tmp_path_retention_policy = "failed"
## coverage.py settings ##
[tool.coverage.run]
branch = true
relative_files = true
source = ["airflow"]
omit = [
"airflow/_vendor/**",
"airflow/contrib/**",
"airflow/example_dags/**",
"airflow/migrations/**",
"airflow/www/node_modules/**",
"providers/src/airflow/providers/**/example_dags/**",
"providers/src/airflow/providers/google/ads/_vendor/**",
]
[tool.coverage.report]
skip_empty = true
exclude_also = [
"def __repr__",
"raise AssertionError",
"raise NotImplementedError",
"if __name__ == .__main__.:",
"@(abc\\.)?abstractmethod",
"@(typing(_extensions)?\\.)?overload",
"if (typing(_extensions)?\\.)?TYPE_CHECKING:"
]
## mypy settings ##
[tool.mypy]
ignore_missing_imports = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = false
plugins = [
"dev/mypy/plugin/decorators.py",
"dev/mypy/plugin/outputs.py",
]
pretty = true
show_error_codes = true
disable_error_code = [
"annotation-unchecked",
]
# Since there are no __init__.py files in
# providers/src/airflow/providers we need to tell MyPy where the "base"
# is; otherwise, when it sees
# providers/src/airflow/providers/redis/__init__.py, it thinks this is
# the toplevel `redis` folder.
explicit_package_bases = true
mypy_path = [
"$MYPY_CONFIG_FILE_DIR",
"$MYPY_CONFIG_FILE_DIR/providers/src",
"$MYPY_CONFIG_FILE_DIR/task_sdk/src",
]
[[tool.mypy.overrides]]
module="airflow.config_templates.default_webserver_config"
disable_error_code = [
"var-annotated",
]
[[tool.mypy.overrides]]
module="airflow.migrations.*"
ignore_errors = true
[[tool.mypy.overrides]]
module="airflow.*._vendor.*"
ignore_errors = true
[[tool.mypy.overrides]]
module= [
"google.cloud.*",
"azure.*",
]
no_implicit_optional = false
[[tool.mypy.overrides]]
module=[
"referencing.*",
# Beam has some old type annotations and recently introduced an error with a bad signature of
# a function. This is captured in https://github.com/apache/beam/issues/29927
# and we should remove this exclusion when it is fixed.
"apache_beam.*"
]
ignore_errors = true
[tool.uv]
dev-dependencies = [
"local-providers",
"apache-airflow-task-sdk"
]
[tool.uv.sources]
# These names must match the names as defined in the pyproject.toml of the workspace items,
# *not* the workspace folder paths
local-providers = { workspace = true }
apache-airflow-task-sdk = { workspace = true }
[tool.uv.workspace]
members = ["providers", "task_sdk"]