Skip to content
Snippets Groups Projects
Select Git revision
  • stable-2.0.1
  • main default protected
  • airflow-2.7.0 protected
  • airflow253 protected
  • air251
  • test_docker_op
  • airflow225
  • mptest
  • https-deployment
  • datacat_integration protected
  • datacatalog-integration
  • stable-2.2.2 protected
  • stable-2.2.1 protected
  • stable-2.2.0 protected
  • stable-2.1.4 protected
  • stable-2.1.3 protected
  • stable-2.1.2 protected
  • stable-2.1.1 protected
  • stable-2.1.0 protected
  • stable-2.0.2 protected
  • stable-2.0.0 protected
  • stable-1.0.1 protected
  • stable-1.0 protected
  • stable-0.1 protected
24 results

airflow.cfg

Blame
  • airflow.cfg 42.68 KiB
    [core]
    # The folder where your airflow pipelines live, most likely a
    # subfolder in a code repository. This path must be absolute.
    dags_folder = /opt/airflow/dags
    
    # Hostname by providing a path to a callable, which will resolve the hostname.
    # The format is "package.function".
    #
    # For example, the default value "socket.getfqdn" means that the result of getfqdn()
    # from the "socket" package will be used as the hostname.
    #
    # No argument should be required in the function specified.
    # If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address``
    hostname_callable = socket.getfqdn
    
    # Default timezone in case supplied date times are naive
    # can be utc (default), system, or any IANA timezone string (e.g. Europe/Amsterdam)
    default_timezone = utc
    
    # The executor class that airflow should use. Choices include
    # ``SequentialExecutor``, ``LocalExecutor``, ``CeleryExecutor``, ``DaskExecutor``,
    # ``KubernetesExecutor``, ``CeleryKubernetesExecutor`` or the
    # full import path to the class when using a custom executor.
    executor = CeleryExecutor
    
    # The SqlAlchemy connection string to the metadata database.
    # SqlAlchemy supports many different database engines.
    # More information here:
    # http://airflow.apache.org/docs/apache-airflow/stable/howto/set-up-database.html#database-uri
    # NOTE(review): SQLite does not support the CeleryExecutor configured above
    # (SQLite requires SequentialExecutor). Either this value is overridden at
    # deploy time (e.g. via AIRFLOW__CORE__SQL_ALCHEMY_CONN) or it should be a
    # MySQL/PostgreSQL connection string — confirm against the deployment.
    sql_alchemy_conn = sqlite:////opt/airflow/airflow.db
    
    # The encoding for the databases
    sql_engine_encoding = utf-8
    
    # Collation for ``dag_id``, ``task_id``, ``key`` columns in case they have different encoding.
    # By default this collation is the same as the database collation, however for ``mysql`` and ``mariadb``
    # the default is ``utf8mb3_bin`` so that the index sizes of our index keys will not exceed
    # the maximum size of allowed index when collation is set to ``utf8mb4`` variant
    # (see https://github.com/apache/airflow/pull/17603#issuecomment-901121618).
    # sql_engine_collation_for_ids =
    
    # Whether SqlAlchemy should pool database connections.
    sql_alchemy_pool_enabled = True
    
    # The SqlAlchemy pool size is the maximum number of database connections
    # in the pool. 0 indicates no limit.
    sql_alchemy_pool_size = 5
    
    # The maximum overflow size of the pool.
    # When the number of checked-out connections reaches the size set in pool_size,
    # additional connections will be returned up to this limit.
    # When those additional connections are returned to the pool, they are disconnected and discarded.
    # It follows then that the total number of simultaneous connections the pool will allow
    # is pool_size + max_overflow,
    # and the total number of "sleeping" connections the pool will allow is pool_size.
    # max_overflow can be set to ``-1`` to indicate no overflow limit;
    # no limit will be placed on the total number of concurrent connections. Defaults to ``10``.
    sql_alchemy_max_overflow = 10
    
    # The SqlAlchemy pool recycle is the number of seconds a connection
    # can be idle in the pool before it is invalidated. This config does
    # not apply to sqlite. If the number of DB connections is ever exceeded,
    # a lower config value will allow the system to recover faster.
    sql_alchemy_pool_recycle = 1800
    
    # Check connection at the start of each connection pool checkout.
    # Typically, this is a simple statement like "SELECT 1".
    # More information here:
    # https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
    sql_alchemy_pool_pre_ping = True