I just found out that Airflow creates some tmp config files in the /tmp dir on 
servers that run Celery workers. All the files have names like tmpxxxxxxx, and 
the same content, like:
{"core": {"dags_folder": "/data/data366/airflow/dags", "base_log_folder": 
"/data/data366/airflow/logs", "remote_logging": "False", "remote_log_conn_id": 
"", "remote_base_log_folder": "", "encrypt_s3_logs": "False", "logging_level": 
"INFO", "fab_logging_level": "WARN", "logging_config_class": "", 
"colored_console_log": "True", "colored_log_format": 
"[%%(blue)s%%(asctime)s%%(reset)s] 
{%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d} 
%%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s", 
"colored_formatter_class": 
"airflow.utils.log.colored_log.CustomTTYColoredFormatter", "log_format": 
"[%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s", 
"simple_log_format": "%%(asctime)s %%(levelname)s - %%(message)s", 
"log_filename_template": "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ 
try_number }}.log", "log_processor_filename_template": "{{ filename }}.log", 
"dag_processor_manager_log_location": 
"/data/data366/airflow/logs/dag_processor_manager/dag_processor_manager.log", 
"hostname_callable": "socket:getfqdn", "default_timezone": "utc", "executor": 
"CeleryExecutor", "sql_alchemy_conn": 
"postgresql+psycopg2://airflow:Itpwpc12@rhw9133/airflow", 
"sql_engine_encoding": "utf-8", "sql_alchemy_pool_enabled": "True", 
"sql_alchemy_pool_size": "5", "sql_alchemy_max_overflow": "10", 
"sql_alchemy_pool_recycle": "1800", "sql_alchemy_pool_pre_ping": "True", 
"sql_alchemy_schema": "", "parallelism": "32", "dag_concurrency": "16", 
"dags_are_paused_at_creation": "True", "max_active_runs_per_dag": "16", 
"load_examples": "False", "plugins_folder": "/data/data366/airflow/plugins", 
"fernet_key": "rTXn28iYM2m2KO1qgMygpd6Em9hXNCS700K4JtCU32k=", "donot_pickle": 
"False", "dagbag_import_timeout": "30", "dag_file_processor_timeout": "50", 
"task_runner": "StandardTaskRunner", "default_impersonation": "", "security": 
"", "secure_mode": "False", "unit_test_mode": "False", "task_log_reader": 
"task", "enable_xcom_pickling": "True", "killed_task_cleanup_time": "60", 
"dag_run_conf_overrides_params": "False", "worker_precheck": "False", 
"dag_discovery_safe_mode": "True", "default_task_retries": "0", 
"sql_alchemy_reconnect_timeout": "300", "non_pooled_task_slot_count": "128"}, 
"cli": {"api_client": "airflow.api.client.local_client", "endpoint_url": 
"http://localhost:8080"}, "api": {"auth_backend": 
"airflow.api.auth.backend.default"}, "lineage": {"backend": ""}, "atlas": 
{"sasl_enabled": "False", "host": "", "port": "21000", "username": "", 
"password": ""}, "operators": {"default_owner": "Airflow", "default_cpus": "1", 
"default_ram": "512", "default_disk": "512", "default_gpus": "0"}, "hive": 
{"default_hive_mapred_queue": ""}, "webserver": {"base_url": 
"http://localhost:8080", "web_server_host": "0.0.0.0", "web_server_port": 
"8080", "web_server_ssl_cert": "", "web_server_ssl_key": "", 
"web_server_master_timeout": "120", "web_server_worker_timeout": "120", 
"worker_refresh_batch_size": "1", "worker_

Which Airflow process creates the tmp files? Why can't it be just one tmp file, 
or a permanent file, since all the tmp files have the same content?
Heng

Reply via email to