```python
import datetime
import os

import airflow
from airflow.providers.postgres.operators.postgres import PostgresOperator

SQL_DATABASE = os.environ["SQL_DATABASE"]

with airflow.DAG(
    "airflow_db_connection_example",
    start_date=datetime.datetime(2024, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    PostgresOperator(
        task_id="run_airflow_db_query",
        dag=dag,
        postgres_conn_id="airflow_db",
        database=SQL_DATABASE,
        sql="SELECT * FROM dag LIMIT 10;",
    )
```
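`PostgresOperator` runs the query, but its output is typically visible only in the task logs. To work with the result rows programmatically, one option is `PostgresHook` inside a `PythonOperator` task. A minimal sketch, assuming the same `airflow_db` connection; the DAG id, task id, and the columns selected from the `dag` table are illustrative:

```python
import datetime

import airflow
from airflow.operators.python import PythonOperator
from airflow.providers.postgres.hooks.postgres import PostgresHook


def fetch_dag_rows():
    # Reuses the same "airflow_db" connection as the PostgresOperator example above.
    hook = PostgresHook(postgres_conn_id="airflow_db")
    rows = hook.get_records("SELECT dag_id, is_paused FROM dag LIMIT 10;")
    for dag_id, is_paused in rows:
        print(dag_id, is_paused)


with airflow.DAG(
    "airflow_db_fetch_example",  # illustrative DAG id
    start_date=datetime.datetime(2024, 1, 1),
    schedule_interval=None,
    catchup=False,
):
    PythonOperator(task_id="fetch_airflow_db_rows", python_callable=fetch_dag_rows)
```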
[[["易于理解","easyToUnderstand","thumb-up"],["解决了我的问题","solvedMyProblem","thumb-up"],["其他","otherUp","thumb-up"]],[["很难理解","hardToUnderstand","thumb-down"],["信息或示例代码不正确","incorrectInformationOrSampleCode","thumb-down"],["没有我需要的信息/示例","missingTheInformationSamplesINeed","thumb-down"],["翻译问题","translationIssue","thumb-down"],["其他","otherDown","thumb-down"]],["最后更新时间 (UTC):2025-03-10。"],[[["This page explains how to connect to and run SQL queries on the Cloud SQL instance that hosts the Airflow database for your Cloud Composer environment."],["While direct access to the Airflow database is possible, it's advised to use alternative methods like the Airflow REST API or CLI commands whenever feasible."],["To connect, create a DAG with `PostgresOperator` operators, specifying the SQL query in the `sql` parameter, and uploading/triggering it."],["It is not recommended to add custom tables or change the schema of the airflow database."],["Backing up the environment's data, including the Airflow database, should be done using snapshots instead of the deprecated database dumping method."]]],[]]