airflow 0044_1_10_7_add_serialized_dag_table source code
File path: /airflow/migrations/versions/0044_1_10_7_add_serialized_dag_table.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``serialized_dag`` table
Revision ID: d38e04c12aa2
Revises: 6e96a59344a4
Create Date: 2019-08-01 14:39:35.616417
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = 'd38e04c12aa2'
down_revision = '6e96a59344a4'
branch_labels = None
depends_on = None
airflow_version = '1.10.7'


def upgrade():
    """Upgrade version."""
    json_type = sa.JSON
    conn = op.get_bind()

    if conn.dialect.name != "postgresql":
        # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
        # versions, check for the function existing.
        try:
            conn.execute("SELECT JSON_VALID(1)").fetchone()
        except (sa.exc.OperationalError, sa.exc.ProgrammingError):
            json_type = sa.Text

    op.create_table(
        'serialized_dag',
        sa.Column('dag_id', StringID(), nullable=False),
        sa.Column('fileloc', sa.String(length=2000), nullable=False),
        sa.Column('fileloc_hash', sa.Integer(), nullable=False),
        sa.Column('data', json_type(), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('dag_id'),
    )
    op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])

    if conn.dialect.name == "mysql":
        conn.execute("SET time_zone = '+00:00'")
        cur = conn.execute("SELECT @@explicit_defaults_for_timestamp")
        res = cur.fetchall()
        if res[0][0] == 0:
            raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql")
        op.alter_column(
            table_name="serialized_dag",
            column_name="last_updated",
            type_=mysql.TIMESTAMP(fsp=6),
            nullable=False,
        )
    else:
        # sqlite and mssql datetime are fine as is. Therefore, not converting
        if conn.dialect.name in ("sqlite", "mssql"):
            return

        # we try to be database agnostic, but not every db (e.g. sqlserver)
        # supports per session time zones
        if conn.dialect.name == "postgresql":
            conn.execute("set timezone=UTC")

        op.alter_column(
            table_name="serialized_dag",
            column_name="last_updated",
            type_=sa.TIMESTAMP(timezone=True),
        )


def downgrade():
    """Downgrade version."""
    op.drop_table('serialized_dag')
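
For context, the dialect check at the top of upgrade() can be exercised outside of a migration. The sketch below is a minimal, hypothetical helper (the pick_json_type name and the connection URL are assumptions, not part of the Airflow source); it mirrors the same JSON_VALID() probe the migration uses to fall back from sa.JSON to sa.Text on databases without JSON support.

import sqlalchemy as sa


def pick_json_type(engine: sa.engine.Engine):
    """Mirror the migration's probe: use sa.Text when JSON_VALID() is unavailable."""
    if engine.dialect.name == "postgresql":
        # Postgres has native JSON support; no probe needed.
        return sa.JSON
    try:
        with engine.connect() as conn:
            # MySQL 5.7+/MariaDB 10.2.3+ expose JSON_VALID(); older servers raise here.
            conn.execute(sa.text("SELECT JSON_VALID(1)")).fetchone()
        return sa.JSON
    except (sa.exc.OperationalError, sa.exc.ProgrammingError):
        return sa.Text


# Usage (placeholder URL, adjust for your own database):
# engine = sa.create_engine("mysql+pymysql://user:pass@localhost/airflow")
# print(pick_json_type(engine))

In a real deployment this revision is not run by hand: it is applied together with the rest of the migration chain by Airflow's database upgrade command, or with Alembic's upgrade targeting revision d38e04c12aa2.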
Related articles
airflow 0001_1_5_0_current_schema source code
airflow 0002_1_5_0_create_is_encrypted source code
airflow 0003_1_5_0_for_compatibility source code
airflow 0004_1_5_0_more_logging_into_task_isntance source code
airflow 0005_1_5_2_job_id_indices source code
airflow 0006_1_6_0_adding_extra_to_log source code
airflow 0007_1_6_0_add_dagrun source code
airflow 0008_1_6_0_task_duration source code