make upgrade and downgrade no op for now
sadpandajoe committed Jul 18, 2024
1 parent a59ee53 commit d61b0df
Showing 2 changed files with 20 additions and 181 deletions.
15 changes: 15 additions & 0 deletions superset/migrations/shared/utils.py
@@ -25,6 +25,7 @@
from sqlalchemy import inspect
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.postgresql.base import PGDialect
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.orm import Query, Session

@@ -168,3 +169,17 @@ def try_load_json(data: Optional[str]) -> dict[str, Any]:
    except json.JSONDecodeError:
        print(f"Failed to parse: {data}")
        return {}


def drop_constraints(table: str, insp: Inspector) -> None:
    """
    Drop all foreign key constraints for a given table.

    :param table: Table name
    :param insp: SQLAlchemy Inspector instance
    """
    fks = insp.get_foreign_keys(table)
    for fk in fks:
        constraint = fk["name"]
        if constraint:
            op.drop_constraint(constraint, table, type_="foreignkey")
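
For reference, a minimal sketch of how an Alembic migration could call this shared helper before dropping tables, mirroring the pattern in the (now skipped) migration below; the table names and list here are illustrative only, not part of the commit:

from alembic import op
from sqlalchemy.engine.reflection import Inspector

from superset.migrations.shared.utils import drop_constraints

tables_to_drop = ["sl_dataset_columns", "sl_columns"]  # illustrative names only


def upgrade():
    bind = op.get_bind()
    insp = Inspector.from_engine(bind)

    # Remove foreign keys first so the later drop_table calls
    # do not fail on dependent constraints.
    for table in tables_to_drop:
        drop_constraints(table, insp)

    for table in tables_to_drop:
        op.drop_table(table)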
@@ -30,188 +30,12 @@
revision = "02f4f7811799"
down_revision = "f7b6750b67e8"

tables_to_drop = [
    "sl_dataset_columns",
    "sl_table_columns",
    "sl_dataset_tables",
    "sl_columns",
    "sl_tables",
    "sl_dataset_users",
    "sl_datasets",
]

def drop_constraints(table, insp):
    conn = op.get_bind()
    fks = insp.get_foreign_keys(table)
    for fk in fks:
        constraint = fk["name"]
        if constraint:
            op.drop_constraint(constraint, table, type_="foreignkey")
# We were seeing issues when dropping these tables that caused
# deadlocks in the database. For now the migration is a no-op;
# we'll address these issues at a future time.

def upgrade():
    bind = op.get_bind()
    insp = Inspector.from_engine(bind)

    # Drop foreign key constraints first
    for table in tables_to_drop:
        drop_constraints(table, insp)

    # Drop tables
    for table in tables_to_drop:
        op.drop_table(table)
    pass

def downgrade():
    op.create_table(
        "sl_datasets",
        sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("database_id", sa.Integer(), nullable=False),
        sa.Column("is_physical", sa.Boolean(), nullable=True),
        sa.Column("is_managed_externally", sa.Boolean(), nullable=False),
        sa.Column("name", sa.Text(), nullable=True),
        sa.Column("expression", sa.Text(), nullable=True),
        sa.Column("external_url", sa.Text(), nullable=True),
        sa.Column("extra_json", sa.Text(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["changed_by_fk"],
            ["ab_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["created_by_fk"],
            ["ab_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["database_id"],
            ["dbs.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("uuid"),
    )
    op.create_table(
        "sl_tables",
        sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("database_id", sa.Integer(), nullable=False),
        sa.Column("is_managed_externally", sa.Boolean(), nullable=False),
        sa.Column("catalog", sa.Text(), nullable=True),
        sa.Column("schema", sa.Text(), nullable=True),
        sa.Column("name", sa.Text(), nullable=True),
        sa.Column("external_url", sa.Text(), nullable=True),
        sa.Column("extra_json", sa.Text(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["changed_by_fk"],
            ["ab_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["created_by_fk"],
            ["ab_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["database_id"],
            ["dbs.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("uuid"),
    )
    op.create_table(
        "sl_columns",
        sa.Column("uuid", sa.Numeric(precision=16), nullable=True),
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("is_aggregation", sa.Boolean(), nullable=False),
        sa.Column("is_additive", sa.Boolean(), nullable=False),
        sa.Column("is_dimensional", sa.Boolean(), nullable=False),
        sa.Column("is_filterable", sa.Boolean(), nullable=False),
        sa.Column("is_increase_desired", sa.Boolean(), nullable=False),
        sa.Column("is_managed_externally", sa.Boolean(), nullable=False),
        sa.Column("is_partition", sa.Boolean(), nullable=False),
        sa.Column("is_physical", sa.Boolean(), nullable=False),
        sa.Column("is_temporal", sa.Boolean(), nullable=False),
        sa.Column("is_spatial", sa.Boolean(), nullable=False),
        sa.Column("name", sa.Text(), nullable=True),
        sa.Column("type", sa.Text(), nullable=True),
        sa.Column("unit", sa.Text(), nullable=True),
        sa.Column("expression", sa.Text(), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("warning_text", sa.Text(), nullable=True),
        sa.Column("external_url", sa.Text(), nullable=True),
        sa.Column("extra_json", sa.Text(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), nullable=True),
        sa.Column("advanced_data_type", sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(
            ["changed_by_fk"],
            ["ab_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["created_by_fk"],
            ["ab_user.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("uuid"),
    )
    op.create_table(
        "sl_dataset_users",
        sa.Column("dataset_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["dataset_id"],
            ["sl_datasets.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["ab_user.id"],
        ),
        sa.PrimaryKeyConstraint("dataset_id", "user_id"),
    )
    op.create_table(
        "sl_dataset_tables",
        sa.Column("dataset_id", sa.Integer(), nullable=False),
        sa.Column("table_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["dataset_id"],
            ["sl_datasets.id"],
        ),
        sa.ForeignKeyConstraint(
            ["table_id"],
            ["sl_tables.id"],
        ),
        sa.PrimaryKeyConstraint("dataset_id", "table_id"),
    )
    op.create_table(
        "sl_table_columns",
        sa.Column("table_id", sa.Integer(), nullable=False),
        sa.Column("column_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["column_id"],
            ["sl_columns.id"],
        ),
        sa.ForeignKeyConstraint(
            ["table_id"],
            ["sl_tables.id"],
        ),
        sa.PrimaryKeyConstraint("table_id", "column_id"),
    )
    op.create_table(
        "sl_dataset_columns",
        sa.Column("dataset_id", sa.Integer(), nullable=False),
        sa.Column("column_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["column_id"],
            ["sl_columns.id"],
        ),
        sa.ForeignKeyConstraint(
            ["dataset_id"],
            ["sl_datasets.id"],
        ),
        sa.PrimaryKeyConstraint("dataset_id", "column_id"),
    )
    pass
