feat: mysql adaptation for metadb (#28188)

This commit is contained in:
longbingljw
2025-11-20 09:44:39 +08:00
committed by GitHub
parent 012877d8d4
commit c0b7ffd5d0
131 changed files with 6312 additions and 2602 deletions

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# Revision identifiers, used by Alembic to order migrations.
revision = 'fca025d3b60f'
# The migration this revision builds on (its direct parent in the chain).
down_revision = '8fe468ba0ca5'
@@ -18,26 +24,48 @@ depends_on = None
def upgrade():
    """Drop the legacy ``sessions`` table and add ``datasets.retrieval_model``.

    Dialect-aware: on PostgreSQL the column is JSONB with a GIN index; on
    other backends (e.g. MySQL) a portable JSON type is used and the index
    is skipped, since there is no GIN equivalent there.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    op.drop_table('sessions')
    if _is_pg(conn):
        # PostgreSQL: native JSONB plus a GIN index for fast containment
        # queries on the retrieval settings.
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retrieval_model', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
            batch_op.create_index('retrieval_model_idx', ['retrieval_model'], unique=False, postgresql_using='gin')
    else:
        # Other dialects: project-provided cross-dialect JSON type.
        # NOTE(review): assumes AdjustedJSON accepts astext_type like JSONB
        # does — confirm against models.types.
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retrieval_model', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Reverse :func:`upgrade`: drop ``retrieval_model`` and recreate ``sessions``.

    Dialect-aware: the GIN index only exists on PostgreSQL, and the recreated
    ``sessions`` table uses PostgreSQL-specific column types there versus
    portable equivalents elsewhere.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        # The GIN index was only created on PostgreSQL, so only drop it here.
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.drop_index('retrieval_model_idx', postgresql_using='gin')
            batch_op.drop_column('retrieval_model')
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.drop_column('retrieval_model')
    if _is_pg(conn):
        op.create_table('sessions',
            sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
            sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
            sa.Column('data', postgresql.BYTEA(), autoincrement=False, nullable=True),
            sa.Column('expiry', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
            sa.PrimaryKeyConstraint('id', name='sessions_pkey'),
            sa.UniqueConstraint('session_id', name='sessions_session_id_key')
        )
    else:
        # Portable variant: BYTEA/TIMESTAMP have no MySQL counterpart, so use
        # the project binary type and the generic SQLAlchemy TIMESTAMP.
        op.create_table('sessions',
            sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
            sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
            sa.Column('data', models.types.BinaryData(), autoincrement=False, nullable=True),
            sa.Column('expiry', sa.TIMESTAMP(), autoincrement=False, nullable=True),
            sa.PrimaryKeyConstraint('id', name='sessions_pkey'),
            sa.UniqueConstraint('session_id', name='sessions_session_id_key')
        )
    # ### end Alembic commands ###