From 16b9212953cf8be01c2217336338653191f734f7 Mon Sep 17 00:00:00 2001
From: inter
Date: Mon, 8 Sep 2025 16:35:47 +0800
Subject: [PATCH] Add File

---
 backend/alembic/versions/016_modify_chat.py | 71 +++++++++++++++++++++
 1 file changed, 71 insertions(+)
 create mode 100644 backend/alembic/versions/016_modify_chat.py

diff --git a/backend/alembic/versions/016_modify_chat.py b/backend/alembic/versions/016_modify_chat.py
new file mode 100644
index 0000000..9e29bfe
--- /dev/null
+++ b/backend/alembic/versions/016_modify_chat.py
@@ -0,0 +1,71 @@
+"""016_modify_chat
+
+Revision ID: 031148da1d81
+Revises: 02d84523a979
+Create Date: 2025-06-26 17:00:07.054531
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '031148da1d81'
+down_revision = '02d84523a979'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('chat', 'datasource',
+                    existing_type=sa.INTEGER(),
+                    nullable=True)
+    op.add_column('chat_record', sa.Column('ai_modal_id', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('first_chat', sa.Boolean(), nullable=True))
+    op.add_column('chat_record', sa.Column('recommended_question_answer', sa.Text(), nullable=True))
+    op.add_column('chat_record', sa.Column('recommended_question', sa.Text(), nullable=True))
+    op.add_column('chat_record', sa.Column('datasource_select_answer', sa.Text(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_sql', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_chart', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_analysis', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_predict', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('full_recommended_question_message', sa.Text(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_recommended_question', sa.Integer(), nullable=True))
+    op.add_column('chat_record', sa.Column('full_select_datasource_message', sa.Text(), nullable=True))
+    op.add_column('chat_record', sa.Column('token_select_datasource_question', sa.Integer(), nullable=True))
+    op.alter_column('chat_record', 'chat_id',
+                    existing_type=sa.INTEGER(),
+                    nullable=False)
+    op.alter_column('chat_record', 'datasource',
+                    existing_type=sa.INTEGER(),
+                    nullable=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('chat_record', 'datasource',
+                    existing_type=sa.INTEGER(),
+                    nullable=False)
+    op.alter_column('chat_record', 'chat_id',
+                    existing_type=sa.INTEGER(),
+                    nullable=True)
+    op.drop_column('chat_record', 'token_select_datasource_question')
+    op.drop_column('chat_record', 'full_select_datasource_message')
+    op.drop_column('chat_record', 'token_recommended_question')
+    op.drop_column('chat_record', 'full_recommended_question_message')
+    op.drop_column('chat_record', 'token_predict')
+    op.drop_column('chat_record', 'token_analysis')
+    op.drop_column('chat_record', 'token_chart')
+    op.drop_column('chat_record', 'token_sql')
+    op.drop_column('chat_record', 'datasource_select_answer')
+    op.drop_column('chat_record', 'recommended_question')
+    op.drop_column('chat_record', 'recommended_question_answer')
+    op.drop_column('chat_record', 'first_chat')
+    op.drop_column('chat_record', 'ai_modal_id')
+    op.alter_column('chat', 'datasource',
+                    existing_type=sa.INTEGER(),
+                    nullable=False)
+    # ### end Alembic commands ###