"""initial schema

Revision ID: 851d54415f73
Revises: 
Create Date: 2026-04-01 19:53:02.835586

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '851d54415f73'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Create the initial schema.

    Tables are emitted in dependency order so every foreign key has a
    target when it is declared: ``analysis_types`` and ``users`` first,
    then ``datasets`` (FK -> users), ``analysis_runs`` (FKs -> users,
    datasets, analysis_types), and finally the high-volume
    ``data_points`` table (FK -> datasets) with its query indexes.
    """
    # Registry of runnable analyses; `slug` is the stable unique key and
    # `python_module`/`python_function` locate the implementation.
    op.create_table(
        'analysis_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('slug', sa.String(length=50), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('category', sa.String(length=50), nullable=True),
        sa.Column('min_fixes', sa.Integer(), nullable=True),
        sa.Column('min_animals', sa.Integer(), nullable=True),
        sa.Column('min_duration_hours', sa.Float(), nullable=True),
        sa.Column('requires_regular_sampling', sa.Boolean(), nullable=True),
        sa.Column('python_module', sa.String(length=255), nullable=False),
        sa.Column('python_function', sa.String(length=255), nullable=False),
        sa.Column('default_params', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('slug'),
    )

    # Accounts table; e-mail gets a unique index (created via batch mode
    # so the migration also works on SQLite).
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('password_hash', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('institution', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    with op.batch_alter_table('users', schema=None) as batch:
        batch.create_index(batch.f('ix_users_email'), ['email'], unique=True)

    # One uploaded tracking dataset per row, with cached summary stats
    # (counts, time span, bounding box) and the user's column mapping.
    op.create_table(
        'datasets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('original_filename', sa.String(length=255), nullable=True),
        sa.Column('num_animals', sa.Integer(), nullable=True),
        sa.Column('num_fixes', sa.Integer(), nullable=True),
        sa.Column('time_start', sa.DateTime(), nullable=True),
        sa.Column('time_end', sa.DateTime(), nullable=True),
        sa.Column('bbox_min_lat', sa.Float(), nullable=True),
        sa.Column('bbox_min_lon', sa.Float(), nullable=True),
        sa.Column('bbox_max_lat', sa.Float(), nullable=True),
        sa.Column('bbox_max_lon', sa.Float(), nullable=True),
        sa.Column('column_mapping', sa.JSON(), nullable=True),
        sa.Column('upload_status', sa.String(length=20), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('id'),
    )

    # One analysis execution per row: inputs (filters, params), task
    # bookkeeping (status, celery id, timestamps), and outputs.
    op.create_table(
        'analysis_runs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('dataset_id', sa.Integer(), nullable=True),
        sa.Column('analysis_type_id', sa.Integer(), nullable=True),
        sa.Column('animal_ids', sa.JSON(), nullable=True),
        sa.Column('time_start', sa.DateTime(), nullable=True),
        sa.Column('time_end', sa.DateTime(), nullable=True),
        sa.Column('bbox', sa.JSON(), nullable=True),
        sa.Column('params', sa.JSON(), nullable=True),
        sa.Column('status', sa.String(length=20), nullable=True),
        sa.Column('celery_task_id', sa.String(length=255), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('result_summary', sa.JSON(), nullable=True),
        sa.Column('result_artifacts', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['analysis_type_id'], ['analysis_types.id']),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uuid'),
    )

    # Individual GPS fixes. BigInteger PK because this table dominates
    # row count; derived movement metrics (speed, step_length,
    # turning_angle) are stored alongside the raw fix.
    op.create_table(
        'data_points',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('dataset_id', sa.Integer(), nullable=False),
        sa.Column('animal_id', sa.String(length=255), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('lat', sa.Float(), nullable=False),
        sa.Column('lon', sa.Float(), nullable=False),
        sa.Column('sensor_data', sa.JSON(), nullable=True),
        sa.Column('speed', sa.Float(), nullable=True),
        sa.Column('step_length', sa.Float(), nullable=True),
        sa.Column('turning_angle', sa.Float(), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    # Composite indexes matching the expected access paths: by dataset,
    # by (dataset, animal), and by (dataset, timestamp).
    with op.batch_alter_table('data_points', schema=None) as batch:
        batch.create_index('idx_dp_animal', ['dataset_id', 'animal_id'], unique=False)
        batch.create_index('idx_dp_dataset', ['dataset_id'], unique=False)
        batch.create_index('idx_dp_time', ['dataset_id', 'timestamp'], unique=False)


def downgrade():
    """Drop the initial schema.

    Mirrors :func:`upgrade` in reverse dependency order: indexes come
    off before their table is dropped, and child tables go before the
    parents their foreign keys reference.
    """
    # data_points: drop its indexes (batch mode for SQLite), then the table.
    with op.batch_alter_table('data_points', schema=None) as batch:
        batch.drop_index('idx_dp_time')
        batch.drop_index('idx_dp_dataset')
        batch.drop_index('idx_dp_animal')
    op.drop_table('data_points')

    # analysis_runs references users, datasets, and analysis_types, so it
    # must disappear before any of them.
    op.drop_table('analysis_runs')
    op.drop_table('datasets')

    # users: remove the e-mail index first, then the table itself.
    with op.batch_alter_table('users', schema=None) as batch:
        batch.drop_index(batch.f('ix_users_email'))
    op.drop_table('users')

    op.drop_table('analysis_types')
