Compare commits

...

7 Commits

13 changed files with 431 additions and 5 deletions


@@ -5,7 +5,7 @@
 # this is typically a path given in POSIX (e.g. forward slashes)
 # format, relative to the token %(here)s which refers to the location of this
 # ini file
-script_location = %(here)s/dev/alembic
+script_location = alembic

 # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
 # Uncomment the line below if you want the files to be prepended with date and time
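
Note on the script_location change above: with a bare relative path, Alembic resolves "alembic" against the current working directory, whereas the %(here)s token pins it to the directory containing the ini file. A minimal sketch of driving Alembic programmatically with this layout (the alembic.ini path and the explicit override are illustrative assumptions, not part of this change):

from alembic.config import Config
from alembic import command

# Assumes alembic.ini sits in the repo root next to the alembic/ directory.
cfg = Config("alembic.ini")
# Optional: pin the script location explicitly instead of relying on the CWD.
cfg.set_main_option("script_location", "alembic")
command.current(cfg)  # report the revision currently applied to the database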

alembic/README Normal file

@@ -0,0 +1 @@
Generic single-database configuration.

alembic/env.py Normal file

@@ -0,0 +1,61 @@
from logging.config import fileConfig
import os
import sys

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Add the project root to the Python path
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

# Import your SQLAlchemy models and engine
from models import SQLModel
from database import engine

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Import all your SQLModel models here so that Alembic can detect them
from models import User, DBSession

# Set the target metadata to SQLModel.metadata
target_metadata = SQLModel.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    connectable = engine

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
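
With this env.py, migrations are normally generated and applied through the Alembic CLI (alembic revision --autogenerate -m "...", alembic upgrade head). An equivalent programmatic sketch, assuming alembic.ini lives in the repo root and the application's database.engine is importable from there:

from alembic import command
from alembic.config import Config

def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    """Hypothetical helper: apply all pending migrations via env.py."""
    cfg = Config(ini_path)
    command.upgrade(cfg, "head")  # dispatches to run_migrations_online()

if __name__ == "__main__":
    upgrade_to_head()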


@@ -0,0 +1,71 @@
"""Add PublicStream model

Revision ID: 0df481ee920b
Revises: f86c93c7a872
Create Date: 2025-07-19 10:02:22.902696

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '0df481ee920b'
down_revision: Union[str, Sequence[str], None] = 'f86c93c7a872'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # First create the new publicstream table
    op.create_table('publicstream',
        sa.Column('uid', sa.String(), nullable=False),
        sa.Column('size', sa.Integer(), nullable=False),
        sa.Column('mtime', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('uid')
    )

    # Drop the foreign key constraint first
    op.drop_constraint('dbsession_user_id_fkey', 'dbsession', type_='foreignkey')

    # Then drop the unique constraint
    op.drop_constraint(op.f('uq_user_username'), 'user', type_='unique')

    # Create the new index
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)

    # Recreate the foreign key constraint
    op.create_foreign_key(
        'dbsession_user_id_fkey', 'dbsession', 'user',
        ['user_id'], ['username'], ondelete='CASCADE'
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the foreign key constraint first
    op.drop_constraint('dbsession_user_id_fkey', 'dbsession', type_='foreignkey')

    # Drop the index
    op.drop_index(op.f('ix_user_username'), table_name='user')

    # Recreate the unique constraint
    op.create_unique_constraint(op.f('uq_user_username'), 'user', ['username'])

    # Recreate the foreign key constraint
    op.create_foreign_key(
        'dbsession_user_id_fkey', 'dbsession', 'user',
        ['user_id'], ['username'], ondelete='CASCADE'
    )

    # Drop the publicstream table
    op.drop_table('publicstream')
    # ### end Alembic commands ###


@@ -0,0 +1,86 @@
"""make username unique

Revision ID: 1ab2db0e4b5e
Revises:
Create Date: 2025-06-27 13:04:10.085253

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import sqlmodel


# revision identifiers, used by Alembic.
revision: str = '1ab2db0e4b5e'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # 1. First, add the unique constraint to the username column
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.create_unique_constraint('uq_user_username', ['username'])

    # 2. Now create the dbsession table with the foreign key
    op.create_table('dbsession',
        sa.Column('token', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('ip_address', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('user_agent', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('last_activity', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.username'], ),
        sa.PrimaryKeyConstraint('token')
    )

    # 3. Drop old tables if they exist
    if op.get_bind().engine.dialect.has_table(op.get_bind(), 'session'):
        op.drop_index(op.f('ix_session_token'), table_name='session')
        op.drop_index(op.f('ix_session_user_id'), table_name='session')
        op.drop_table('session')

    if op.get_bind().engine.dialect.has_table(op.get_bind(), 'publicstream'):
        op.drop_table('publicstream')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # 1. First drop the dbsession table
    op.drop_table('dbsession')

    # 2. Recreate the old tables
    op.create_table('publicstream',
        sa.Column('uid', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('size', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('mtime', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('uid', name=op.f('publicstream_pkey'))
    )
    op.create_table('session',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('token', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('ip_address', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('user_agent', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.Column('expires_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.Column('last_used_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('session_pkey'))
    )
    op.create_index(op.f('ix_session_user_id'), 'session', ['user_id'], unique=False)
    op.create_index(op.f('ix_session_token'), 'session', ['token'], unique=True)

    # 3. Finally, remove the unique constraint from username
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.drop_constraint('uq_user_username', type_='unique')
    # ### end Alembic commands ###
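
Side note on the has_table guards in upgrade() above: calling dialect.has_table() directly works, but the public SQLAlchemy API for existence checks is the inspector. An equivalent guard, shown only for illustration (not part of the committed migration):

import sqlalchemy as sa
from alembic import op

def _table_exists(name: str) -> bool:
    # Inspect the same connection Alembic is already using for this migration.
    return sa.inspect(op.get_bind()).has_table(name)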


@@ -0,0 +1,49 @@
"""add_display_name_to_user

Revision ID: 8be4811023d8
Revises: 0df481ee920b
Create Date: 2025-07-19 19:46:01.129412

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel


# revision identifiers, used by Alembic.
revision: str = '8be4811023d8'
down_revision: Union[str, Sequence[str], None] = '0df481ee920b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('dbsession_user_id_fkey'), 'dbsession', type_='foreignkey')
    op.create_foreign_key(None, 'dbsession', 'user', ['user_id'], ['username'])
    op.alter_column('publicstream', 'storage_bytes',
        existing_type=sa.INTEGER(),
        nullable=False,
        existing_server_default=sa.text('0'))
    op.create_index(op.f('ix_publicstream_username'), 'publicstream', ['username'], unique=False)
    op.drop_column('publicstream', 'size')
    op.add_column('user', sa.Column('display_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'display_name')
    op.add_column('publicstream', sa.Column('size', sa.INTEGER(), autoincrement=False, nullable=False))
    op.drop_index(op.f('ix_publicstream_username'), table_name='publicstream')
    op.alter_column('publicstream', 'storage_bytes',
        existing_type=sa.INTEGER(),
        nullable=True,
        existing_server_default=sa.text('0'))
    op.drop_constraint(None, 'dbsession', type_='foreignkey')
    op.create_foreign_key(op.f('dbsession_user_id_fkey'), 'dbsession', 'user', ['user_id'], ['username'], ondelete='CASCADE')
    # ### end Alembic commands ###


@@ -0,0 +1,30 @@
"""add_processed_filename_to_uploadlog

Revision ID: f86c93c7a872
Revises: 1ab2db0e4b5e
Create Date: 2025-06-28 15:56:29.169668

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'f86c93c7a872'
down_revision: Union[str, Sequence[str], None] = '1ab2db0e4b5e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    op.add_column('uploadlog',
        sa.Column('processed_filename', sa.String(), nullable=True),
        schema=None)


def downgrade() -> None:
    """Downgrade schema."""
    op.drop_column('uploadlog', 'processed_filename', schema=None)

alembic/script.py.mako Normal file

@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}


@@ -0,0 +1,85 @@
"""initial base schema

Revision ID: 5f0b37b50730
Revises:
Create Date: 2025-08-08 08:42:06.859256

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '5f0b37b50730'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('publicstream',
        sa.Column('uid', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=True),
        sa.Column('storage_bytes', sa.Integer(), nullable=False),
        sa.Column('mtime', sa.Integer(), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('uid')
    )
    op.create_index(op.f('ix_publicstream_username'), 'publicstream', ['username'], unique=False)
    op.create_table('uploadlog',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uid', sa.String(), nullable=False),
        sa.Column('ip', sa.String(), nullable=False),
        sa.Column('filename', sa.String(), nullable=True),
        sa.Column('processed_filename', sa.String(), nullable=True),
        sa.Column('size_bytes', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user',
        sa.Column('token_created', sa.DateTime(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('confirmed', sa.Boolean(), nullable=False),
        sa.Column('ip', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('email')
    )
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    op.create_table('userquota',
        sa.Column('uid', sa.String(), nullable=False),
        sa.Column('storage_bytes', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('uid')
    )
    op.create_table('dbsession',
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('uid', sa.String(), nullable=False),
        sa.Column('ip_address', sa.String(), nullable=False),
        sa.Column('user_agent', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('last_activity', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['uid'], ['user.email'], ),
        sa.PrimaryKeyConstraint('token')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('dbsession')
    op.drop_table('userquota')
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_table('user')
    op.drop_table('uploadlog')
    op.drop_index(op.f('ix_publicstream_username'), table_name='publicstream')
    op.drop_table('publicstream')
    # ### end Alembic commands ###
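
Because this baseline revision has no down_revision and recreates the whole schema, a database that already contains these tables would normally be marked as current rather than upgraded through it. A hedged sketch using Alembic's programmatic API (the revision id is taken from this file; the ini path is an assumption):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the project's ini file
# Record revision 5f0b37b50730 as applied without executing its DDL.
command.stamp(cfg, "5f0b37b50730")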


@@ -1,4 +1,4 @@
-bind = "0.0.0.0:8000"
+bind = "0.0.0.0:8100"
 workers = 2 # Tune based on available CPU cores
 worker_class = "uvicorn.workers.UvicornWorker"
 timeout = 300 # Increased from 60 to 300 seconds (5 minutes)


@@ -12,3 +12,5 @@ uvicorn==0.34.2
 uvloop==0.21.0
 watchfiles==1.0.5
 websockets==15.0.1
+alembic
+gunicorn


@@ -4,6 +4,8 @@
 #
 # pip-compile requirements.in
 #
+alembic==1.16.4
+    # via -r requirements.in
 annotated-types==0.6.0
     # via pydantic
 anyio==4.2.0
@@ -18,6 +20,8 @@ fastapi==0.115.12
     # via -r requirements.in
 greenlet==3.2.1
     # via sqlalchemy
+gunicorn==23.0.0
+    # via -r requirements.in
 h11==0.14.0
     # via uvicorn
 httptools==0.6.4
@@ -26,8 +30,14 @@ idna==3.4
     # via anyio
 limits==3.2.0
     # via slowapi
+mako==1.3.10
+    # via alembic
+markupsafe==3.0.2
+    # via mako
 packaging==23.0
-    # via limits
+    # via
+    #   gunicorn
+    #   limits
 psycopg2-binary==2.9.10
     # via -r requirements.in
 pydantic==2.6.0
@@ -47,13 +57,16 @@ slowapi==0.1.9
 sniffio==1.3.0
     # via anyio
 sqlalchemy==2.0.40
-    # via sqlmodel
+    # via
+    #   alembic
+    #   sqlmodel
 sqlmodel==0.0.24
     # via -r requirements.in
 starlette==0.46.1
     # via fastapi
 typing-extensions==4.13.2
     # via
+    #   alembic
     #   fastapi
     #   limits
     #   pydantic


@@ -97,7 +97,7 @@ def upload(request: Request, uid: str = Form(...), file: UploadFile = Form(...))
         ip=request.client.host,
         filename=file.filename, # original filename from user
         processed_filename=None, # not yet processed
-        size_bytes=None # not yet known
+        size_bytes=0 # placeholder to satisfy NOT NULL; updated after processing
     )
     db.add(early_log)
     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE FLUSH] Before db.flush() after early_log add")