oib
2025-07-20 09:26:07 +02:00
parent da28b205e5
commit ab9d93d913
19 changed files with 1207 additions and 419 deletions


@ -0,0 +1,49 @@
"""add_display_name_to_user
Revision ID: 8be4811023d8
Revises: 0df481ee920b
Create Date: 2025-07-19 19:46:01.129412
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision: str = '8be4811023d8'
down_revision: Union[str, Sequence[str], None] = '0df481ee920b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(op.f('dbsession_user_id_fkey'), 'dbsession', type_='foreignkey')
    # Name the new FK explicitly: Alembic cannot drop an unnamed constraint
    # in downgrade(), so op.drop_constraint(None, ...) would fail.
    op.create_foreign_key(op.f('fk_dbsession_user_id_user'), 'dbsession', 'user', ['user_id'], ['username'])
op.alter_column('publicstream', 'storage_bytes',
existing_type=sa.INTEGER(),
nullable=False,
existing_server_default=sa.text('0'))
op.create_index(op.f('ix_publicstream_username'), 'publicstream', ['username'], unique=False)
op.drop_column('publicstream', 'size')
op.add_column('user', sa.Column('display_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'display_name')
op.add_column('publicstream', sa.Column('size', sa.INTEGER(), autoincrement=False, nullable=False))
op.drop_index(op.f('ix_publicstream_username'), table_name='publicstream')
op.alter_column('publicstream', 'storage_bytes',
existing_type=sa.INTEGER(),
nullable=True,
existing_server_default=sa.text('0'))
    op.drop_constraint(op.f('fk_dbsession_user_id_user'), 'dbsession', type_='foreignkey')
op.create_foreign_key(op.f('dbsession_user_id_fkey'), 'dbsession', 'user', ['user_id'], ['username'], ondelete='CASCADE')
# ### end Alembic commands ###
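For reference, this revision can also be applied or rolled back from Python rather than the alembic CLI. A minimal sketch using Alembic's command API, assuming the project's alembic.ini sits at the repository root:

# Sketch only: assumes alembic.ini at the repository root.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "8be4811023d8")      # apply this revision
# command.downgrade(cfg, "0df481ee920b")  # roll back to the previous one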


@ -1,13 +1,14 @@
"""Authentication routes for dicta2stream"""
from fastapi import APIRouter, Depends, Request, Response, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlmodel import Session, select
from datetime import datetime
from models import Session as DBSession, User
from database import get_db
from auth import get_current_user
router = APIRouter(prefix="/api", tags=["auth"])
security = HTTPBearer()
@router.post("/logout")
@ -18,30 +19,61 @@ async def logout(
credentials: HTTPAuthorizationCredentials = Depends(security)
):
"""Log out by invalidating the current session"""
    try:
        # Get the token from the Authorization header
        token = credentials.credentials if credentials else None

        if not token:
            return {"message": "No session to invalidate"}

        try:
            # Find and invalidate the session
            session = db.exec(
                select(DBSession)
                .where(DBSession.token == token)
                .where(DBSession.is_active == True)  # noqa: E712
            ).first()

            if session:
                try:
                    session.is_active = False
                    db.add(session)
                    db.commit()
                except Exception:
                    db.rollback()
        except Exception:
            # Continue with logout even if session lookup fails
            pass

        # Clear the session cookie (name and settings must match login in main.py)
        response.delete_cookie(
            key="sessionid",
            httponly=True,
            secure=True,
            samesite="lax",
            path="/"
        )

        # Clear any other auth-related cookies
        for cookie_name in ["uid", "authToken", "isAuthenticated", "token"]:
            response.delete_cookie(
                key=cookie_name,
                path="/",
                domain=request.url.hostname,
                secure=True,
                httponly=True,
                samesite="lax"
            )

        return {"message": "Successfully logged out"}
    except HTTPException:
        # Re-raise HTTP exceptions
        raise
    except Exception:
        # Don't expose internal errors to the client
        return {"message": "Logout processed"}
@router.get("/me")


@ -9,9 +9,50 @@ def concat_opus_files(user_dir: Path, output_file: Path):
Concatenate all .opus files in user_dir (except stream.opus) in random order into output_file.
Overwrites output_file if it exists; creates it if missing.
"""
# Clean up any existing filelist.txt to prevent issues
filelist_path = user_dir / 'filelist.txt'
if filelist_path.exists():
try:
filelist_path.unlink()
except Exception as e:
print(f"Warning: Could not clean up old filelist.txt: {e}")
# Get all opus files except stream.opus and remove any duplicates
import hashlib
file_hashes = set()
files = []
for f in user_dir.glob('*.opus'):
if f.name == 'stream.opus':
continue
try:
# Calculate file hash for duplicate detection
hasher = hashlib.md5()
with open(f, 'rb') as file:
buf = file.read(65536) # Read in 64kb chunks
while len(buf) > 0:
hasher.update(buf)
buf = file.read(65536)
file_hash = hasher.hexdigest()
# Skip if we've seen this exact file before
if file_hash in file_hashes:
print(f"Removing duplicate file: {f.name}")
f.unlink()
continue
file_hashes.add(file_hash)
files.append(f)
except Exception as e:
print(f"Error processing {f}: {e}")
    if not files:
        # No opus files remain: create an empty stream.opus instead of failing
        output_file.write_bytes(b'')
        return output_file
random.shuffle(files)
# Create a filelist for ffmpeg concat
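The hunk is cut off here. For context, the concat step the comment refers to typically writes the shuffled list to filelist.txt and hands it to ffmpeg's concat demuxer; a sketch of that step (assumed continuation, not this file's verbatim code):

# Assumed continuation: write the list, then concatenate without re-encoding.
import subprocess

with open(filelist_path, 'w') as fl:
    for f in files:
        fl.write(f"file '{f.name}'\n")

subprocess.run(
    ["ffmpeg", "-y", "-f", "concat", "-safe", "0",
     "-i", str(filelist_path), "-c", "copy", str(output_file)],
    check=True,
)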

create_silent_opus.py Normal file

@ -0,0 +1,70 @@
#!/usr/bin/env python3
"""
Create a silent OPUS audio file with 1 second of silence.
"""
import struct

import opuslib
# Configuration
SAMPLE_RATE = 48000
CHANNELS = 1
FRAME_SIZE = 960 # 20ms at 48kHz
SILENCE_DURATION = 1.0 # seconds
OUTPUT_FILE = "silent.opus"
# Calculate number of frames needed
num_frames = int((SAMPLE_RATE * SILENCE_DURATION) / (FRAME_SIZE * CHANNELS))
# Initialize Opus encoder
enc = opuslib.Encoder(SAMPLE_RATE, CHANNELS, 'voip')
# Create silent audio data (all zeros)
silent_frame = struct.pack('h' * FRAME_SIZE * CHANNELS, *([0] * FRAME_SIZE * CHANNELS))
# Create Ogg Opus file
# NOTE: CRC checksum fields below are left as zeros; strict Ogg demuxers
# may reject pages without valid checksums.
with open(OUTPUT_FILE, 'wb') as f:
# Write Ogg header
f.write(b'OggS') # Magic number
f.write(b'\x00') # Version
f.write(b'\x00') # Header type (0 = normal)
f.write(b'\x00\x00\x00\x00\x00\x00\x00\x00') # Granule position
f.write(b'\x00\x00\x00\x00') # Bitstream serial number
f.write(b'\x00\x00\x00\x00') # Page sequence number
f.write(b'\x00\x00\x00\x00') # Checksum
f.write(b'\x01') # Number of segments
f.write(b'\x00') # Segment table (0 = 1 byte segment)
# Write Opus header
f.write(b'OpusHead') # Magic signature
f.write(b'\x01') # Version
f.write(chr(CHANNELS).encode('latin1')) # Channel count
f.write(struct.pack('<H', 80)) # Preskip (80 samples)
f.write(struct.pack('<I', SAMPLE_RATE)) # Input sample rate
f.write(b'\x00\x00') # Output gain
f.write(b'\x00') # Channel mapping family (0 = mono/stereo)
# Write comment header
f.write(b'OpusTags') # Magic signature
f.write(struct.pack('<I', 0)) # Vendor string length (0 for none)
f.write(struct.pack('<I', 0)) # Number of comments (0)
# Encode and write silent frames
for _ in range(num_frames):
# Encode the silent frame
encoded = enc.encode(silent_frame, FRAME_SIZE)
# Write Ogg page
f.write(b'OggS') # Magic number
f.write(b'\x00') # Version
f.write(b'\x00') # Header type (0 = normal)
f.write(struct.pack('<Q', (FRAME_SIZE * _) % (1 << 64))) # Granule position
f.write(b'\x00\x00\x00\x00') # Bitstream serial number
f.write(struct.pack('<I', _ + 2)) # Page sequence number
f.write(b'\x00\x00\x00\x00') # Checksum (0 for now)
f.write(b'\x01') # Number of segments
f.write(chr(len(encoded)).encode('latin1')) # Segment length
f.write(encoded) # The encoded data
print(f"Created silent OPUS file: {OUTPUT_FILE}")


@ -65,43 +65,52 @@ async def list_streams_sse(db):
# Send initial ping
yield ":ping\n\n"
        # Query all public streams from the database with required fields
stmt = select(PublicStream).order_by(PublicStream.mtime.desc())
result = db.execute(stmt)
streams = result.scalars().all()
if not streams:
print("No public streams found in the database")
yield f"data: {json.dumps({'end': True})}\n\n"
return
print(f"Found {len(streams)} public streams in the database")
# Send each stream as an SSE event
for stream in streams:
try:
# Ensure we have all required fields with fallbacks
stream_data = {
'uid': stream.uid or '',
'size': stream.storage_bytes or 0,
'mtime': int(stream.mtime) if stream.mtime is not None else 0,
'username': stream.username or stream.uid or '',
'display_name': stream.display_name or stream.username or stream.uid or '',
'created_at': stream.created_at.isoformat() if stream.created_at else None,
'updated_at': stream.updated_at.isoformat() if stream.updated_at else None
}
print(f"Sending stream data: {stream_data}")
yield f"data: {json.dumps(stream_data)}\n\n"
# Small delay to prevent overwhelming the client
await asyncio.sleep(0.1)
except Exception as e:
print(f"Error processing stream {stream.uid}: {str(e)}")
if os.getenv("DEBUG") == "1":
import traceback
traceback.print_exc()
continue
# Send end of stream marker
print("Finished sending all streams")
yield f"data: {json.dumps({'end': True})}\n\n"
except Exception as e:
print(f"Error in list_streams_sse: {str(e)}")
if os.getenv("DEBUG") == "1":
import traceback
traceback.print_exc()
yield f"data: {json.dumps({'error': True, 'message': str(e)})}\n\n"
yield f"data: {json.dumps({'error': True, 'message': 'Stream generation failed'})}\n\n"
def list_streams(db: Session = Depends(get_db)):
"""List all public streams from the database"""

main.py

@ -11,13 +11,14 @@ import traceback
import shutil
import mimetypes
from typing import Optional
from models import User, UploadLog, UserQuota, get_user_by_uid
from sqlmodel import Session, select, SQLModel
from database import get_db, engine
from log import log_violation
import secrets
import time
import json
import subprocess
from datetime import datetime
from dotenv import load_dotenv
@ -135,18 +136,46 @@ async def validation_exception_handler(request: FastAPIRequest, exc: RequestVali
async def generic_exception_handler(request: FastAPIRequest, exc: Exception):
return JSONResponse(status_code=500, content={"detail": str(exc)})
# Debug endpoint to list all routes
@app.get("/debug/routes")
async def list_routes():
routes = []
for route in app.routes:
if hasattr(route, "methods") and hasattr(route, "path"):
routes.append({
"path": route.path,
"methods": list(route.methods) if hasattr(route, "methods") else [],
"name": route.name if hasattr(route, "name") else "",
"endpoint": str(route.endpoint) if hasattr(route, "endpoint") else "",
"router": str(route) # Add router info for debugging
})
# Sort routes by path for easier reading
routes.sort(key=lambda x: x["path"])
# Also print to console for server logs
print("\n=== Registered Routes ===")
for route in routes:
print(f"{', '.join(route['methods']).ljust(20)} {route['path']}")
print("======================\n")
return {"routes": routes}
# include routers from submodules
from register import router as register_router
from magic import router as magic_router
from upload import router as upload_router
from streams import router as streams_router
from list_user_files import router as list_user_files_router
from auth_router import router as auth_router
from list_streams import router as list_streams_router
from account_router import router as account_router
# Include all routers
app.include_router(auth_router)
app.include_router(account_router)
app.include_router(register_router)
app.include_router(magic_router)
@ -253,40 +282,134 @@ MAX_QUOTA_BYTES = 100 * 1024 * 1024
# Delete account endpoint has been moved to account_router.py
@app.delete("/uploads/{uid}/{filename}")
async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
    """
    Delete a file for a specific user.

    Args:
        uid: The username of the user (used as UID in routes)
        filename: The name of the file to delete
        request: The incoming request object
        db: Database session

    Returns:
        Dict with status message
    """
    try:
        # Get the user by username (which is used as UID in routes)
        user = get_user_by_uid(uid)
        if not user:
            raise HTTPException(status_code=404, detail="User not found")

        # Get client IP and verify it matches the user's IP
        ip = request.client.host
        if user.ip != ip:
            raise HTTPException(status_code=403, detail="Device/IP mismatch. Please log in again.")

        # Set up user directory and validate paths
        user_dir = os.path.join('data', user.username)
        os.makedirs(user_dir, exist_ok=True)

        # Decode URL-encoded filename
        from urllib.parse import unquote
        filename = unquote(filename)

        # Construct and validate target path
        target_path = os.path.join(user_dir, filename)
        real_target_path = os.path.realpath(target_path)
        real_user_dir = os.path.realpath(user_dir)

        # Security check: ensure the target path is inside the user's directory
        if not real_target_path.startswith(real_user_dir + os.sep):
            raise HTTPException(status_code=403, detail="Invalid file path")

        # Check if file exists
        if not os.path.isfile(real_target_path):
            raise HTTPException(status_code=404, detail=f"File not found: {filename}")
# Delete both the target file and its UUID-only variant
deleted_files = []
try:
# First delete the requested file (with log ID prefix)
if os.path.exists(real_target_path):
os.remove(real_target_path)
deleted_files.append(filename)
log_violation("DELETE", ip, uid, f"Deleted {filename}")
# Then try to find and delete the UUID-only variant (without log ID prefix)
if '_' in filename: # If filename has a log ID prefix (e.g., "123_uuid.opus")
uuid_part = filename.split('_', 1)[1] # Get the part after the first underscore
uuid_path = os.path.join(user_dir, uuid_part)
if os.path.exists(uuid_path):
os.remove(uuid_path)
deleted_files.append(uuid_part)
log_violation("DELETE", ip, uid, f"Deleted UUID variant: {uuid_part}")
file_deleted = len(deleted_files) > 0
if not file_deleted:
log_violation("DELETE_WARNING", ip, uid, f"No files found to delete for: {filename}")
except Exception as e:
log_violation("DELETE_ERROR", ip, uid, f"Error deleting file {filename}: {str(e)}")
file_deleted = False
# Try to refresh the user's playlist, but don't fail if we can't
try:
subprocess.run(["/root/scripts/refresh_user_playlist.sh", user.username],
check=False, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
except Exception as e:
log_violation("PLAYLIST_REFRESH_WARNING", ip, uid,
f"Failed to refresh playlist: {str(e)}")
# Clean up the database record for this file
try:
# Find and delete the upload log entry
log_entry = db.exec(
select(UploadLog)
.where(UploadLog.uid == uid)
.where(UploadLog.processed_filename == filename)
).first()
if log_entry:
db.delete(log_entry)
db.commit()
log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}")
except Exception as e:
log_violation("DB_CLEANUP_ERROR", ip, uid, f"Failed to clean up DB record: {str(e)}")
db.rollback()
# Regenerate stream.opus after file deletion
try:
from concat_opus import concat_opus_files
from pathlib import Path
user_dir_path = Path(user_dir)
stream_path = user_dir_path / "stream.opus"
concat_opus_files(user_dir_path, stream_path)
log_violation("STREAM_UPDATE", ip, uid, "Regenerated stream.opus after file deletion")
except Exception as e:
log_violation("STREAM_UPDATE_ERROR", ip, uid, f"Failed to regenerate stream.opus: {str(e)}")
# Update user quota in a separate try-except to not fail the entire operation
try:
# Use verify_and_fix_quota to ensure consistency between disk and DB
total_size = verify_and_fix_quota(db, user.username, user_dir)
log_violation("QUOTA_UPDATE", ip, uid,
f"Updated quota: {total_size} bytes")
except Exception as e:
log_violation("QUOTA_ERROR", ip, uid, f"Quota update failed: {str(e)}")
db.rollback()
return {"status": "deleted"}
except Exception as e:
# Log the error and re-raise with a user-friendly message
error_detail = str(e)
log_violation("DELETE_ERROR", request.client.host, uid, f"Failed to delete {filename}: {error_detail}")
if not isinstance(e, HTTPException):
raise HTTPException(status_code=500, detail=f"Failed to delete file: {error_detail}")
raise
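A quick smoke test for the endpoint; uid and filename below are placeholders, and the request must come from the IP recorded for the user:

# Sketch only: placeholder uid/filename, server assumed on localhost:8000.
import requests

resp = requests.delete("http://localhost:8000/uploads/alice/123_abc.opus")
print(resp.status_code, resp.json())  # {"status": "deleted"} on success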
@app.get("/confirm/{uid}")
def confirm_user(uid: str, request: Request):
@ -296,8 +419,55 @@ def confirm_user(uid: str, request: Request):
raise HTTPException(status_code=403, detail="Unauthorized")
return {"username": user.username, "email": user.email}
def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int:
"""
Verify and fix the user's quota based on the size of stream.opus file.
Returns the size of stream.opus in bytes.
"""
stream_opus_path = os.path.join(user_dir, 'stream.opus')
total_size = 0
# Only consider stream.opus for quota
if os.path.isfile(stream_opus_path):
try:
total_size = os.path.getsize(stream_opus_path)
print(f"[QUOTA] Stream.opus size for {uid}: {total_size} bytes")
except (OSError, FileNotFoundError) as e:
print(f"[QUOTA] Error getting size for stream.opus: {e}")
else:
print(f"[QUOTA] stream.opus not found in {user_dir}")
# Update quota in database
q = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
q.storage_bytes = total_size
db.add(q)
# Clean up any database records for files that don't exist
uploads = db.exec(select(UploadLog).where(UploadLog.uid == uid)).all()
for upload in uploads:
if upload.processed_filename: # Only check if processed_filename exists
stored_filename = f"{upload.id}_{upload.processed_filename}"
file_path = os.path.join(user_dir, stored_filename)
if not os.path.isfile(file_path):
print(f"[QUOTA] Removing orphaned DB record: {stored_filename}")
db.delete(upload)
try:
db.commit()
print(f"[QUOTA] Updated quota for {uid}: {total_size} bytes")
except Exception as e:
print(f"[QUOTA] Error committing quota update: {e}")
db.rollback()
raise
return total_size
@app.get("/me/{uid}")
def get_me(uid: str, request: Request, response: Response, db: Session = Depends(get_db)):
# Add headers to prevent caching
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Pragma"] = "no-cache"
response.headers["Expires"] = "0"
print(f"[DEBUG] GET /me/{uid} - Client IP: {request.client.host}")
try:
# Get user info
@ -315,6 +485,10 @@ def get_me(uid: str, request: Request, db: Session = Depends(get_db)):
if not debug_mode:
raise HTTPException(status_code=403, detail="IP address mismatch")
# Get user directory
user_dir = os.path.join('data', uid)
os.makedirs(user_dir, exist_ok=True)
# Get all upload logs for this user
upload_logs = db.exec(
select(UploadLog)
@ -323,23 +497,54 @@ def get_me(uid: str, request: Request, db: Session = Depends(get_db)):
).all()
print(f"[DEBUG] Found {len(upload_logs)} upload logs for UID {uid}")
        # Build file list from database records, checking if files exist on disk
files = []
        seen_files = set()  # Track seen files to avoid duplicates
print(f"[DEBUG] Processing {len(upload_logs)} upload logs for UID {uid}")
for i, log in enumerate(upload_logs):
if not log.filename or not log.processed_filename:
print(f"[DEBUG] Skipping log entry {i}: missing filename or processed_filename")
continue
# The actual filename on disk has the log ID prepended
stored_filename = f"{log.id}_{log.processed_filename}"
file_path = os.path.join(user_dir, stored_filename)
# Skip if we've already seen this file
if stored_filename in seen_files:
print(f"[DEBUG] Skipping duplicate file: {stored_filename}")
continue
seen_files.add(stored_filename)
# Only include the file if it exists on disk and is not stream.opus
if os.path.isfile(file_path) and stored_filename != 'stream.opus':
try:
# Get the actual file size in case it changed
file_size = os.path.getsize(file_path)
file_info = {
"name": stored_filename,
"original_name": log.filename,
"size": file_size
}
files.append(file_info)
print(f"[DEBUG] Added file {len(files)}: {log.filename} (stored as {stored_filename}, {file_size} bytes)")
except OSError as e:
print(f"[WARNING] Could not access file {stored_filename}: {e}")
else:
print(f"[DEBUG] File not found on disk or is stream.opus: {stored_filename}")
# Log all files being returned
print("[DEBUG] All files being returned:")
for i, file_info in enumerate(files, 1):
print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)")
# Verify and fix quota based on actual files on disk
total_size = verify_and_fix_quota(db, uid, user_dir)
quota_mb = round(total_size / (1024 * 1024), 2)
print(f"[DEBUG] Verified quota for UID {uid}: {quota_mb} MB")
response_data = {
"files": files,


@ -9,6 +9,7 @@ class User(SQLModel, table=True):
token_created: datetime = Field(default_factory=datetime.utcnow)
email: str = Field(primary_key=True)
username: str = Field(unique=True, index=True)
    display_name: Optional[str] = Field(default="", nullable=True)
token: str
confirmed: bool = False
ip: str = Field(default="")
@ -43,17 +44,40 @@ class DBSession(SQLModel, table=True):
class PublicStream(SQLModel, table=True):
"""Stores public stream metadata for all users"""
uid: str = Field(primary_key=True)
username: Optional[str] = Field(default=None, index=True)
display_name: Optional[str] = Field(default=None)
storage_bytes: int = 0
mtime: int = Field(default_factory=lambda: int(datetime.utcnow().timestamp()))
last_updated: Optional[datetime] = Field(default_factory=datetime.utcnow)
created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow)
def get_user_by_uid(uid: str) -> Optional[User]:
"""
Retrieve a user by their UID (username).
Note: In this application, the User model uses email as primary key,
but we're using username as UID for API routes. This function looks up
users by username.
Args:
uid: The username to look up
Returns:
User object if found, None otherwise
"""
with Session(engine) as session:
# First try to find by username (which is what we're using as UID)
statement = select(User).where(User.username == uid)
user = session.exec(statement).first()
# If not found by username, try by email (for backward compatibility)
if not user and '@' in uid:
statement = select(User).where(User.email == uid)
user = session.exec(statement).first()
return user
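The two-step lookup means either identifier form resolves to the same account; a usage sketch with placeholder names:

# Sketch: username-first lookup, email fallback only when uid contains '@'.
user = get_user_by_uid("alice")                 # matched on User.username
legacy = get_user_by_uid("alice@example.com")   # falls back to User.email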
def verify_session(db: Session, token: str) -> DBSession:

nohup.out Normal file

@ -0,0 +1,4 @@
INFO: Will watch for changes in these directories: ['/home/oib/games/dicta2stream']
ERROR: [Errno 98] Address already in use
INFO: Will watch for changes in these directories: ['/home/oib/games/dicta2stream']
ERROR: [Errno 98] Address already in use


@ -1,4 +1,2 @@
{"uid":"devuser","size":65551721,"mtime":1752752391}
{"uid":"oib9","size":12735117,"mtime":1752843762}
{"uid":"oibchello","size":1549246,"mtime":1752840918}
{"uid":"oibchello","size":3371119,"mtime":1752994076}
{"uid":"orangeicebear","size":1734396,"mtime":1748767975}


@ -0,0 +1,3 @@
{"uid":"devuser","size":90059327,"mtime":1752911461}
{"uid":"oibchello","size":16262818,"mtime":1752911899}
{"uid":"orangeicebear","size":1734396,"mtime":1748767975}


@ -7,11 +7,46 @@ from database import get_db
import uuid
import smtplib
from email.message import EmailMessage
from pathlib import Path
import os
router = APIRouter()
MAGIC_FROM = "noreply@dicta2stream.net"
MAGIC_DOMAIN = "https://dicta2stream.net"
DATA_ROOT = Path("./data")
def initialize_user_directory(username: str):
"""Initialize user directory with a silent stream.opus file"""
try:
user_dir = DATA_ROOT / username
default_stream_path = DATA_ROOT / "stream.opus"
print(f"[DEBUG] Initializing user directory: {user_dir.absolute()}")
# Create the directory if it doesn't exist
user_dir.mkdir(parents=True, exist_ok=True)
print(f"[DEBUG] Directory created or already exists: {user_dir.exists()}")
# Create stream.opus by copying the default stream.opus file
user_stream_path = user_dir / "stream.opus"
print(f"[DEBUG] Creating stream.opus at: {user_stream_path.absolute()}")
if not user_stream_path.exists():
if default_stream_path.exists():
import shutil
shutil.copy2(default_stream_path, user_stream_path)
print(f"[DEBUG] Copied default stream.opus to {user_stream_path}")
else:
print(f"[ERROR] Default stream.opus not found at {default_stream_path}")
# Fallback: create an empty file to prevent errors
with open(user_stream_path, 'wb') as f:
f.write(b'')
return True
except Exception as e:
print(f"Error initializing user directory for {username}: {str(e)}")
return False
@router.post("/register")
def register(request: Request, email: str = Form(...), user: str = Form(...), db: Session = Depends(get_db)):
@ -40,8 +75,13 @@ def register(request: Request, email: str = Form(...), user: str = Form(...), db
# Register new user
db.add(User(email=email, username=user, token=token, confirmed=False, ip=request.client.host))
db.add(UserQuota(uid=user))
try:
# First commit the user to the database
db.commit()
# Only after successful commit, initialize the user directory
initialize_user_directory(user)
except Exception as e:
db.rollback()
if isinstance(e, IntegrityError):

silent.opus Normal file

Binary file not shown.


@ -37,7 +37,7 @@ function handleMagicLoginRedirect() {
localStorage.setItem('uid', username);
localStorage.setItem('confirmed_uid', username);
localStorage.setItem('uid_time', Date.now().toString());
document.cookie = `uid=${encodeURIComponent(username)}; path=/; SameSite=Lax`;
// Update UI state
document.body.classList.add('authenticated');
@ -45,7 +45,7 @@ function handleMagicLoginRedirect() {
// Update local storage and cookies
localStorage.setItem('isAuthenticated', 'true');
document.cookie = `isAuthenticated=true; path=/; SameSite=Lax`;
// Update URL and history without reloading
window.history.replaceState({}, document.title, window.location.pathname);
@ -677,25 +677,170 @@ trackedFunctions.forEach(fnName => {
}
});
// Update the visibility of the account deletion section based on authentication state
function updateAccountDeletionVisibility(isAuthenticated) {
console.log('[ACCOUNT-DELETION] updateAccountDeletionVisibility called with isAuthenticated:', isAuthenticated);
// Find the account deletion section and its auth-only wrapper
const authOnlyWrapper = document.querySelector('#privacy-page .auth-only');
const accountDeletionSection = document.getElementById('account-deletion');
console.log('[ACCOUNT-DELETION] Elements found:', {
authOnlyWrapper: !!authOnlyWrapper,
accountDeletionSection: !!accountDeletionSection
});
// Function to show an element with all necessary styles
const showElement = (element) => {
if (!element) return;
console.log('[ACCOUNT-DELETION] Showing element:', element);
// Remove any hiding classes
element.classList.remove('hidden', 'auth-only-hidden');
// Set all possible visibility properties
element.style.display = 'block';
element.style.visibility = 'visible';
element.style.opacity = '1';
element.style.height = 'auto';
element.style.position = 'relative';
element.style.clip = 'auto';
element.style.overflow = 'visible';
// Add a class to mark as visible
element.classList.add('account-visible');
};
// Function to hide an element
const hideElement = (element) => {
if (!element) return;
console.log('[ACCOUNT-DELETION] Hiding element:', element);
// Set display to none to completely remove from layout
element.style.display = 'none';
// Remove any visibility-related classes
element.classList.remove('account-visible');
};
if (isAuthenticated) {
console.log('[ACCOUNT-DELETION] User is authenticated, checking if on privacy page');
// Get the current page state - only show on #privacy-page
const currentHash = window.location.hash;
const isPrivacyPage = currentHash === '#privacy-page';
console.log('[ACCOUNT-DELETION] Debug - Page State:', {
isAuthenticated,
currentHash,
isPrivacyPage,
documentTitle: document.title
});
if (isAuthenticated && isPrivacyPage) {
console.log('[ACCOUNT-DELETION] On privacy page, showing account deletion section');
// Show the auth wrapper and account deletion section
if (authOnlyWrapper) {
authOnlyWrapper.style.display = 'block';
authOnlyWrapper.style.visibility = 'visible';
}
if (accountDeletionSection) {
accountDeletionSection.style.display = 'block';
accountDeletionSection.style.visibility = 'visible';
}
} else {
console.log('[ACCOUNT-DELETION] Not on privacy page, hiding account deletion section');
// Hide the account deletion section
if (accountDeletionSection) {
accountDeletionSection.style.display = 'none';
accountDeletionSection.style.visibility = 'hidden';
}
// Only hide the auth wrapper if we're not on the privacy page
if (authOnlyWrapper && !isPrivacyPage) {
authOnlyWrapper.style.display = 'none';
authOnlyWrapper.style.visibility = 'hidden';
}
}
// Debug: Log the current state after updates
if (accountDeletionSection) {
console.log('[ACCOUNT-DELETION] Account deletion section state after show:', {
display: window.getComputedStyle(accountDeletionSection).display,
visibility: window.getComputedStyle(accountDeletionSection).visibility,
classes: accountDeletionSection.className,
parent: accountDeletionSection.parentElement ? {
tag: accountDeletionSection.parentElement.tagName,
classes: accountDeletionSection.parentElement.className,
display: window.getComputedStyle(accountDeletionSection.parentElement).display
} : 'no parent'
});
}
} else {
console.log('[ACCOUNT-DELETION] User is not authenticated, hiding account deletion section');
// Hide the account deletion section but keep the auth-only wrapper for other potential content
if (accountDeletionSection) {
hideElement(accountDeletionSection);
}
// Only hide the auth-only wrapper if it doesn't contain other important content
if (authOnlyWrapper) {
const hasOtherContent = Array.from(authOnlyWrapper.children).some(
child => child.id !== 'account-deletion' && child.offsetParent !== null
);
if (!hasOtherContent) {
hideElement(authOnlyWrapper);
}
}
}
// Log final state for debugging
console.log('[ACCOUNT-DELETION] Final state:', {
authOnlyWrapper: authOnlyWrapper ? {
display: window.getComputedStyle(authOnlyWrapper).display,
visibility: window.getComputedStyle(authOnlyWrapper).visibility,
classes: authOnlyWrapper.className
} : 'not found',
accountDeletionSection: accountDeletionSection ? {
display: window.getComputedStyle(accountDeletionSection).display,
visibility: window.getComputedStyle(accountDeletionSection).visibility,
classes: accountDeletionSection.className,
parent: accountDeletionSection.parentElement ? {
tag: accountDeletionSection.parentElement.tagName,
classes: accountDeletionSection.parentElement.className,
display: window.getComputedStyle(accountDeletionSection.parentElement).display
} : 'no parent'
} : 'not found'
});
}
// Check authentication state and update UI
function checkAuthState() {
// Debounce rapid calls
const now = Date.now();
// Throttle the checks
if (now - lastAuthCheckTime < AUTH_CHECK_DEBOUNCE) {
return wasAuthenticated === true;
}
lastAuthCheckTime = now;
authCheckCounter++;
// Check various authentication indicators
const hasAuthCookie = document.cookie.includes('isAuthenticated=true');
const hasUidCookie = document.cookie.includes('uid=');
const hasLocalStorageAuth = localStorage.getItem('isAuthenticated') === 'true';
const hasAuthToken = !!localStorage.getItem('authToken');
// User is considered authenticated if any of these are true
const isAuthenticated = hasAuthCookie || hasUidCookie || hasLocalStorageAuth || hasAuthToken;
// Only log if debug is enabled or if state has changed
if (DEBUG_AUTH_STATE || isAuthenticated !== wasAuthenticated) {
console.log('Auth State Check:', {
hasAuthCookie,
@ -729,6 +874,9 @@ function checkAuthState() {
console.warn('injectNavigation function not found');
}
// Update account deletion section visibility
updateAccountDeletionVisibility(isAuthenticated);
// Update the tracked state
wasAuthenticated = isAuthenticated;
@ -755,6 +903,12 @@ function setupAuthStatePolling() {
}
// Function to handle page navigation
function handlePageNavigation() {
const isAuthenticated = checkAuthState();
updateAccountDeletionVisibility(isAuthenticated);
}
// Initialize the application when DOM is loaded
document.addEventListener("DOMContentLoaded", () => {
// Set up authentication state monitoring
@ -766,6 +920,11 @@ document.addEventListener("DOMContentLoaded", () => {
// Initialize components
initNavigation();
// Initialize account deletion section visibility
handlePageNavigation();
// Listen for hash changes to update visibility when navigating
window.addEventListener('hashchange', handlePageNavigation);
// Initialize profile player after a short delay
setTimeout(() => {
@ -861,32 +1020,96 @@ document.addEventListener("DOMContentLoaded", () => {
const deleteAccountFromPrivacyBtn = document.getElementById('delete-account-from-privacy');
const deleteAccount = async (e) => {
if (e) {
e.preventDefault();
e.stopPropagation();
}
if (!confirm('Are you sure you want to delete your account?\n\nThis action cannot be undone.')) {
return;
}
// Show loading state
const deleteBtn = e?.target.closest('button');
const originalText = deleteBtn?.textContent || 'Delete My Account';
if (deleteBtn) {
deleteBtn.disabled = true;
deleteBtn.textContent = 'Deleting...';
}
try {
// Get UID from localStorage
const uid = localStorage.getItem('uid');
if (!uid) {
throw new Error('User not authenticated. Please log in again.');
}
console.log('Sending delete account request for UID:', uid);
const response = await fetch('/api/delete-account', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
credentials: 'include',
body: JSON.stringify({
uid: uid // Include UID in the request body
})
});
console.log('Received response status:', response.status, response.statusText);
// Try to parse response as JSON, but handle non-JSON responses
let data;
const text = await response.text();
try {
data = text ? JSON.parse(text) : {};
} catch (parseError) {
console.error('Failed to parse response as JSON:', parseError);
console.log('Raw response text:', text);
data = {};
}
if (response.ok) {
console.log('Account deletion successful');
showToast('✅ Account deleted successfully', 'success');
// Clear local storage and redirect to home page after a short delay
setTimeout(() => {
localStorage.clear();
window.location.href = '/';
}, 1000);
} else {
console.error('Delete account failed:', { status: response.status, data });
const errorMessage = data.detail || data.message ||
data.error ||
`Server returned ${response.status} ${response.statusText}`;
throw new Error(errorMessage);
}
} catch (error) {
console.error('Error in deleteAccount:', {
name: error.name,
message: error.message,
stack: error.stack,
error: error
});
// Try to extract a meaningful error message
let errorMessage = 'Failed to delete account';
if (error instanceof Error) {
errorMessage = error.message || error.toString();
} else if (typeof error === 'string') {
errorMessage = error;
} else if (error && typeof error === 'object') {
errorMessage = error.message || JSON.stringify(error);
}
showToast(`${errorMessage}`, 'error');
} finally {
// Restore button state
if (deleteBtn) {
deleteBtn.disabled = false;
deleteBtn.textContent = originalText;
}
}
};
// Add event listeners to both delete account buttons
@ -902,22 +1125,49 @@ document.addEventListener("DOMContentLoaded", () => {
});
// Logout function
async function logout(event) {
if (event) {
event.preventDefault();
event.stopPropagation();
}
// If handleLogout is available in dashboard.js, use it for comprehensive logout
if (typeof handleLogout === 'function') {
try {
await handleLogout(event);
} catch (error) {
console.error('Error during logout:', error);
// Fall back to basic logout if handleLogout fails
basicLogout();
}
} else {
// Fallback to basic logout if handleLogout is not available
basicLogout();
}
}
// Basic client-side logout as fallback
function basicLogout() {
// Clear authentication state
document.body.classList.remove('authenticated');
localStorage.removeItem('isAuthenticated');
localStorage.removeItem('uid');
localStorage.removeItem('confirmed_uid');
localStorage.removeItem('uid_time');
localStorage.removeItem('authToken');
// Clear all cookies with proper SameSite attribute
document.cookie.split(';').forEach(cookie => {
const [name] = cookie.trim().split('=');
if (name) {
document.cookie = `${name}=; Path=/; Expires=Thu, 01 Jan 1970 00:00:01 GMT; domain=${window.location.hostname}; SameSite=Lax`;
}
});
// Stop any playing audio
stopMainAudio();
// Force a hard redirect to ensure all state is cleared
window.location.href = '/';
}


@ -36,16 +36,78 @@ body.authenticated .auth-only {
#me-page:not([hidden]) > .auth-only,
#me-page:not([hidden]) > section,
#me-page:not([hidden]) > article,
#me-page:not([hidden]) > div {
display: block !important;
visibility: visible !important;
opacity: 1 !important;
}
/* Show auth-only elements when authenticated */
body.authenticated .auth-only {
display: block !important;
visibility: visible !important;
}
/* Account deletion section - improved width and formatting */
#account-deletion {
margin: 2.5rem auto;
padding: 2.5rem;
background: rgba(255, 255, 255, 0.05);
border-radius: 10px;
box-shadow: 0 3px 6px rgba(0, 0, 0, 0.15);
max-width: 600px;
line-height: 1.6;
color: var(--text-color);
}
#account-deletion h3 {
color: var(--color-primary);
margin-top: 0;
margin-bottom: 1.5rem;
font-size: 1.5rem;
}
#account-deletion p {
color: var(--color-text);
line-height: 1.6;
margin-bottom: 1.5rem;
}
#account-deletion ul {
margin: 1rem 0 1.5rem 1.5rem;
padding: 0;
color: var(--color-text);
}
#account-deletion .centered-container {
text-align: center;
margin-top: 2rem;
}
#delete-account-from-privacy {
background-color: #ff4d4f;
color: white;
border: none;
padding: 0.75rem 1.5rem;
border-radius: 4px;
cursor: pointer;
font-weight: 600;
font-size: 1rem;
transition: background-color 0.2s ease;
display: inline-flex;
align-items: center;
gap: 0.5rem;
}
#delete-account-from-privacy:hover {
background-color: #ff6b6b;
text-decoration: none;
}
/* Hide guest-only elements when authenticated */
body.authenticated .guest-only {
  display: none;
}


@ -3,19 +3,22 @@ document.addEventListener('DOMContentLoaded', () => {
// Function to update the play button with UID
function updatePersonalStreamPlayButton() {
const playButton = document.querySelector('#me-page .play-pause-btn');
const streamPlayer = document.querySelector('#me-page .stream-player');
if (!playButton || !streamPlayer) return;
// Get UID from localStorage or cookie
const uid = localStorage.getItem('uid') || getCookie('uid');
if (uid) {
// Show the player and set the UID if not already set
streamPlayer.style.display = 'block';
if (!playButton.dataset.uid) {
playButton.dataset.uid = uid;
console.log('[personal-stream] Set UID for personal stream play button:', uid);
}
} else {
// Hide the player for guests
streamPlayer.style.display = 'none';
}
}


@ -31,8 +31,8 @@ export async function initMagicLogin() {
const authToken = 'token-' + Math.random().toString(36).substring(2, 15);
// Set cookies and localStorage for SPA session logic
document.cookie = `uid=${encodeURIComponent(confirmedUid)}; path=/; SameSite=Lax`;
document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`;
// Store in localStorage for client-side access
localStorage.setItem('uid', confirmedUid);
@ -53,8 +53,8 @@ export async function initMagicLogin() {
const authToken = 'token-' + Math.random().toString(36).substring(2, 15);
// Set cookies and localStorage for SPA session logic
document.cookie = `uid=${encodeURIComponent(data.confirmed_uid)}; path=/; SameSite=Lax`;
document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`;
// Store in localStorage for client-side access
localStorage.setItem('uid', data.confirmed_uid);


@ -1,12 +1,22 @@
// static/streams-ui.js — public streams loader and profile-link handling
import { showOnly } from './router.js';
console.log('[streams-ui] Module loaded');
// Global variable to track if we should force refresh the stream list
let shouldForceRefresh = false;
// Function to refresh the stream list
window.refreshStreamList = function(force = true) {
shouldForceRefresh = force;
loadAndRenderStreams();
return new Promise((resolve) => {
// Resolve after a short delay to allow the stream list to update
setTimeout(resolve, 500);
});
};
// Removed loadingStreams and lastStreamsPageVisible guards for instant fetch
export function initStreamsUI() {
console.log('[streams-ui] Initializing streams UI');
initStreamLinks();
window.addEventListener('popstate', () => {
highlightActiveProfileLink();
@ -29,25 +39,55 @@ window.maybeLoadStreamsOnShow = maybeLoadStreamsOnShow;
// Global variables for audio control
let currentlyPlayingAudio = null;
// Global variable to track the active SSE connection
let activeSSEConnection = null;
// Global cleanup function for SSE connections
const cleanupConnections = () => {
if (window._streamsSSE) {
if (window._streamsSSE.abort) {
window._streamsSSE.abort();
}
window._streamsSSE = null;
}
if (window.connectionTimeout) {
clearTimeout(window.connectionTimeout);
window.connectionTimeout = null;
}
activeSSEConnection = null;
};
// Initialize when DOM is loaded
document.addEventListener('DOMContentLoaded', () => {
console.log('[streams-ui] DOM content loaded, initializing streams UI');
initStreamsUI();
// Also try to load streams immediately in case the page is already loaded
setTimeout(() => {
console.log('[streams-ui] Attempting initial stream load');
loadAndRenderStreams();
}, 100);
});
function loadAndRenderStreams() {
console.log('[streams-ui] loadAndRenderStreams called');
const ul = document.getElementById('stream-list');
if (!ul) {
console.error('[STREAMS-UI] Stream list element not found');
return;
}
console.log('[STREAMS-UI] loadAndRenderStreams called, shouldForceRefresh:', shouldForceRefresh);
// Don't start a new connection if one is already active and we're not forcing a refresh
if (activeSSEConnection && !shouldForceRefresh) {
return;
}
// If we're forcing a refresh, clean up the existing connection
if (shouldForceRefresh && activeSSEConnection) {
// Clean up any existing connections
cleanupConnections();
shouldForceRefresh = false; // Reset the flag after handling
}
// Clear any existing error messages or retry buttons
ul.innerHTML = '<li>Loading public streams...</li>';
@ -59,36 +99,21 @@ function loadAndRenderStreams() {
const baseUrl = window.location.origin;
const sseUrl = `${baseUrl}/streams-sse?t=${timestamp}`;
console.log(`[streams-ui] Connecting to ${sseUrl}`);
let gotAny = false;
let streams = [];
    let connectionTimeout = null;
    window.connectionTimeout = null;

    // Clean up any existing connections
    cleanupConnections();

    // Reset the retry count if we have a successful connection
    window.streamRetryCount = 0;
console.log(`[streams-ui] Creating fetch-based SSE connection to ${sseUrl}`);
// Use fetch with ReadableStream for better CORS handling
const controller = new AbortController();
const signal = controller.signal;
@ -96,6 +121,9 @@ function loadAndRenderStreams() {
// Store the controller for cleanup
window._streamsSSE = controller;
// Track the active connection
activeSSEConnection = controller;
// Set a connection timeout with debug info
const connectionStartTime = Date.now();
const connectionTimeoutId = setTimeout(() => {
@ -123,20 +151,12 @@ function loadAndRenderStreams() {
window.streamRetryCount = retryCount + 1;
const backoffTime = Math.min(1000 * Math.pow(2, retryCount), 10000); // Exponential backoff, max 10s
setTimeout(loadAndRenderStreams, backoffTime);
            }
}
}, 15000); // 15 second timeout (increased from 10s)
    // Store the timeout ID for cleanup
window.connectionTimeout = connectionTimeoutId;
// Make the fetch request with proper error handling
fetch(sseUrl, {
@ -152,24 +172,13 @@ function loadAndRenderStreams() {
redirect: 'follow'
})
.then(response => {
if (!response.ok) {
// Try to get the response text for error details
return response.text().then(text => {
const error = new Error(`HTTP error! status: ${response.status}, statusText: ${response.statusText}`);
error.response = { status: response.status, statusText: response.statusText, body: text };
throw error;
}).catch(() => {
const error = new Error(`HTTP error! status: ${response.status}, statusText: ${response.statusText}`);
error.response = { status: response.status, statusText: response.statusText };
throw error;
@ -177,13 +186,9 @@ function loadAndRenderStreams() {
}
if (!response.body) {
throw new Error('Response body is null or undefined');
}
// Get the readable stream
const reader = response.body.getReader();
const decoder = new TextDecoder();
@ -191,15 +196,18 @@ function loadAndRenderStreams() {
// Process the stream
function processStream({ done, value }) {
console.log('[STREAMS-UI] processStream called with done:', done);
if (done) {
console.log('[STREAMS-UI] Stream processing complete');
// Process any remaining data in the buffer
if (buffer.trim()) {
console.log('[STREAMS-UI] Processing remaining buffer data');
try {
const data = JSON.parse(buffer);
console.log('[STREAMS-UI] Parsed data from buffer:', data);
processSSEEvent(data);
} catch (e) {
console.error('[STREAMS-UI] Error parsing buffer data:', e);
}
}
return;
@ -235,68 +243,63 @@ function loadAndRenderStreams() {
return reader.read().then(processStream);
})
.catch(error => {
        // Only handle the error if it's not an abort error
        if (error.name !== 'AbortError') {
            // Clean up the controller reference
            window._streamsSSE = null;
            activeSSEConnection = null;

            console.error('[streams-ui] Stream loading failed:', error);

            // Clear the connection timeout
            if (connectionTimeout) {
                clearTimeout(connectionTimeout);
                connectionTimeout = null;
            }

            // Show a user-friendly error message
            const ul = document.getElementById('stream-list');
            if (ul) {
                let errorMessage = 'Error loading streams. ';
if (error.message && error.message.includes('Failed to fetch')) {
errorMessage += 'Unable to connect to the server. Please check your internet connection.';
} else if (error.message && error.message.includes('CORS')) {
errorMessage += 'A server configuration issue occurred. Please try again later.';
} else {
errorMessage += 'Please try again later.';
}
ul.innerHTML = `
<li class="error">
<p>${errorMessage}</p>
<button id="retry-loading" class="retry-button">
<span class="retry-icon">↻</span> Try Again
</button>
</li>
`;
// Add retry handler
const retryButton = document.getElementById('retry-loading');
if (retryButton) {
retryButton.addEventListener('click', () => {
ul.innerHTML = '<li>Loading streams...</li>';
loadAndRenderStreams();
});
}
}
}
});
// Function to process SSE events
function processSSEEvent(data) {
console.log('[STREAMS-UI] Processing SSE event:', data);
if (data.end) {
if (streams.length === 0) {
ul.innerHTML = '<li>No active streams.</li>';
return;
}
// Sort streams by mtime in descending order (newest first)
streams.sort((a, b) => (b.mtime || 0) - (a.mtime || 0));
// Clear the list
ul.innerHTML = '';
@ -307,8 +310,6 @@ function loadAndRenderStreams() {
const sizeMb = stream.size ? (stream.size / (1024 * 1024)).toFixed(1) : '?';
const mtime = stream.mtime ? new Date(stream.mtime * 1000).toISOString().split('T')[0].replace(/-/g, '/') : '';
const li = document.createElement('li');
li.className = 'stream-item';
@ -323,9 +324,7 @@ function loadAndRenderStreams() {
</article>
`;
ul.appendChild(li);
} catch (error) {
console.error(`[streams-ui] Error rendering stream ${uid}:`, error);
const errorLi = document.createElement('li');
errorLi.textContent = `Error loading stream: ${uid}`;
errorLi.style.color = 'var(--error)';
@ -379,10 +378,11 @@ function loadAndRenderStreams() {
export function renderStreamList(streams) {
const ul = document.getElementById('stream-list');
if (!ul) {
console.warn('[STREAMS-UI] renderStreamList: #stream-list not found');
return;
}
console.log('[STREAMS-UI] Rendering stream list with', streams.length, 'streams');
console.debug('[STREAMS-UI] Streams data:', streams);
if (Array.isArray(streams)) {
if (streams.length) {
// Sort by mtime descending (most recent first)
@ -551,18 +551,14 @@ function stopPlayback() {
// Load and play audio using HTML5 Audio element for Opus
async function loadAndPlayAudio(uid, playPauseBtn) {
// If we already have an audio element for this UID and it's paused, just resume it
if (audioElement && currentUid === uid && audioElement.paused) {
try {
await audioElement.play();
isPlaying = true;
updatePlayPauseButton(playPauseBtn, true);
return;
} catch (error) {
console.error('Error resuming audio:', error);
// Fall through to reload if resume fails
}
}
@ -576,11 +572,8 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
currentUid = uid;
try {
    const audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus`;
// Create a new audio element with a small delay to prevent race conditions
await new Promise(resolve => setTimeout(resolve, 50));
@ -591,19 +584,16 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
// Set up event handlers with proper binding
const onPlay = () => {
console.log('[streams-ui] Audio play event');
isPlaying = true;
updatePlayPauseButton(playPauseBtn, true);
};
const onPause = () => {
console.log('[streams-ui] Audio pause event');
isPlaying = false;
updatePlayPauseButton(playPauseBtn, false);
};
const onEnded = () => {
console.log('[streams-ui] Audio ended event');
isPlaying = false;
cleanupAudio();
};
@ -611,18 +601,14 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
const onError = (e) => {
// Ignore errors from previous audio elements that were cleaned up
if (!audioElement || audioElement.readyState === 0) {
console.log('[streams-ui] Ignoring error from cleaned up audio element');
return;
}
console.error('[streams-ui] Audio error:', e);
console.error('Error details:', audioElement.error);
isPlaying = false;
updatePlayPauseButton(playPauseBtn, false);
// Don't show error to user for aborted requests
if (audioElement.error && audioElement.error.code === MediaError.MEDIA_ERR_ABORTED) {
console.log('[streams-ui] Playback was aborted as expected');
return;
}
@ -642,7 +628,6 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
audioElement._eventHandlers = { onPlay, onPause, onEnded, onError };
// Start playback with error handling
console.log('[streams-ui] Starting audio playback');
try {
const playPromise = audioElement.play();
@ -650,10 +635,8 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
await playPromise.catch(error => {
// Ignore abort errors when switching between streams
if (error.name !== 'AbortError') {
console.error('[streams-ui] Play failed:', error);
throw error;
}
console.log('[streams-ui] Play was aborted as expected');
});
}
@ -759,27 +742,21 @@ if (streamList) {
const uid = playPauseBtn.dataset.uid;
if (!uid) {
console.error('No UID found for play button');
return;
}
console.log(`[streams-ui] Play/pause clicked for UID: ${uid}, currentUid: ${currentUid}, isPlaying: ${isPlaying}`);
// If clicking the currently playing button, toggle pause/play
if (currentUid === uid) {
if (isPlaying) {
console.log('[streams-ui] Pausing current audio');
audioElement.pause(); // pause() returns no promise, so there is nothing to await
isPlaying = false;
updatePlayPauseButton(playPauseBtn, false);
} else {
console.log('[streams-ui] Resuming current audio');
try {
await audioElement.play();
isPlaying = true;
updatePlayPauseButton(playPauseBtn, true);
} catch (error) {
console.error('[streams-ui] Error resuming audio:', error);
// If resume fails, try reloading the audio
await loadAndPlayAudio(uid, playPauseBtn);
}
@ -788,7 +765,6 @@ if (streamList) {
}
// If a different stream is playing, stop it and start the new one
console.log(`[streams-ui] Switching to new audio stream: ${uid}`);
stopPlayback();
await loadAndPlayAudio(uid, playPauseBtn);
});
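// The handlers above assume an updatePlayPauseButton(btn, playing) helper.
// A minimal sketch of what it likely does (hypothetical; the real helper is
// defined elsewhere in this file):
function updatePlayPauseButtonSketch(btn, playing) {
  if (!btn) return;
  btn.textContent = playing ? '⏸' : '▶'; // swap glyph to match playback state
  btn.setAttribute('aria-pressed', String(playing));
  btn.title = playing ? 'Pause' : 'Play';
}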

View File

@ -19,7 +19,7 @@ document.addEventListener('DOMContentLoaded', () => {
}
const streamInfo = document.getElementById("stream-info");
const streamUrlEl = document.getElementById("streamUrl");
const spinner = document.getElementById("spinner");
const spinner = document.getElementById("spinner") || { style: { display: 'none' } };
let abortController;
// Upload function
@ -89,6 +89,11 @@ document.addEventListener('DOMContentLoaded', () => {
if (window.fetchAndDisplayFiles) {
await window.fetchAndDisplayFiles(uid);
}
// Refresh the stream list so it reflects the new last-updated time
if (window.refreshStreamList) {
await window.refreshStreamList();
}
} catch (e) {
console.error('Failed to refresh:', e);
}
@ -96,8 +101,8 @@ document.addEventListener('DOMContentLoaded', () => {
playBeep(432, 0.25, "sine");
} else {
streamInfo.hidden = true;
spinner.style.display = "none";
if (streamInfo) streamInfo.hidden = true;
if (spinner) spinner.style.display = "none";
if ((data.detail || data.error || "").includes("music")) {
showToast("🎵 Upload rejected: singing or music detected.");
} else {
@ -190,10 +195,10 @@ document.addEventListener('DOMContentLoaded', () => {
const isRenamed = file.original_name && file.original_name !== file.name;
return `
<li class="file-item" data-filename="${file.name}">
<div class="file-name" title="${displayName}">
<div class="file-name" title="${isRenamed ? `Stored as: ${file.name}` : displayName}">
${displayName}
${isRenamed ? `<div class="stored-as" title="Stored as: ${file.name}">${file.name} <button class="delete-file" data-filename="${file.name}" title="Delete file">🗑️</button></div>` :
`<button class="delete-file" data-filename="${file.name}" title="Delete file">🗑️</button>`}
${isRenamed ? `<div class="stored-as"><button class="delete-file" data-filename="${file.name}" data-original-name="${file.original_name}" title="Delete file">🗑️</button></div>` :
`<button class="delete-file" data-filename="${file.name}" data-original-name="${file.original_name}" title="Delete file">🗑️</button>`}
</div>
<span class="file-size">${sizeMB} MB</span>
</li>
@ -203,48 +208,7 @@ document.addEventListener('DOMContentLoaded', () => {
fileList.innerHTML = '<li class="empty-message">No files uploaded yet</li>';
}
// Add event listeners to delete buttons
document.querySelectorAll('.delete-file').forEach(button => {
button.addEventListener('click', async (e) => {
e.stopPropagation();
const filename = button.dataset.filename;
if (confirm(`Are you sure you want to delete ${filename}?`)) {
try {
// Get the auth token from the cookie
const token = document.cookie
.split('; ')
.find(row => row.startsWith('sessionid='))
?.split('=')[1];
if (!token) {
throw new Error('Not authenticated');
}
const response = await fetch(`/delete/${filename}`, {
method: 'DELETE',
headers: {
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || `Failed to delete file: ${response.statusText}`);
}
// Refresh the file list
const uid = document.body.dataset.userUid;
if (uid) {
fetchAndDisplayFiles(uid);
}
} catch (error) {
console.error('Error deleting file:', error);
alert('Failed to delete file. Please try again.');
}
}
});
});
// Delete button handling is now managed by dashboard.js
// Update quota display if available
if (data.quota !== undefined) {

260
upload.py
View File

@ -6,11 +6,13 @@ from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded
from pathlib import Path
import json
import requests
from datetime import datetime
from convert_to_opus import convert_to_opus
from models import UploadLog, UserQuota, User
from sqlalchemy import select
from models import UploadLog, UserQuota, User, PublicStream
from sqlalchemy import select, or_
from database import get_db
from sqlalchemy.orm import Session
limiter = Limiter(key_func=get_remote_address)
router = APIRouter()
@ -23,55 +25,63 @@ DATA_ROOT = Path("./data")
@router.post("/upload")
async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)):
from log import log_violation
import time
# Generate a coarse, time-based request ID for this upload (second resolution; not guaranteed unique)
request_id = str(int(time.time()))
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Starting upload of {file.filename}")
try:
# First, verify the user exists and is confirmed
user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
user = user[0]
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}")
if not user or not hasattr(user, "confirmed") or not user.confirmed:
raise HTTPException(status_code=403, detail="Account not confirmed")
# Check quota before doing any file operations
quota = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
if quota.storage_bytes >= 100 * 1024 * 1024:
raise HTTPException(status_code=400, detail="Quota exceeded")
# Create user directory if it doesn't exist
user_dir = DATA_ROOT / uid
user_dir.mkdir(parents=True, exist_ok=True)
raw_path = user_dir / ("raw." + file.filename.split(".")[-1])
# Generate a unique filename for the processed file first
import uuid
unique_name = str(uuid.uuid4()) + ".opus"
# Save temp upload FIRST
with open(raw_path, "wb") as f:
f.write(await file.read())
# Block music/singing via Ollama prompt
import requests
try:
with open(raw_path, "rb") as f:
audio = f.read()
res = requests.post("http://localhost:11434/api/generate", json={
"model": "whisper",
"prompt": "Does this audio contain music or singing? Answer yes or no only.",
"audio": audio
}, timeout=10)
resp = res.json().get("response", "").lower()
if "yes" in resp:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=403, detail="Upload rejected: music or singing detected")
except Exception as ollama_err:
# fallback: allow, log if needed
pass
unique_name = f"{uuid.uuid4()}.opus"
raw_ext = file.filename.split(".")[-1].lower()
raw_path = user_dir / ("raw." + raw_ext)
processed_path = user_dir / unique_name
# Block unconfirmed users (use ORM)
user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
# If result is a Row or tuple, extract the User object
if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
user = user[0]
from log import log_violation
log_violation("UPLOAD", request.client.host, uid, f"DEBUG: Incoming uid={uid}, user found={user}, confirmed={getattr(user, 'confirmed', None)}")
log_violation("UPLOAD", request.client.host, uid, f"DEBUG: After unpack, user={user}, type={type(user)}, confirmed={getattr(user, 'confirmed', None)}")
if not user or not hasattr(user, "confirmed") or not user.confirmed:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=403, detail="Account not confirmed")
# Clean up any existing raw files first (except the one we're about to create)
for old_file in user_dir.glob('raw.*'):
try:
if old_file != raw_path: # Don't delete the file we're about to create
old_file.unlink(missing_ok=True)
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}")
except Exception as e:
log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}")
# DB-based quota check
quota = db.get(UserQuota, uid)
if quota and quota.storage_bytes >= 100 * 1024 * 1024:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=400, detail="Quota exceeded")
# Save the uploaded file temporarily
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}")
try:
with open(raw_path, "wb") as f:
content = await file.read()
if not content:
raise ValueError("Uploaded file is empty")
f.write(content)
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}")
except Exception as e:
log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}")
raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}")
# Ollama music/singing check is disabled for this release
log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled")
try:
convert_to_opus(str(raw_path), str(processed_path))
@ -82,44 +92,96 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f
original_size = raw_path.stat().st_size
raw_path.unlink(missing_ok=True) # cleanup
# First, verify the file was created and has content
if not processed_path.exists() or processed_path.stat().st_size == 0:
raise HTTPException(status_code=500, detail="Failed to process audio file")
# Concatenate all .opus files in random order to stream.opus for public playback
# This is now done after the file is in its final location with the log-ID prefix
from concat_opus import concat_opus_files
try:
concat_opus_files(user_dir, user_dir / "stream.opus")
except Exception as e:
# fallback: just use the latest processed file if concat fails
import shutil
stream_path = user_dir / "stream.opus"
shutil.copy2(processed_path, stream_path)
def update_stream_opus():
try:
concat_opus_files(user_dir, user_dir / "stream.opus")
except Exception as e:
# fallback: just use the latest processed file if concat fails
import shutil
stream_path = user_dir / "stream.opus"
shutil.copy2(processed_path, stream_path)
log_violation("STREAM_UPDATE", request.client.host, uid,
f"[fallback] Updated stream.opus with {processed_path}")
# Create a log entry with the original filename
log = UploadLog(
uid=uid,
ip=request.client.host,
filename=file.filename, # Store original filename
processed_filename=unique_name, # Store the processed filename
size_bytes=original_size
)
db.add(log)
db.commit()
db.refresh(log)
# We'll call this after the file is in its final location
# Rename the processed file to include the log ID for better tracking
processed_with_id = user_dir / f"{log.id}_{unique_name}"
processed_path.rename(processed_with_id)
processed_path = processed_with_id
# Store updated quota
# Get the final file size
size = processed_path.stat().st_size
quota = db.get(UserQuota, uid)
if not quota:
quota = UserQuota(uid=uid)
db.add(quota)
quota.storage_bytes += size
db.commit()
# Update public streams list
update_public_streams(uid, quota.storage_bytes)
# Apply the remaining DB updates atomically; roll back on any failure
try:
# Create a log entry with the original filename
log = UploadLog(
uid=uid,
ip=request.client.host,
filename=file.filename, # Store original filename
processed_filename=unique_name, # Store the processed filename
size_bytes=size
)
db.add(log)
db.flush() # Get the log ID without committing
# Rename the processed file to include the log ID for better tracking
processed_with_id = user_dir / f"{log.id}_{unique_name}"
if processed_path.exists():
# First check if there's already a file with the same UUID but different prefix
for existing_file in user_dir.glob(f"*_{unique_name}"):
if existing_file != processed_path:
log_violation("CLEANUP", request.client.host, uid,
f"[UPLOAD] Removing duplicate file: {existing_file}")
existing_file.unlink(missing_ok=True)
# Now do the rename
if processed_path != processed_with_id:
if processed_with_id.exists():
processed_with_id.unlink(missing_ok=True)
processed_path.rename(processed_with_id)
processed_path = processed_with_id
# Only clean up raw.* files, not previously uploaded opus files
for old_temp_file in user_dir.glob('raw.*'):
try:
old_temp_file.unlink(missing_ok=True)
log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}")
except Exception as e:
log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}")
# Get or create quota
quota = db.query(UserQuota).filter(UserQuota.uid == uid).first()
if not quota:
quota = UserQuota(uid=uid, storage_bytes=0)
db.add(quota)
# Recompute the quota from the user's .opus files on disk plus the new file
quota.storage_bytes = sum(
f.stat().st_size
for f in user_dir.glob('*.opus')
if f.name != 'stream.opus' and f != processed_path
) + size
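# Recomputing the total from the files on disk (rather than incrementing the
# stored value) keeps the quota self-correcting if an earlier upload partially
# failed or files were removed out of band.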
# Update public streams
update_public_streams(uid, quota.storage_bytes, db)
# Commit the transaction
db.commit()
# Now that the transaction is committed and files are in their final location,
# update the stream.opus file to include all files
update_stream_opus()
except Exception as e:
db.rollback()
# Clean up the processed file if something went wrong
if processed_path.exists():
processed_path.unlink(missing_ok=True)
raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
return {
"filename": file.filename,
@ -142,37 +204,33 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f
return {"detail": f"Server error: {type(e).__name__}: {str(e)}"}
def update_public_streams(uid: str, storage_bytes: int, db = Depends(get_db)):
def update_public_streams(uid: str, storage_bytes: int, db: Session):
"""Update the public streams list in the database with the latest user upload info"""
try:
from models import PublicStream
# Get the user's info
user = db.query(User).filter(User.username == uid).first()
if not user:
print(f"[WARNING] User {uid} not found when updating public streams")
return
# Get or create the public stream record
public_stream = db.get(PublicStream, uid)
current_time = datetime.utcnow()
if public_stream is None:
# Create a new record if it doesn't exist
public_stream = PublicStream(
uid=uid,
size=storage_bytes,
mtime=int(current_time.timestamp()),
created_at=current_time,
updated_at=current_time
)
# Get the existing public stream record or create a new one
public_stream = db.query(PublicStream).filter(PublicStream.uid == uid).first()
if not public_stream:
public_stream = PublicStream(uid=uid)
db.add(public_stream)
else:
# Update existing record
public_stream.size = storage_bytes
public_stream.mtime = int(current_time.timestamp())
public_stream.updated_at = current_time
db.commit()
db.refresh(public_stream)
# Update the public stream info
public_stream.username = user.username
public_stream.display_name = user.display_name or user.username
public_stream.storage_bytes = storage_bytes
public_stream.last_updated = datetime.utcnow()
# Don't commit here - let the caller handle the transaction
db.flush()
except Exception as e:
db.rollback()
# Just log the error and let the caller handle the rollback
print(f"[ERROR] Error updating public streams: {e}")
import traceback
print(f"Error updating public streams in database: {e}")
print(traceback.format_exc())
raise
traceback.print_exc()
raise # Re-raise to let the caller handle the error
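# For reference, the attributes written above imply a PublicStream model with
# roughly these fields (inferred from this module's usage; the real model is
# defined in models.py and may differ):
#
#   class PublicStream(SQLModel, table=True):
#       uid: str = Field(primary_key=True)
#       username: str | None = None
#       display_name: str | None = None
#       storage_bytes: int = 0
#       mtime: int = 0
#       last_updated: datetime | None = None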