Update authentication system, database models, and UI components

Author: oib
Date: 2025-08-07 19:39:22 +02:00
Commit: 72f79b1059 (parent: d497492186)
48 changed files with 5328 additions and 1642 deletions


@ -11,116 +11,126 @@ from typing import Dict, Any
router = APIRouter(prefix="/api", tags=["account"])

@router.post("/delete-account")
async def delete_account(data: Dict[str, Any], request: Request):
    try:
        # Get UID from request data
        uid = data.get("uid")
        if not uid:
            raise HTTPException(status_code=400, detail="Missing UID")

        ip = request.client.host

        # Verify user exists and IP matches
        # Use the database session context manager
        with get_db() as db:
            # Handle both email-based and username-based UIDs for backward compatibility
            user = None

            # First try to find by email (new UID format)
            if '@' in uid:
                user = db.query(User).filter(User.email == uid).first()

            # If not found by email, try by username (legacy UID format)
            if not user:
                user = db.query(User).filter(User.username == uid).first()

            if not user:
                raise HTTPException(status_code=404, detail="User not found")

            # Extract user attributes while the object is still bound to the session
            actual_uid = user.email
            user_ip = user.ip
            username = user.username

        if user_ip != ip:
            raise HTTPException(status_code=403, detail="Unauthorized: IP address does not match")

        # Use the database session context manager for all database operations
        with get_db() as db:
            try:
                # Delete user's upload logs (actual_uid is always the email)
                uploads = db.query(UploadLog).filter(UploadLog.uid == actual_uid).all()
                for upload in uploads:
                    db.delete(upload)

                # Delete user's public streams
                streams = db.query(PublicStream).filter(PublicStream.uid == actual_uid).all()
                for stream in streams:
                    db.delete(stream)

                # Delete user's quota
                quota = db.get(UserQuota, actual_uid)
                if quota:
                    db.delete(quota)

                # Delete user's active sessions (check both email and username as uid)
                sessions_by_email = db.query(DBSession).filter(DBSession.uid == actual_uid).all()
                sessions_by_username = db.query(DBSession).filter(DBSession.uid == username).all()
                all_sessions = list(sessions_by_email) + list(sessions_by_username)

                # Remove duplicates using token (primary key)
                unique_sessions = {session.token: session for session in all_sessions}.values()
                for session in unique_sessions:
                    db.delete(session)

                # Delete user account (actual_uid, the email, is the primary key)
                user_obj = db.get(User, actual_uid)
                if user_obj:
                    db.delete(user_obj)

                db.commit()
            except Exception:
                db.rollback()
                raise HTTPException(status_code=500, detail="Database error during account deletion")

        # Delete user's files
        try:
            # Use the email (actual_uid) for the directory name, which matches how files are stored
            user_dir = os.path.join('data', actual_uid)
            real_user_dir = os.path.realpath(user_dir)

            # Security check to prevent directory traversal
            if not real_user_dir.startswith(os.path.realpath('data')):
                raise HTTPException(status_code=400, detail="Invalid user directory")

            if os.path.exists(real_user_dir):
                import shutil
                shutil.rmtree(real_user_dir, ignore_errors=True)
        except HTTPException:
            raise
        except Exception:
            # Continue even if file deletion fails, as the account is already deleted from the DB
            pass

        return {"status": "success", "message": "Account and all associated data have been deleted"}
    except HTTPException:
        raise
    except Exception:
        raise HTTPException(status_code=500, detail="An unexpected error occurred")
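For reference, a minimal sketch of exercising this endpoint from a client. The base URL is an assumption; the /api prefix comes from the router above, and the request must originate from the same IP that is stored on the user record or the endpoint answers 403.

import requests

# Hypothetical local deployment; adjust the base URL as needed.
resp = requests.post(
    "http://localhost:8000/api/delete-account",
    json={"uid": "user@example.com"},  # email-based UID (new format)
)
print(resp.status_code, resp.json())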

analyze_db_legacy.py (new file, 355 lines)

@ -0,0 +1,355 @@
#!/usr/bin/env python3
"""
Database Legacy Data Analysis Script
Analyzes the database for legacy data that doesn't match current authentication implementation
"""
import sys
from datetime import datetime, timedelta
from sqlmodel import Session, select
from database import engine
from models import User, UserQuota, UploadLog, DBSession, PublicStream
import re
def validate_email_format(email):
"""Validate email format using RFC 5322 compliant regex"""
pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
return re.match(pattern, email) is not None
def analyze_user_table():
"""Analyze User table for legacy data issues"""
print("\n=== ANALYZING USER TABLE ===")
issues = []
with Session(engine) as session:
users = session.exec(select(User)).all()
print(f"Total users: {len(users)}")
for user in users:
user_issues = []
# Check if email (primary key) is valid email format
if not validate_email_format(user.email):
user_issues.append(f"Invalid email format: {user.email}")
# Check if username is also email format (current requirement)
if not validate_email_format(user.username):
user_issues.append(f"Username not in email format: {user.username}")
# Check if email and username match (should be same after migration)
if user.email != user.username:
user_issues.append(f"Email/username mismatch: email={user.email}, username={user.username}")
# Check for missing or empty display_name
if not user.display_name or user.display_name.strip() == "":
user_issues.append(f"Empty display_name")
# Check for very old tokens (potential security issue)
if user.token_created < datetime.utcnow() - timedelta(days=30):
user_issues.append(f"Very old token (created: {user.token_created})")
# Check for unconfirmed users
if not user.confirmed:
user_issues.append(f"Unconfirmed user")
if user_issues:
issues.append({
'email': user.email,
'username': user.username,
'issues': user_issues
})
print(f"Users with issues: {len(issues)}")
for issue in issues:
print(f" User {issue['email']}:")
for problem in issue['issues']:
print(f" - {problem}")
return issues
def analyze_session_table():
"""Analyze DBSession table for legacy data issues"""
print("\n=== ANALYZING SESSION TABLE ===")
issues = []
with Session(engine) as session:
sessions = session.exec(select(DBSession)).all()
print(f"Total sessions: {len(sessions)}")
active_sessions = [s for s in sessions if s.is_active]
expired_sessions = [s for s in sessions if s.expires_at < datetime.utcnow()]
old_sessions = [s for s in sessions if s.created_at < datetime.utcnow() - timedelta(days=7)]
print(f"Active sessions: {len(active_sessions)}")
print(f"Expired sessions: {len(expired_sessions)}")
print(f"Sessions older than 7 days: {len(old_sessions)}")
for db_session in sessions:
session_issues = []
# Check if user_id is in email format (current requirement)
if not validate_email_format(db_session.user_id):
session_issues.append(f"user_id not in email format: {db_session.user_id}")
# Check for expired but still active sessions
if db_session.is_active and db_session.expires_at < datetime.utcnow():
session_issues.append(f"Expired but still marked active (expires: {db_session.expires_at})")
# Check for very old sessions that should be cleaned up
if db_session.created_at < datetime.utcnow() - timedelta(days=30):
session_issues.append(f"Very old session (created: {db_session.created_at})")
# Check for sessions with 1-hour expiry (old system)
session_duration = db_session.expires_at - db_session.created_at
if session_duration < timedelta(hours=2): # Less than 2 hours indicates old 1-hour sessions
session_issues.append(f"Short session duration: {session_duration} (should be 24h)")
if session_issues:
issues.append({
'token': db_session.token[:10] + '...',
'user_id': db_session.user_id,
'created_at': db_session.created_at,
'expires_at': db_session.expires_at,
'issues': session_issues
})
print(f"Sessions with issues: {len(issues)}")
for issue in issues:
print(f" Session {issue['token']} (user: {issue['user_id']}):")
for problem in issue['issues']:
print(f" - {problem}")
return issues
def analyze_quota_table():
"""Analyze UserQuota table for legacy data issues"""
print("\n=== ANALYZING USER QUOTA TABLE ===")
issues = []
with Session(engine) as session:
quotas = session.exec(select(UserQuota)).all()
print(f"Total quota records: {len(quotas)}")
for quota in quotas:
quota_issues = []
# Check if uid is in email format (current requirement)
if not validate_email_format(quota.uid):
quota_issues.append(f"UID not in email format: {quota.uid}")
# Check for negative storage
if quota.storage_bytes < 0:
quota_issues.append(f"Negative storage: {quota.storage_bytes}")
# Check for excessive storage (over 100MB limit)
if quota.storage_bytes > 100 * 1024 * 1024:
quota_issues.append(f"Storage over 100MB limit: {quota.storage_bytes / (1024*1024):.1f}MB")
if quota_issues:
issues.append({
'uid': quota.uid,
'storage_bytes': quota.storage_bytes,
'issues': quota_issues
})
print(f"Quota records with issues: {len(issues)}")
for issue in issues:
print(f" Quota {issue['uid']} ({issue['storage_bytes']} bytes):")
for problem in issue['issues']:
print(f" - {problem}")
return issues
def analyze_upload_log_table():
"""Analyze UploadLog table for legacy data issues"""
print("\n=== ANALYZING UPLOAD LOG TABLE ===")
issues = []
with Session(engine) as session:
uploads = session.exec(select(UploadLog)).all()
print(f"Total upload records: {len(uploads)}")
for upload in uploads:
upload_issues = []
# Check if uid is in email format (current requirement)
if not validate_email_format(upload.uid):
upload_issues.append(f"UID not in email format: {upload.uid}")
# Check for missing processed_filename
if not upload.processed_filename:
upload_issues.append(f"Missing processed_filename")
# Check for negative file size
if upload.size_bytes < 0:
upload_issues.append(f"Negative file size: {upload.size_bytes}")
# Check for very old uploads
if upload.created_at < datetime.utcnow() - timedelta(days=365):
upload_issues.append(f"Very old upload (created: {upload.created_at})")
if upload_issues:
issues.append({
'id': upload.id,
'uid': upload.uid,
'filename': upload.filename,
'created_at': upload.created_at,
'issues': upload_issues
})
print(f"Upload records with issues: {len(issues)}")
for issue in issues:
print(f" Upload {issue['id']} (user: {issue['uid']}, file: {issue['filename']}):")
for problem in issue['issues']:
print(f" - {problem}")
return issues
def analyze_public_stream_table():
"""Analyze PublicStream table for legacy data issues"""
print("\n=== ANALYZING PUBLIC STREAM TABLE ===")
issues = []
with Session(engine) as session:
streams = session.exec(select(PublicStream)).all()
print(f"Total public stream records: {len(streams)}")
for stream in streams:
stream_issues = []
# Check if uid is in email format (current requirement)
if not validate_email_format(stream.uid):
stream_issues.append(f"UID not in email format: {stream.uid}")
# Check if username is also email format (should match uid)
if stream.username and not validate_email_format(stream.username):
stream_issues.append(f"Username not in email format: {stream.username}")
# Check if uid and username match (should be same after migration)
if stream.username and stream.uid != stream.username:
stream_issues.append(f"UID/username mismatch: uid={stream.uid}, username={stream.username}")
# Check for negative storage
if stream.storage_bytes < 0:
stream_issues.append(f"Negative storage: {stream.storage_bytes}")
# Check for missing display_name
if not stream.display_name or stream.display_name.strip() == "":
stream_issues.append(f"Empty display_name")
if stream_issues:
issues.append({
'uid': stream.uid,
'username': stream.username,
'display_name': stream.display_name,
'issues': stream_issues
})
print(f"Public stream records with issues: {len(issues)}")
for issue in issues:
print(f" Stream {issue['uid']} (username: {issue['username']}):")
for problem in issue['issues']:
print(f" - {problem}")
return issues
def check_referential_integrity():
"""Check for referential integrity issues between tables"""
print("\n=== CHECKING REFERENTIAL INTEGRITY ===")
issues = []
with Session(engine) as session:
# Get all unique UIDs from each table
users = session.exec(select(User.email)).all()
user_usernames = session.exec(select(User.username)).all()
quotas = session.exec(select(UserQuota.uid)).all()
uploads = session.exec(select(UploadLog.uid)).all()
streams = session.exec(select(PublicStream.uid)).all()
sessions = session.exec(select(DBSession.user_id)).all()
user_emails = set(users)
user_usernames_set = set(user_usernames)
quota_uids = set(quotas)
upload_uids = set(uploads)
stream_uids = set(streams)
session_uids = set(sessions)
print(f"Unique user emails: {len(user_emails)}")
print(f"Unique user usernames: {len(user_usernames_set)}")
print(f"Unique quota UIDs: {len(quota_uids)}")
print(f"Unique upload UIDs: {len(upload_uids)}")
print(f"Unique stream UIDs: {len(stream_uids)}")
print(f"Unique session user_ids: {len(session_uids)}")
# Check for orphaned records
orphaned_quotas = quota_uids - user_emails
orphaned_uploads = upload_uids - user_emails
orphaned_streams = stream_uids - user_emails
orphaned_sessions = session_uids - user_usernames_set # Sessions use username as user_id
if orphaned_quotas:
issues.append(f"Orphaned quota records (no matching user): {orphaned_quotas}")
if orphaned_uploads:
issues.append(f"Orphaned upload records (no matching user): {orphaned_uploads}")
if orphaned_streams:
issues.append(f"Orphaned stream records (no matching user): {orphaned_streams}")
if orphaned_sessions:
issues.append(f"Orphaned session records (no matching user): {orphaned_sessions}")
# Check for users without quota records
users_without_quota = user_emails - quota_uids
if users_without_quota:
issues.append(f"Users without quota records: {users_without_quota}")
# Check for users without stream records
users_without_streams = user_emails - stream_uids
if users_without_streams:
issues.append(f"Users without stream records: {users_without_streams}")
print(f"Referential integrity issues: {len(issues)}")
for issue in issues:
print(f" - {issue}")
return issues
def main():
"""Run complete database legacy analysis"""
print("=== DATABASE LEGACY DATA ANALYSIS ===")
print(f"Analysis started at: {datetime.utcnow()}")
all_issues = {}
try:
all_issues['users'] = analyze_user_table()
all_issues['sessions'] = analyze_session_table()
all_issues['quotas'] = analyze_quota_table()
all_issues['uploads'] = analyze_upload_log_table()
all_issues['streams'] = analyze_public_stream_table()
all_issues['integrity'] = check_referential_integrity()
# Summary
print("\n=== SUMMARY ===")
total_issues = sum(len(issues) if isinstance(issues, list) else 1 for issues in all_issues.values())
print(f"Total issues found: {total_issues}")
for table, issues in all_issues.items():
if issues:
count = len(issues) if isinstance(issues, list) else 1
print(f" {table}: {count} issues")
if total_issues == 0:
print("✅ No legacy data issues found! Database is clean.")
else:
print("⚠️ Legacy data issues found. Consider running cleanup scripts.")
except Exception as e:
print(f"❌ Error during analysis: {e}")
return 1
return 0
if __name__ == "__main__":
sys.exit(main())
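Since each analysis function returns a list of issue dicts, the checks can also be run selectively from another script. A small sketch (the import assumes the project root is on sys.path):

from analyze_db_legacy import analyze_user_table, check_referential_integrity

# Run only the checks relevant before an account migration
user_issues = analyze_user_table()
integrity_issues = check_referential_integrity()
if user_issues or integrity_issues:
    print("Legacy data present; see cleanup_legacy_db.sql for fixes")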

auth.py (88 changed lines)

@ -1,7 +1,7 @@
"""Authentication middleware and utilities for dicta2stream""" """Authentication middleware and utilities for dicta2stream"""
from fastapi import Request, HTTPException, Depends, status from fastapi import Request, HTTPException, Depends, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlmodel import Session from sqlmodel import Session, select
from typing import Optional from typing import Optional
from models import User, Session as DBSession, verify_session from models import User, Session as DBSession, verify_session
@ -11,40 +11,39 @@ security = HTTPBearer()
def get_current_user(
    request: Request,
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> User:
    """Dependency to get the current authenticated user"""
    token = credentials.credentials

    # Use the database session context manager
    with get_db() as db:
        db_session = verify_session(db, token)

        if not db_session:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid or expired session",
                headers={"WWW-Authenticate": "Bearer"},
            )

        # Get the user from the session using the query interface
        user = db.query(User).filter(User.email == db_session.uid).first()

        if not user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="User not found",
                headers={"WWW-Authenticate": "Bearer"},
            )

        # Attach the session to the request state for later use
        request.state.session = db_session
        return user

def get_optional_user(
    request: Request,
    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security, use_cache=False)
) -> Optional[User]:
    """Dependency that returns the current user if authenticated, None otherwise"""
@ -52,22 +51,45 @@ def get_optional_user(
        return None

    try:
        # get_current_user now handles its own database session
        return get_current_user(request, credentials)
    except HTTPException:
        return None

def create_session(user: User, request: Request) -> DBSession:
    """Create a new session for the user (valid for 24 hours)"""
    import secrets
    from datetime import datetime, timedelta

    user_agent = request.headers.get("user-agent", "")
    ip_address = request.client.host if request.client else "0.0.0.0"

    # Create session token and set 24-hour expiry
    session_token = secrets.token_urlsafe(32)
    expires_at = datetime.utcnow() + timedelta(hours=24)

    # Create the session object
    session = DBSession(
        token=session_token,
        user_id=user.email,
        ip_address=ip_address,
        user_agent=user_agent,
        expires_at=expires_at,
        is_active=True
    )

    # Use the database session context manager
    with get_db() as db:
        try:
            db.add(session)
            db.commit()
            db.refresh(session)  # Ensure we have the latest data
            return session
        except Exception:
            db.rollback()
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to create session"
            )
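A sketch of how a caller might use create_session after resolving a user. The route below is illustrative only, not part of this commit (the real flow is the magic-link login); the cookie name mirrors the ones cleared on logout.

from fastapi import APIRouter, HTTPException, Request, Response
from auth import create_session
from database import get_db
from models import User

router = APIRouter()

@router.post("/demo-login")  # hypothetical route for illustration
async def demo_login(email: str, request: Request, response: Response):
    with get_db() as db:
        user = db.query(User).filter(User.email == email).first()
        if not user:
            raise HTTPException(status_code=404, detail="User not found")
        # Create the 24-hour session while the user object is still bound
        session = create_session(user, request)
    response.set_cookie("authToken", session.token, samesite="lax", secure=True)
    return {"expires_at": session.expires_at.isoformat()}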


@ -15,7 +15,6 @@ security = HTTPBearer()
async def logout(
    request: Request,
    response: Response,
    credentials: HTTPAuthorizationCredentials = Depends(security)
):
    """Log out by invalidating the current session"""
@ -26,25 +25,28 @@ async def logout(
    if not token:
        return {"message": "No session to invalidate"}

    # Use the database session context manager
    with get_db() as db:
        try:
            # Find and invalidate the session using the query interface
            session = db.query(DBSession).filter(
                DBSession.token == token,
                DBSession.is_active == True  # noqa: E712
            ).first()

            if session:
                try:
                    session.is_active = False
                    db.add(session)
                    db.commit()
                except Exception:
                    # Continue with logout even if session update fails
                    db.rollback()
        except Exception:
            # Continue with logout even if session lookup fails
            pass

    # Clear the session cookie
    response.delete_cookie(
@ -56,7 +58,7 @@ async def logout(
    )

    # Clear any other auth-related cookies
    for cookie_name in ["uid", "authToken", "username", "token"]:
        response.delete_cookie(
            key=cookie_name,
            path="/",
@ -71,15 +73,15 @@ async def logout(
    except HTTPException:
        # Re-raise HTTP exceptions
        raise
    except Exception:
        # Don't expose internal errors to the client
        return {"message": "Logout processed"}

@router.get("/me")
async def get_current_user_info(
    current_user: User = Depends(get_current_user)
):
    """Get current user information"""
    return {
@ -92,15 +94,16 @@ async def get_current_user_info(
@router.get("/sessions") @router.get("/sessions")
async def list_sessions( async def list_sessions(
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user)
db: Session = Depends(get_db)
): ):
"""List all active sessions for the current user""" """List all active sessions for the current user"""
sessions = DBSession.get_active_sessions(db, current_user.username) # Use the database session context manager
return [ with get_db() as db:
{ sessions = DBSession.get_active_sessions(db, current_user.username)
"id": s.id, return [
"ip_address": s.ip_address, {
"id": s.id,
"ip_address": s.ip_address,
"user_agent": s.user_agent, "user_agent": s.user_agent,
"created_at": s.created_at.isoformat(), "created_at": s.created_at.isoformat(),
"last_used_at": s.last_used_at.isoformat(), "last_used_at": s.last_used_at.isoformat(),
@ -113,26 +116,34 @@ async def list_sessions(
@router.post("/sessions/{session_id}/revoke") @router.post("/sessions/{session_id}/revoke")
async def revoke_session( async def revoke_session(
session_id: int, session_id: int,
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user)
db: Session = Depends(get_db)
): ):
"""Revoke a specific session""" """Revoke a specific session"""
session = db.get(DBSession, session_id) # Use the database session context manager
with get_db() as db:
session = db.get(DBSession, session_id)
if not session or session.user_id != current_user.username: if not session or session.uid != current_user.email:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
detail="Session not found" detail="Session not found"
) )
if not session.is_active: if not session.is_active:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail="Session is already inactive" detail="Session is already inactive"
) )
session.is_active = False try:
db.add(session) session.is_active = False
db.commit() db.add(session)
db.commit()
return {"message": "Session revoked"} return {"message": "Session revoked successfully"}
except Exception as e:
db.rollback()
# Debug messages disabled
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to revoke session"
)
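From the client side, revoking takes two steps: list sessions, then revoke by id. A hedged sketch assuming the router is mounted under /api and a valid bearer token (the token below is a placeholder):

import requests

BASE = "http://localhost:8000/api"  # assumed mount point
headers = {"Authorization": "Bearer YOUR_SESSION_TOKEN"}  # placeholder

sessions = requests.get(f"{BASE}/sessions", headers=headers).json()
if sessions:
    sid = sessions[0]["id"]
    r = requests.post(f"{BASE}/sessions/{sid}/revoke", headers=headers)
    print(r.json())  # {"message": "Session revoked successfully"} on success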


@ -0,0 +1,31 @@
-- Cleanup script for old format user 'devuser'
-- This user has username-based UID instead of email-based UID
-- Show what will be deleted before deletion
SELECT 'publicstream entries to delete:' as info;
SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid = 'devuser';
SELECT 'uploadlog entries to delete:' as info;
SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid = 'devuser' GROUP BY uid;
SELECT 'userquota entries to delete:' as info;
SELECT uid FROM userquota WHERE uid = 'devuser';
-- Delete from all related tables
-- Start with dependent tables first
DELETE FROM uploadlog WHERE uid = 'devuser';
DELETE FROM userquota WHERE uid = 'devuser';
DELETE FROM publicstream WHERE uid = 'devuser';
-- Verify cleanup
SELECT 'Remaining entries for devuser in publicstream:' as info;
SELECT COUNT(*) as count FROM publicstream WHERE uid = 'devuser';
SELECT 'Remaining entries for devuser in uploadlog:' as info;
SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'devuser';
SELECT 'Remaining entries for devuser in userquota:' as info;
SELECT COUNT(*) as count FROM userquota WHERE uid = 'devuser';
SELECT 'Total remaining old format entries in publicstream:' as info;
SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username;


@ -0,0 +1,19 @@
-- Final cleanup of orphaned entries that prevent proper account deletion
-- These entries have username-based UIDs that should have been deleted
-- Show what will be deleted
SELECT 'Orphaned publicstream entries to delete:' as info;
SELECT uid, username FROM publicstream WHERE uid = 'oibchello';
SELECT 'Orphaned userquota entries to delete:' as info;
SELECT uid, storage_bytes FROM userquota WHERE uid = 'oibchello';
-- Delete the orphaned entries
DELETE FROM publicstream WHERE uid = 'oibchello';
DELETE FROM userquota WHERE uid = 'oibchello';
-- Verify cleanup
SELECT 'Remaining entries for oibchello:' as info;
SELECT 'publicstream' as table_name, COUNT(*) as count FROM publicstream WHERE uid = 'oibchello'
UNION ALL
SELECT 'userquota' as table_name, COUNT(*) as count FROM userquota WHERE uid = 'oibchello';

cleanup_legacy_db.sql (new file, 169 lines)

@ -0,0 +1,169 @@
-- Database Legacy Data Cleanup Script
-- Fixes issues identified in the database analysis
-- Execute these queries step by step to fix legacy data
-- =============================================================================
-- STEP 1: Fix User Table - Update username to match email format
-- =============================================================================
-- Issue: User has username 'oibchello' but email 'oib@chello.at'
-- Fix: Update username to match email (current authentication requirement)
UPDATE "user"
SET username = email,
display_name = CASE
WHEN display_name = '' OR display_name IS NULL
THEN split_part(email, '@', 1) -- Use email prefix as display name
ELSE display_name
END
WHERE email = 'oib@chello.at';
-- Verify the fix
SELECT email, username, display_name, confirmed FROM "user" WHERE email = 'oib@chello.at';
-- =============================================================================
-- STEP 2: Clean Up Expired Sessions
-- =============================================================================
-- Issue: 11 expired sessions still marked as active (security risk)
-- Fix: Mark expired sessions as inactive
UPDATE dbsession
SET is_active = false
WHERE expires_at < NOW() AND is_active = true;
-- Verify expired sessions are now inactive
SELECT COUNT(*) as expired_active_sessions
FROM dbsession
WHERE expires_at < NOW() AND is_active = true;
-- Optional: Delete very old expired sessions (older than 7 days)
DELETE FROM dbsession
WHERE expires_at < NOW() - INTERVAL '7 days';
-- =============================================================================
-- STEP 3: Update Session user_id to Email Format
-- =============================================================================
-- Issue: All sessions use old username format instead of email
-- Fix: Update session user_id to use email format
UPDATE dbsession
SET user_id = 'oib@chello.at'
WHERE user_id = 'oibchello';
-- Verify session user_id updates
SELECT DISTINCT user_id FROM dbsession;
-- =============================================================================
-- STEP 4: Fix PublicStream Username Fields
-- =============================================================================
-- Issue: PublicStream has username/UID mismatches
-- Fix: Update username to match UID (email format)
-- Fix the existing user record
UPDATE publicstream
SET username = uid,
display_name = CASE
WHEN display_name = 'oibchello'
THEN split_part(uid, '@', 1) -- Use email prefix as display name
ELSE display_name
END
WHERE uid = 'oib@chello.at';
-- Verify the fix
SELECT uid, username, display_name FROM publicstream WHERE uid = 'oib@chello.at';
-- =============================================================================
-- STEP 5: Remove Orphaned Records for Deleted User
-- =============================================================================
-- Issue: Records exist for 'oib@bubuit.net' but no user exists
-- Fix: Remove orphaned records
-- Remove orphaned quota record
DELETE FROM userquota WHERE uid = 'oib@bubuit.net';
-- Remove orphaned stream record
DELETE FROM publicstream WHERE uid = 'oib@bubuit.net';
-- Verify orphaned records are removed
SELECT 'userquota' as table_name, COUNT(*) as count FROM userquota WHERE uid = 'oib@bubuit.net'
UNION ALL
SELECT 'publicstream' as table_name, COUNT(*) as count FROM publicstream WHERE uid = 'oib@bubuit.net';
-- =============================================================================
-- VERIFICATION QUERIES
-- =============================================================================
-- Run these to verify all issues are fixed
-- 1. Check user table consistency
SELECT
email,
username,
display_name,
    CASE WHEN email = username THEN '✓' ELSE '✗' END as email_username_match,
    CASE WHEN display_name != '' THEN '✓' ELSE '✗' END as has_display_name
FROM "user";
-- 2. Check session table health
SELECT
COUNT(*) as total_sessions,
COUNT(CASE WHEN is_active THEN 1 END) as active_sessions,
COUNT(CASE WHEN expires_at < NOW() AND is_active THEN 1 END) as expired_but_active,
COUNT(CASE WHEN expires_at - created_at > INTERVAL '20 hours' THEN 1 END) as long_duration_sessions
FROM dbsession;
-- 3. Check PublicStream consistency
SELECT
uid,
username,
display_name,
    CASE WHEN uid = username THEN '✓' ELSE '✗' END as uid_username_match
FROM publicstream;
-- 4. Check referential integrity
SELECT
'Users' as entity,
COUNT(*) as count
FROM "user"
UNION ALL
SELECT
'UserQuota records',
COUNT(*)
FROM userquota
UNION ALL
SELECT
'PublicStream records',
COUNT(*)
FROM publicstream
UNION ALL
SELECT
'Active Sessions',
COUNT(*)
FROM dbsession WHERE is_active = true;
-- 5. Final validation - should return no rows if all issues are fixed
SELECT 'ISSUE: User email/username mismatch' as issue
FROM "user"
WHERE email != username
UNION ALL
SELECT 'ISSUE: Expired active sessions'
FROM dbsession
WHERE expires_at < NOW() AND is_active = true
LIMIT 1
UNION ALL
SELECT 'ISSUE: PublicStream UID/username mismatch'
FROM publicstream
WHERE uid != username
LIMIT 1
UNION ALL
SELECT 'ISSUE: Orphaned quota records'
FROM userquota q
LEFT JOIN "user" u ON q.uid = u.email
WHERE u.email IS NULL
LIMIT 1
UNION ALL
SELECT 'ISSUE: Orphaned stream records'
FROM publicstream p
LEFT JOIN "user" u ON p.uid = u.email
WHERE u.email IS NULL
LIMIT 1;
-- If the final query returns no rows, all legacy issues are fixed! ✅
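The verification queries can also be driven from Python via the engine defined in database.py. A minimal sketch for the first check (query shortened to the email/username mismatch test):

from sqlalchemy import text
from database import engine

check = text('SELECT email, username FROM "user" WHERE email != username')
with engine.connect() as conn:
    mismatches = conn.execute(check).all()
print("user table clean" if not mismatches else f"mismatches: {mismatches}")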


@ -0,0 +1,31 @@
-- Cleanup script for old format user 'oibchello'
-- This user has username-based UID instead of email-based UID
-- Show what will be deleted before deletion
SELECT 'publicstream entries to delete:' as info;
SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid = 'oibchello';
SELECT 'uploadlog entries to delete:' as info;
SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid = 'oibchello' GROUP BY uid;
SELECT 'userquota entries to delete:' as info;
SELECT uid FROM userquota WHERE uid = 'oibchello';
-- Delete from all related tables
-- Start with dependent tables first
DELETE FROM uploadlog WHERE uid = 'oibchello';
DELETE FROM userquota WHERE uid = 'oibchello';
DELETE FROM publicstream WHERE uid = 'oibchello';
-- Verify cleanup
SELECT 'Remaining entries for oibchello in publicstream:' as info;
SELECT COUNT(*) as count FROM publicstream WHERE uid = 'oibchello';
SELECT 'Remaining entries for oibchello in uploadlog:' as info;
SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'oibchello';
SELECT 'Remaining entries for oibchello in userquota:' as info;
SELECT COUNT(*) as count FROM userquota WHERE uid = 'oibchello';
SELECT 'Total remaining old format entries in publicstream:' as info;
SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username;


@ -0,0 +1,28 @@
-- Cleanup script for old format user entries
-- Removes users with username-based UIDs instead of email-based UIDs
-- Show what will be deleted before deletion
SELECT 'publicstream entries to delete:' as info;
SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid IN ('devuser', 'oibchello');
SELECT 'uploadlog entries to delete:' as info;
SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid IN ('devuser', 'oibchello') GROUP BY uid;
SELECT 'userquota entries to delete:' as info;
SELECT uid, storage_bytes FROM userquota WHERE uid IN ('devuser', 'oibchello');
-- Delete from all related tables
-- Start with dependent tables first
DELETE FROM uploadlog WHERE uid IN ('devuser', 'oibchello');
DELETE FROM userquota WHERE uid IN ('devuser', 'oibchello');
DELETE FROM publicstream WHERE uid IN ('devuser', 'oibchello');
-- Verify cleanup
SELECT 'Remaining old format entries in publicstream:' as info;
SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username;
SELECT 'Remaining old format entries in uploadlog:' as info;
SELECT COUNT(*) as count FROM uploadlog WHERE uid NOT LIKE '%@%';
SELECT 'Remaining old format entries in userquota:' as info;
SELECT COUNT(*) as count FROM userquota WHERE uid NOT LIKE '%@%';


@ -0,0 +1,17 @@
-- Cleanup script for orphaned uploadlog entries
-- These entries have username-based UIDs that should have been deleted with the user
-- Show what will be deleted
SELECT 'Orphaned uploadlog entries to delete:' as info;
SELECT uid, filename, processed_filename, created_at FROM uploadlog WHERE uid = 'oibchello';
-- Delete the orphaned entries
DELETE FROM uploadlog WHERE uid = 'oibchello';
-- Verify cleanup
SELECT 'Remaining uploadlog entries for oibchello:' as info;
SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'oibchello';
-- Show all remaining uploadlog entries
SELECT 'All remaining uploadlog entries:' as info;
SELECT uid, filename, created_at FROM uploadlog ORDER BY created_at DESC;


@ -0,0 +1,6 @@
-- Cleanup remaining orphaned uploadlog entries for devuser
DELETE FROM uploadlog WHERE uid = 'devuser';
-- Verify cleanup
SELECT 'All remaining uploadlog entries after cleanup:' as info;
SELECT uid, filename, created_at FROM uploadlog ORDER BY created_at DESC;

concat_opus.py (new file, 78 lines)

@ -0,0 +1,78 @@
# concat_opus.py — Concatenate all opus files in a user directory in random order into a single stream.opus
import os
import random
import subprocess
from pathlib import Path
def concat_opus_files(user_dir: Path, output_file: Path):
"""
Concatenate all .opus files in user_dir (except stream.opus) in random order into output_file.
Overwrites output_file if exists. Creates it if missing.
"""
# Clean up any existing filelist.txt to prevent issues
filelist_path = user_dir / 'filelist.txt'
if filelist_path.exists():
try:
filelist_path.unlink()
except Exception as e:
print(f"Warning: Could not clean up old filelist.txt: {e}")
# Get all opus files except stream.opus and remove any duplicates
import hashlib
file_hashes = set()
files = []
for f in user_dir.glob('*.opus'):
if f.name == 'stream.opus':
continue
try:
# Calculate file hash for duplicate detection
hasher = hashlib.md5()
with open(f, 'rb') as file:
buf = file.read(65536) # Read in 64kb chunks
while len(buf) > 0:
hasher.update(buf)
buf = file.read(65536)
file_hash = hasher.hexdigest()
# Skip if we've seen this exact file before
if file_hash in file_hashes:
print(f"Removing duplicate file: {f.name}")
f.unlink()
continue
file_hashes.add(file_hash)
files.append(f)
except Exception as e:
print(f"Error processing {f}: {e}")
if not files:
# If no files, create an empty stream.opus
output_file.write_bytes(b'')
return output_file
random.shuffle(files)
# Create a filelist for ffmpeg concat
filelist_path = user_dir / 'filelist.txt'
with open(filelist_path, 'w') as f:
for opusfile in files:
f.write(f"file '{opusfile.resolve()}'\n")
# ffmpeg concat demuxer (no re-encoding)
cmd = [
'ffmpeg', '-y', '-f', 'concat', '-safe', '0', '-i', str(filelist_path),
'-c', 'copy', str(output_file)
]
try:
subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except subprocess.CalledProcessError as e:
raise RuntimeError(f"FFmpeg concat failed: {e}")
finally:
if filelist_path.exists():
filelist_path.unlink()
if not output_file.exists():
raise RuntimeError("Concatenation did not produce output.")
return output_file
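Illustrative usage, assuming the per-user directory layout under data/ used elsewhere in this commit:

from pathlib import Path
from concat_opus import concat_opus_files

user_dir = Path("data") / "user@example.com"  # hypothetical account directory
concat_opus_files(user_dir, user_dir / "stream.opus")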

convert_to_opus.py (new file, 39 lines)

@ -0,0 +1,39 @@
# convert_to_opus.py — Default voice pipeline: bandpass + compressor + limiter + gate
import subprocess
import os
def convert_to_opus(input_path, output_path):
if not os.path.exists(input_path):
raise FileNotFoundError(f"Input file not found: {input_path}")
filters = [
"highpass=f=400", # low-cut below 400 Hz
"lowpass=f=12000", # high-cut above 12 kHz
"acompressor=threshold=-18dB",
"alimiter=limit=-1dB",
"agate=threshold=0.02"
]
cmd = [
"ffmpeg", "-y",
"-i", input_path,
"-af", ",".join(filters),
"-ac", "1",
"-ar", "24000",
"-c:a", "libopus",
"-b:a", "40k",
"-vbr", "on",
"-application", "voip",
output_path
]
try:
subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except subprocess.CalledProcessError as e:
raise RuntimeError(f"FFmpeg conversion failed: {e}")
if not os.path.exists(output_path):
raise RuntimeError("Conversion did not produce output.")
return output_path
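Illustrative usage (both file names are placeholders):

from convert_to_opus import convert_to_opus

# Produces a mono, 24 kHz, ~40 kbps Opus file with the voice filter chain above
convert_to_opus("raw_upload.mp3", "processed.opus")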


@ -1,14 +1,33 @@
# database.py — SQLModel engine/session for PostgreSQL
from sqlmodel import create_engine, Session, SQLModel
from contextlib import contextmanager
import os

POSTGRES_URL = os.getenv("DATABASE_URL", "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream")

engine = create_engine(POSTGRES_URL, echo=False)  # Disable echo for production

# SQLAlchemy Base class for models
Base = SQLModel

@contextmanager
def get_db():
    """Session management context manager that ensures proper commit/rollback."""
    session = Session(engine)
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()

# For backward compatibility
get_db_deprecated = get_db
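With this change, call sites use the context manager directly instead of a FastAPI dependency. A minimal sketch of the new pattern (the endpoint and query are illustrative):

from fastapi import APIRouter
from sqlmodel import select
from database import get_db
from models import User

router = APIRouter()

@router.get("/users/count")  # hypothetical endpoint
def count_users():
    # Commit, rollback, and close are handled by get_db itself
    with get_db() as db:
        return {"count": len(db.exec(select(User)).all())}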


@ -0,0 +1,307 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 15.13 (Debian 15.13-0+deb12u1)
-- Dumped by pg_dump version 15.13 (Debian 15.13-0+deb12u1)
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: alembic_version; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public.alembic_version (
version_num character varying(32) NOT NULL
);
ALTER TABLE public.alembic_version OWNER TO d2s;
--
-- Name: dbsession; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public.dbsession (
token character varying NOT NULL,
uid character varying NOT NULL,
ip_address character varying NOT NULL,
user_agent character varying NOT NULL,
created_at timestamp without time zone NOT NULL,
expires_at timestamp without time zone NOT NULL,
is_active boolean NOT NULL,
last_activity timestamp without time zone NOT NULL
);
ALTER TABLE public.dbsession OWNER TO d2s;
--
-- Name: publicstream; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public.publicstream (
uid character varying NOT NULL,
username character varying,
storage_bytes integer NOT NULL,
mtime integer NOT NULL,
last_updated timestamp without time zone,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
ALTER TABLE public.publicstream OWNER TO d2s;
--
-- Name: uploadlog; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public.uploadlog (
id integer NOT NULL,
uid character varying NOT NULL,
ip character varying NOT NULL,
filename character varying,
processed_filename character varying,
size_bytes integer NOT NULL,
created_at timestamp without time zone NOT NULL
);
ALTER TABLE public.uploadlog OWNER TO d2s;
--
-- Name: uploadlog_id_seq; Type: SEQUENCE; Schema: public; Owner: d2s
--
CREATE SEQUENCE public.uploadlog_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.uploadlog_id_seq OWNER TO d2s;
--
-- Name: uploadlog_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: d2s
--
ALTER SEQUENCE public.uploadlog_id_seq OWNED BY public.uploadlog.id;
--
-- Name: user; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public."user" (
token_created timestamp without time zone NOT NULL,
email character varying NOT NULL,
username character varying NOT NULL,
token character varying NOT NULL,
confirmed boolean NOT NULL,
ip character varying NOT NULL
);
ALTER TABLE public."user" OWNER TO d2s;
--
-- Name: userquota; Type: TABLE; Schema: public; Owner: d2s
--
CREATE TABLE public.userquota (
uid character varying NOT NULL,
storage_bytes integer NOT NULL
);
ALTER TABLE public.userquota OWNER TO d2s;
--
-- Name: uploadlog id; Type: DEFAULT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.uploadlog ALTER COLUMN id SET DEFAULT nextval('public.uploadlog_id_seq'::regclass);
--
-- Data for Name: alembic_version; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public.alembic_version (version_num) FROM stdin;
\.
--
-- Data for Name: dbsession; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public.dbsession (token, uid, ip_address, user_agent, created_at, expires_at, is_active, last_activity) FROM stdin;
6Y3PfCj-Mk3qLRttXCul8GTFZU9XWZtoHjk9I4EqnTE oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:32:21.725005 2025-08-07 10:32:21.724909 t 2025-08-06 10:32:21.725012
uGnwnfsAUzbNJZoqYsbT__tVxqfl4NtOD04UKYp8FEY oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:35:43.931018 2025-08-07 10:35:43.930918 t 2025-08-06 10:35:43.931023
OmKl-RrM8D4624xmNQigD3tdG4aXq8CzUq7Ch0qEhP4 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:36:02.758938 2025-08-07 10:36:02.758873 t 2025-08-06 10:36:02.758941
gGpgdAbmpwY3a-zY1Ri92l7hUEjg-GyIt1o2kIDwBE8 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:45:59.701084 2025-08-07 10:45:59.70098 t 2025-08-06 10:45:59.701091
GT9OKNxnhThcFXKvMBBVop7kczUH-4fE4bkCcRd17xE oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:14.181147 2025-08-07 10:46:14.181055 t 2025-08-06 10:46:14.181152
Ok0mwpRLa5Fuimt9eN0l-xUaaCmpipokTkOILSxJNuA oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:27.910441 2025-08-07 10:46:27.91036 t 2025-08-06 10:46:27.910444
DCTd4zCq_Lp_GxdwI14hFwZiDjfvNVvQrUVznllTdIA oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:35.928008 2025-08-07 10:46:35.927945 t 2025-08-06 10:46:35.928011
dtv0uti4QUudgMTnS1NRzZ9nD9vhLO1stM5bdXL4I1o oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:36.104031 2025-08-07 10:46:36.103944 t 2025-08-06 10:46:36.104034
NHZQSW6C2H-5Wq6Un6NqcAmnfSt1PqJeYJnwFKSjAss oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:51:33.897379 2025-08-07 10:51:33.897295 t 2025-08-06 10:51:33.897385
yYZeeLyXmwpyr8Uu1szIyyoIpLc7qiWfQwB57f4kqNI oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:53:43.711315 2025-08-07 10:53:43.711223 t 2025-08-06 10:53:43.71132
KhH9FO4D15l3-SUUkFHjR5Oj1N6Ld-NLmkzaM1QMhtU oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:56:22.050456 2025-08-07 10:56:22.050377 t 2025-08-06 10:56:22.050461
zPQqqHEY4l7ZhLrBPBnvQdsQhQj1_j0n9H6CCnIAME8 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:29:49.412786 2025-08-07 11:29:49.412706 t 2025-08-06 11:29:49.412792
oxYZ9qTaezYliV6UtsI62RpPClj7rIAVXK_1FB3gYMQ oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:34:42.099366 2025-08-07 11:34:42.099276 t 2025-08-06 11:34:42.099371
Ml6aHvae2EPXs9SWZX1BI_mNKgasjIVRMWnUSwKwixQ oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:38:06.002942 2025-08-07 11:38:06.002845 t 2025-08-06 11:38:06.002949
\.
--
-- Data for Name: publicstream; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public.publicstream (uid, username, storage_bytes, mtime, last_updated, created_at, updated_at) FROM stdin;
oib@chello.at oibchello 16151127 1754453233 2025-08-06 06:22:53.97839 2025-08-06 06:07:13.525122 2025-08-06 06:07:13.525126
\.
--
-- Data for Name: uploadlog; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public.uploadlog (id, uid, ip, filename, processed_filename, size_bytes, created_at) FROM stdin;
111 oib@chello.at 127.0.0.1 Taös - Bobstep [ Dubstep ] [1YGV5cNJrt0].opus 210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 688750 2025-08-06 06:22:53.970258
112 oib@chello.at backfilled 107_5e6c3567-7457-48f4-83fc-f3073f065718.opus 107_5e6c3567-7457-48f4-83fc-f3073f065718.opus 671050 2025-08-06 08:14:43.312825
99 oib@chello.at 127.0.0.1 Pendulum - Set Me On Fire (Rasta Dubstep Rastep Raggastep) [ndShSlWMaeA].opus b0afe675-de49-43eb-ab77-86e592934342.opus 1051596 2025-08-06 06:07:13.504649
100 oib@chello.at 127.0.0.1 Roots Reggae (1976) [Unreleased Album] Judah Khamani - Twelve Gates of Rebirth [94NDoPCjRL0].opus 6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 4751764 2025-08-06 06:08:00.96213
101 oib@chello.at backfilled 98_15ba146a-8285-4233-9d44-e77e5fc19cd6.opus 98_15ba146a-8285-4233-9d44-e77e5fc19cd6.opus 805775 2025-08-06 08:05:27.805988
102 oib@chello.at backfilled 97_74e975bf-22f8-4b98-8111-dbcd195a62a2.opus 97_74e975bf-22f8-4b98-8111-dbcd195a62a2.opus 775404 2025-08-06 07:57:50.570271
103 oib@chello.at backfilled 99_b0afe675-de49-43eb-ab77-86e592934342.opus 99_b0afe675-de49-43eb-ab77-86e592934342.opus 1051596 2025-08-06 08:07:13.493002
104 oib@chello.at backfilled 100_6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 100_6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 4751764 2025-08-06 08:08:00.944561
105 oib@chello.at backfilled stream.opus stream.opus 7384026 2025-08-06 08:08:01.540555
106 oib@chello.at 127.0.0.1 Roots Reggae (1973) [Unreleased Album] Judah Khamani - Scrolls of the Fire Lion🔥 [wZvlYr5Baa8].opus 516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 4760432 2025-08-06 06:14:17.072377
107 oib@chello.at 127.0.0.1 Reggae Shark Dubstep remix [101PfefUH5A].opus 5e6c3567-7457-48f4-83fc-f3073f065718.opus 671050 2025-08-06 06:14:43.326351
108 oib@chello.at 127.0.0.1 SiriuX - RastaFari (Dubstep REMIX) [VVAWgX0IgxY].opus 25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 939266 2025-08-06 06:17:55.519608
109 oib@chello.at 127.0.0.1 I'm Death, Straight Up DEATH WHISTLE (Wubbaduck x Auphinity DUBSTEP REMIX) [BK6_6RB2h64].opus 9c9b6356-d5b7-427f-9179-942593cd97e6.opus 805775 2025-08-06 06:19:41.29278
110 oib@chello.at 127.0.0.1 N.A.S.A. Way Down (feat. RZA, Barbie Hatch, & John Frusciante).mp3 72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 901315 2025-08-06 06:22:01.727741
113 oib@chello.at backfilled 110_72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 110_72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 901315 2025-08-06 08:22:01.71671
114 oib@chello.at backfilled 108_25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 108_25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 939266 2025-08-06 08:17:55.511047
115 oib@chello.at backfilled 106_516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 106_516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 4760432 2025-08-06 08:14:17.057068
116 oib@chello.at backfilled 109_9c9b6356-d5b7-427f-9179-942593cd97e6.opus 109_9c9b6356-d5b7-427f-9179-942593cd97e6.opus 805775 2025-08-06 08:19:41.282058
117 oib@chello.at backfilled 111_210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 111_210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 688750 2025-08-06 08:22:53.960209
\.
--
-- Data for Name: user; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public."user" (token_created, email, username, token, confirmed, ip) FROM stdin;
2025-08-06 11:37:50.164201 oib@chello.at oibchello 69aef338-4f18-44b2-96bb-403245901d06 t 127.0.0.1
\.
--
-- Data for Name: userquota; Type: TABLE DATA; Schema: public; Owner: d2s
--
COPY public.userquota (uid, storage_bytes) FROM stdin;
oib@chello.at 16151127
\.
--
-- Name: uploadlog_id_seq; Type: SEQUENCE SET; Schema: public; Owner: d2s
--
SELECT pg_catalog.setval('public.uploadlog_id_seq', 117, true);
--
-- Name: alembic_version alembic_version_pkc; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.alembic_version
ADD CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num);
--
-- Name: dbsession dbsession_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.dbsession
ADD CONSTRAINT dbsession_pkey PRIMARY KEY (token);
--
-- Name: publicstream publicstream_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.publicstream
ADD CONSTRAINT publicstream_pkey PRIMARY KEY (uid);
--
-- Name: uploadlog uploadlog_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.uploadlog
ADD CONSTRAINT uploadlog_pkey PRIMARY KEY (id);
--
-- Name: user user_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public."user"
ADD CONSTRAINT user_pkey PRIMARY KEY (email);
--
-- Name: userquota userquota_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.userquota
ADD CONSTRAINT userquota_pkey PRIMARY KEY (uid);
--
-- Name: ix_publicstream_username; Type: INDEX; Schema: public; Owner: d2s
--
CREATE INDEX ix_publicstream_username ON public.publicstream USING btree (username);
--
-- Name: ix_user_username; Type: INDEX; Schema: public; Owner: d2s
--
CREATE UNIQUE INDEX ix_user_username ON public."user" USING btree (username);
--
-- Name: dbsession dbsession_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: d2s
--
ALTER TABLE ONLY public.dbsession
ADD CONSTRAINT dbsession_user_id_fkey FOREIGN KEY (uid) REFERENCES public."user"(email);
--
-- PostgreSQL database dump complete
--

131
docs/auth-consolidation.md Normal file
View File

@ -0,0 +1,131 @@
# Authentication Logic Consolidation
## Overview
The authentication logic has been consolidated from multiple scattered files into a single, centralized `AuthManager` class. This improves maintainability, reduces code duplication, and provides a consistent authentication interface.
## Files Changed
### 1. New Centralized Module
- **`static/auth-manager.js`** - New centralized authentication manager class
### 2. Refactored Files
- **`static/auth.js`** - Simplified to use AuthManager
- **`static/magic-login.js`** - Simplified to use AuthManager
- **`static/cleanup-auth.js`** - Simplified to use AuthManager
## AuthManager Features
### Core Functionality
- **Centralized State Management** - Single source of truth for authentication state
- **Cookie & localStorage Management** - Consistent handling of auth data storage
- **Magic Link Processing** - Handles both URL-based and token-based magic login
- **Authentication Polling** - Periodic state checks with caching and debouncing
- **User Session Management** - Login, logout, and account deletion
### Key Methods
- `initialize()` - Initialize the auth manager and handle magic login
- `setAuthState(email, username, token)` - Set authentication state
- `clearAuthState()` - Clear all authentication data
- `isAuthenticated()` - Check current authentication status
- `getCurrentUser()` - Get current user data
- `logout()` - Perform logout and redirect
- `deleteAccount()` - Handle account deletion
- `cleanupAuthState(email)` - Clean up inconsistent auth state
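To make the surface concrete, here is a minimal sketch of how these methods could hang together. It mirrors the documented behavior (email as primary UID, the storage keys listed under Data Storage Strategy below), but it is illustrative only, not the actual contents of `static/auth-manager.js`:

```javascript
// Illustrative sketch only — the real AuthManager also handles cookies,
// polling, and magic-link processing.
class AuthManager {
  setAuthState(email, username, token) {
    // Email is the primary UID; username is stored separately for display.
    localStorage.setItem('uid', email);
    localStorage.setItem('user_email', email);
    localStorage.setItem('username', username);
    localStorage.setItem('authToken', token);
    localStorage.setItem('isAuthenticated', 'true');
    localStorage.setItem('uid_time', Date.now().toString());
  }

  clearAuthState() {
    ['uid', 'user_email', 'username', 'authToken', 'isAuthenticated', 'uid_time']
      .forEach((key) => localStorage.removeItem(key));
  }

  isAuthenticated() {
    return localStorage.getItem('isAuthenticated') === 'true';
  }

  getCurrentUser() {
    return {
      email: localStorage.getItem('user_email'),
      username: localStorage.getItem('username'),
    };
  }
}
```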
### Authentication Flow
1. **Magic Login Detection** - Checks URL parameters for login tokens/success
2. **User Info Retrieval** - Fetches email from `/api/me` endpoint
3. **State Setting** - Sets email as primary UID, username for display
4. **UI Updates** - Updates body classes and initializes user session
5. **Navigation** - Redirects to user profile page
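Sketched in code, the flow might look like the following. The `/api/me` endpoint and the email-as-UID convention come from this document; the URL parameter names, body class, and redirect target are assumptions:

```javascript
// Hedged sketch of the magic-login flow; parameter names are assumptions.
async function handleMagicLogin(authManager) {
  const params = new URLSearchParams(window.location.search);
  const token = params.get('token');
  if (!token && params.get('login') !== 'success') return; // step 1: no magic login

  // Step 2: fetch the user's info from the backend
  const res = await fetch('/api/me', { credentials: 'include' });
  if (!res.ok) return;
  const { email, username } = await res.json();

  // Steps 3-5: email becomes the primary UID, UI is updated, then navigate
  authManager.setAuthState(email, username, token);
  document.body.classList.add('authenticated'); // step 4 (class name assumed)
  window.location.assign('/');                  // step 5 (target assumed)
}
```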
## Data Storage Strategy
### localStorage Keys
- `uid` - Primary identifier (email-based)
- `user_email` - Explicit email storage
- `username` - Display name (separate from UID)
- `authToken` - Authentication token
- `isAuthenticated` - Boolean authentication state
- `uid_time` - Session timestamp
### Cookie Strategy
- `uid` - Email-based UID with `SameSite=Lax`
- `authToken` - Auth token with `SameSite=Lax; Secure`
- `isAuthenticated` - Boolean flag with `SameSite=Lax`
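A minimal sketch of writing those cookies with the documented attributes (the helper name is illustrative, not part of the module's API):

```javascript
// Sets the three auth cookies with the attributes listed above.
function setAuthCookies(email, token) {
  document.cookie = `uid=${encodeURIComponent(email)}; Path=/; SameSite=Lax`;
  document.cookie = `authToken=${encodeURIComponent(token)}; Path=/; SameSite=Lax; Secure`;
  document.cookie = `isAuthenticated=true; Path=/; SameSite=Lax`;
}
```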
## Removed Redundancy
### Eliminated Duplicate Code
- **User info fetching** - Centralized in `fetchUserInfo()`
- **Auth state setting** - Centralized in `setAuthState()`
- **Cookie management** - Centralized in `setAuthState()` and `clearAuthState()`
- **Magic login processing** - Centralized in `processMagicLogin()` and `processTokenLogin()`
### Removed Fields
- `confirmed_uid` - Was duplicate of `uid`, now eliminated
## Backward Compatibility
### Global Functions (Legacy Support)
- `window.getCurrentUser()` - Get current user data
- `window.isAuthenticated()` - Check authentication status
- `window.logout()` - Perform logout
- `window.cleanupAuthState(email)` - Clean up auth state
### Existing Function Exports
- `initMagicLogin()` - Maintained in magic-login.js for compatibility
- `cleanupAuthState()` - Maintained in cleanup-auth.js for compatibility
## Benefits Achieved
### 1. **Maintainability**
- Single source of authentication logic
- Consistent error handling and logging
- Easier to debug and modify
### 2. **Performance**
- Reduced code duplication
- Optimized caching and debouncing
- Fewer redundant API calls
### 3. **Reliability**
- Consistent state management
- Proper cleanup on logout
- Robust error handling
### 4. **Security**
- Consistent cookie security attributes
- Proper state clearing on logout
- Centralized validation
## Migration Notes
### For Developers
- Import `authManager` from `./auth-manager.js` for new code
- Use `authManager.isAuthenticated()` instead of manual checks
- Use `authManager.getCurrentUser()` for user data
- Legacy global functions still work for existing code
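For example, assuming `authManager` is a named export as the import guidance above suggests:

```javascript
import { authManager } from './auth-manager.js';

if (authManager.isAuthenticated()) {
  const { email, username } = authManager.getCurrentUser();
  console.log(`Signed in as ${username} <${email}>`);
} else {
  // Render the anonymous UI instead of checking cookies by hand.
}
```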
### Testing
- Test magic link login (both URL and token-based)
- Test authentication state persistence
- Test logout and account deletion
- Test authentication polling and state changes
## Future Improvements
### Potential Enhancements
1. **Token Refresh** - Automatic token renewal
2. **Session Timeout** - Configurable session expiration
3. **Multi-tab Sync** - Better cross-tab authentication sync
4. **Audit Logging** - Enhanced authentication event logging
5. **Rate Limiting** - Protection against auth abuse
### Configuration Options
Consider adding configuration for:
- Polling intervals
- Cache TTL values
- Debug logging levels
- Cookie security settings
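None of these options exist yet; a hypothetical configuration surface covering them could look like:

```javascript
// Hypothetical — sketches the options listed above, not a real API.
const authManager = new AuthManager({
  pollIntervalMs: 30_000,  // polling interval
  cacheTtlMs: 5_000,       // cache TTL
  debug: false,            // debug logging level
  cookieOptions: { sameSite: 'Lax', secure: true },  // cookie security
});
```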

221
execute_db_cleanup.py Normal file
View File

@ -0,0 +1,221 @@
#!/usr/bin/env python3
"""
Execute Database Legacy Data Cleanup
Fixes issues identified in the database analysis using direct SQL execution
"""
import sys
from sqlmodel import Session, text
from database import engine
def execute_step(session, step_name, query, description):
    """Execute a cleanup step and report results"""
    print(f"\n=== {step_name} ===")
    print(f"Description: {description}")
    print(f"Query: {query}")
    try:
        result = session.exec(text(query))
        if query.strip().upper().startswith('SELECT'):
            rows = result.fetchall()
            print(f"Result: {len(rows)} rows")
            for row in rows:
                print(f"  {row}")
        else:
            session.commit()
            print(f"✅ Success: {result.rowcount} rows affected")
        return True
    except Exception as e:
        print(f"❌ Error: {e}")
        session.rollback()
        return False

def main():
    """Execute database cleanup step by step"""
    print("=== DATABASE LEGACY DATA CLEANUP ===")
    with Session(engine) as session:
        success_count = 0
        total_steps = 0

        # Step 1: Fix User Table - Update username to match email format
        total_steps += 1
        if execute_step(
            session,
            "STEP 1: Fix User Table",
            """UPDATE "user"
               SET username = email,
                   display_name = CASE
                       WHEN display_name = '' OR display_name IS NULL
                       THEN split_part(email, '@', 1)
                       ELSE display_name
                   END
               WHERE email = 'oib@chello.at'""",
            "Update username to match email format and set display_name"
        ):
            success_count += 1

        # Verify Step 1
        execute_step(
            session,
            "VERIFY STEP 1",
            """SELECT email, username, display_name, confirmed
               FROM "user" WHERE email = 'oib@chello.at'""",
            "Verify user table fix"
        )

        # Step 2: Clean Up Expired Sessions
        total_steps += 1
        if execute_step(
            session,
            "STEP 2: Mark Expired Sessions Inactive",
            """UPDATE dbsession
               SET is_active = false
               WHERE expires_at < NOW() AND is_active = true""",
            "Mark expired sessions as inactive for security"
        ):
            success_count += 1

        # Verify Step 2
        execute_step(
            session,
            "VERIFY STEP 2",
            """SELECT COUNT(*) as expired_active_sessions
               FROM dbsession
               WHERE expires_at < NOW() AND is_active = true""",
            "Check for remaining expired active sessions"
        )

        # Step 3: Update Session user_id to Email Format
        total_steps += 1
        if execute_step(
            session,
            "STEP 3: Update Session user_id",
            """UPDATE dbsession
               SET user_id = 'oib@chello.at'
               WHERE user_id = 'oibchello'""",
            "Update session user_id to use email format"
        ):
            success_count += 1

        # Verify Step 3
        execute_step(
            session,
            "VERIFY STEP 3",
            """SELECT DISTINCT user_id FROM dbsession""",
            "Check session user_id values"
        )

        # Step 4: Fix PublicStream Username Fields
        total_steps += 1
        if execute_step(
            session,
            "STEP 4: Fix PublicStream",
            """UPDATE publicstream
               SET username = uid,
                   display_name = CASE
                       WHEN display_name = 'oibchello'
                       THEN split_part(uid, '@', 1)
                       ELSE display_name
                   END
               WHERE uid = 'oib@chello.at'""",
            "Update PublicStream username to match UID"
        ):
            success_count += 1

        # Verify Step 4
        execute_step(
            session,
            "VERIFY STEP 4",
            """SELECT uid, username, display_name
               FROM publicstream WHERE uid = 'oib@chello.at'""",
            "Verify PublicStream fix"
        )

        # Step 5: Remove Orphaned Records
        total_steps += 1
        orphan_success = True

        # Remove orphaned quota record
        if not execute_step(
            session,
            "STEP 5a: Remove Orphaned Quota",
            """DELETE FROM userquota WHERE uid = 'oib@bubuit.net'""",
            "Remove orphaned quota record for deleted user"
        ):
            orphan_success = False

        # Remove orphaned stream record
        if not execute_step(
            session,
            "STEP 5b: Remove Orphaned Stream",
            """DELETE FROM publicstream WHERE uid = 'oib@bubuit.net'""",
            "Remove orphaned stream record for deleted user"
        ):
            orphan_success = False

        if orphan_success:
            success_count += 1

        # Verify Step 5
        execute_step(
            session,
            "VERIFY STEP 5",
            """SELECT 'userquota' as table_name, COUNT(*) as count
               FROM userquota WHERE uid = 'oib@bubuit.net'
               UNION ALL
               SELECT 'publicstream' as table_name, COUNT(*) as count
               FROM publicstream WHERE uid = 'oib@bubuit.net'""",
            "Verify orphaned records are removed"
        )

        # Final Verification
        print(f"\n=== FINAL VERIFICATION ===")

        # Check for remaining issues
        execute_step(
            session,
            "FINAL CHECK",
            """SELECT 'ISSUE: User email/username mismatch' as issue
               FROM "user"
               WHERE email != username
               UNION ALL
               SELECT 'ISSUE: Expired active sessions'
               FROM dbsession
               WHERE expires_at < NOW() AND is_active = true
               LIMIT 1
               UNION ALL
               SELECT 'ISSUE: PublicStream UID/username mismatch'
               FROM publicstream
               WHERE uid != username
               LIMIT 1
               UNION ALL
               SELECT 'ISSUE: Orphaned quota records'
               FROM userquota q
               LEFT JOIN "user" u ON q.uid = u.email
               WHERE u.email IS NULL
               LIMIT 1
               UNION ALL
               SELECT 'ISSUE: Orphaned stream records'
               FROM publicstream p
               LEFT JOIN "user" u ON p.uid = u.email
               WHERE u.email IS NULL
               LIMIT 1""",
            "Check for any remaining legacy issues"
        )

        # Summary
        print(f"\n=== CLEANUP SUMMARY ===")
        print(f"Total steps: {total_steps}")
        print(f"Successful steps: {success_count}")
        print(f"Failed steps: {total_steps - success_count}")

        if success_count == total_steps:
            print("✅ All legacy database issues have been fixed!")
        else:
            print("⚠️ Some issues remain. Check the output above for details.")

        return 0 if success_count == total_steps else 1

if __name__ == "__main__":
    sys.exit(main())

174
fix_db_constraints.py Normal file
View File

@ -0,0 +1,174 @@
#!/usr/bin/env python3
"""
Fix Database Constraints and Legacy Data
Handles foreign key constraints properly during cleanup
"""
import sys
from sqlmodel import Session, text
from database import engine
def execute_query(session, query, description):
    """Execute a query and report results"""
    print(f"\n{description}")
    print(f"Query: {query}")
    try:
        result = session.exec(text(query))
        if query.strip().upper().startswith('SELECT'):
            rows = result.fetchall()
            print(f"Result: {len(rows)} rows")
            for row in rows:
                print(f"  {row}")
        else:
            session.commit()
            print(f"✅ Success: {result.rowcount} rows affected")
        return True
    except Exception as e:
        print(f"❌ Error: {e}")
        session.rollback()
        return False

def main():
    """Fix database constraints and legacy data"""
    print("=== FIXING DATABASE CONSTRAINTS AND LEGACY DATA ===")
    with Session(engine) as session:
        # Step 1: First, let's temporarily drop the foreign key constraint
        print("\n=== STEP 1: Handle Foreign Key Constraint ===")

        # Check current constraint
        execute_query(
            session,
            """SELECT conname, conrelid::regclass, confrelid::regclass
               FROM pg_constraint
               WHERE conname = 'dbsession_user_id_fkey'""",
            "Check existing foreign key constraint"
        )

        # Drop the constraint temporarily
        execute_query(
            session,
            """ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey""",
            "Drop foreign key constraint temporarily"
        )

        # Step 2: Update user table
        print("\n=== STEP 2: Update User Table ===")
        execute_query(
            session,
            """UPDATE "user"
               SET username = email,
                   display_name = CASE
                       WHEN display_name = '' OR display_name IS NULL
                       THEN split_part(email, '@', 1)
                       ELSE display_name
                   END
               WHERE email = 'oib@chello.at'""",
            "Update user username to match email"
        )

        # Verify user update
        execute_query(
            session,
            """SELECT email, username, display_name FROM "user" WHERE email = 'oib@chello.at'""",
            "Verify user table update"
        )

        # Step 3: Update session user_id references
        print("\n=== STEP 3: Update Session References ===")
        execute_query(
            session,
            """UPDATE dbsession
               SET user_id = 'oib@chello.at'
               WHERE user_id = 'oibchello'""",
            "Update session user_id to email format"
        )

        # Verify session updates
        execute_query(
            session,
            """SELECT DISTINCT user_id FROM dbsession""",
            "Verify session user_id updates"
        )

        # Step 4: Recreate the foreign key constraint
        print("\n=== STEP 4: Recreate Foreign Key Constraint ===")
        execute_query(
            session,
            """ALTER TABLE dbsession
               ADD CONSTRAINT dbsession_user_id_fkey
               FOREIGN KEY (user_id) REFERENCES "user"(username)""",
            "Recreate foreign key constraint"
        )

        # Step 5: Final verification - check for remaining issues
        print("\n=== STEP 5: Final Verification ===")

        # Check user email/username match
        execute_query(
            session,
            """SELECT email, username,
                      CASE WHEN email = username THEN '✓ Match' ELSE '✗ Mismatch' END as status
               FROM "user" """,
            "Check user email/username consistency"
        )

        # Check expired sessions
        execute_query(
            session,
            """SELECT COUNT(*) as expired_active_sessions
               FROM dbsession
               WHERE expires_at < NOW() AND is_active = true""",
            "Check for expired active sessions"
        )

        # Check PublicStream consistency
        execute_query(
            session,
            """SELECT uid, username,
                      CASE WHEN uid = username THEN '✓ Match' ELSE '✗ Mismatch' END as status
               FROM publicstream""",
            "Check PublicStream UID/username consistency"
        )

        # Check for orphaned records
        execute_query(
            session,
            """SELECT 'userquota' as table_name, COUNT(*) as orphaned_records
               FROM userquota q
               LEFT JOIN "user" u ON q.uid = u.email
               WHERE u.email IS NULL
               UNION ALL
               SELECT 'publicstream' as table_name, COUNT(*) as orphaned_records
               FROM publicstream p
               LEFT JOIN "user" u ON p.uid = u.email
               WHERE u.email IS NULL""",
            "Check for orphaned records"
        )

        # Summary of current state
        print("\n=== DATABASE STATE SUMMARY ===")
        execute_query(
            session,
            """SELECT
                   COUNT(DISTINCT u.email) as total_users,
                   COUNT(DISTINCT q.uid) as quota_records,
                   COUNT(DISTINCT p.uid) as stream_records,
                   COUNT(CASE WHEN s.is_active THEN 1 END) as active_sessions,
                   COUNT(CASE WHEN s.expires_at < NOW() AND s.is_active THEN 1 END) as expired_active_sessions
               FROM "user" u
               FULL OUTER JOIN userquota q ON u.email = q.uid
               FULL OUTER JOIN publicstream p ON u.email = p.uid
               FULL OUTER JOIN dbsession s ON u.username = s.user_id""",
            "Database state summary"
        )

    print("\n✅ Database cleanup completed!")
    print("All legacy data issues should now be resolved.")
    return 0

if __name__ == "__main__":
    sys.exit(main())

13
fix_dbsession_fk.sql Normal file
View File

@ -0,0 +1,13 @@
-- Migration script to update DBSession foreign key to reference user.email
-- Run this when no active sessions exist to avoid deadlocks
BEGIN;
-- Step 1: Drop the existing foreign key constraint if it exists
ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey;
-- Step 2: Add the new foreign key constraint referencing user.email
ALTER TABLE dbsession ADD CONSTRAINT dbsession_uid_fkey
FOREIGN KEY (uid) REFERENCES "user"(email);
COMMIT;

156
list_streams.py Normal file
View File

@ -0,0 +1,156 @@
# list_streams.py — FastAPI route to list all public streams (users with stream.opus)
from fastapi import APIRouter, Request, Depends
from fastapi.responses import StreamingResponse, Response
from sqlalchemy.orm import Session
from sqlalchemy import select
from models import PublicStream
from database import get_db
from pathlib import Path
import asyncio
import os
import json
router = APIRouter()
DATA_ROOT = Path("./data")
@router.get("/streams-sse")
async def streams_sse(request: Request):
# Add CORS headers for SSE
origin = request.headers.get('origin', '')
allowed_origins = ["https://dicta2stream.net", "http://localhost:8000", "http://127.0.0.1:8000"]
# Use the request origin if it's in the allowed list, otherwise use the first allowed origin
cors_origin = origin if origin in allowed_origins else allowed_origins[0]
headers = {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
"Connection": "keep-alive",
"Access-Control-Allow-Origin": cors_origin,
"Access-Control-Allow-Credentials": "true",
"Access-Control-Expose-Headers": "Content-Type",
"X-Accel-Buffering": "no" # Disable buffering for nginx
}
# Handle preflight requests
if request.method == "OPTIONS":
headers.update({
"Access-Control-Allow-Methods": "GET, OPTIONS",
"Access-Control-Allow-Headers": request.headers.get("access-control-request-headers", "*"),
"Access-Control-Max-Age": "86400" # 24 hours
})
return Response(status_code=204, headers=headers)
async def event_wrapper():
# Use the database session context manager
with get_db() as db:
try:
async for event in list_streams_sse(db):
yield event
except Exception as e:
# Only log errors if DEBUG is enabled
# Debug messages disabled
yield f"data: {json.dumps({'error': True, 'message': 'An error occurred'})}\n\n"
return StreamingResponse(
event_wrapper(),
media_type="text/event-stream",
headers=headers
)
async def list_streams_sse(db):
"""Stream public streams from the database as Server-Sent Events"""
try:
# Send initial ping
yield ":ping\n\n"
# Query all public streams from the database with required fields
# Also get all valid users to filter out orphaned streams
from models import User
# Use the query interface instead of execute
all_streams = db.query(PublicStream).order_by(PublicStream.mtime.desc()).all()
# Get all valid user UIDs (email and username)
all_users = db.query(User).all()
valid_uids = set()
for user in all_users:
valid_uids.add(user.email)
valid_uids.add(user.username)
# Filter out orphaned streams (streams without corresponding user accounts)
streams = []
orphaned_count = 0
for stream in all_streams:
if stream.uid in valid_uids:
streams.append(stream)
else:
orphaned_count += 1
print(f"[STREAMS] Filtering out orphaned stream: {stream.uid} (username: {stream.username})")
if orphaned_count > 0:
print(f"[STREAMS] Filtered out {orphaned_count} orphaned streams from public display")
if not streams:
print("No public streams found in the database")
yield f"data: {json.dumps({'end': True})}\n\n"
return
# Debug messages disabled
# Send each stream as an SSE event
for stream in streams:
try:
# Ensure we have all required fields with fallbacks
stream_data = {
'uid': stream.uid or '',
'size': stream.storage_bytes or 0,
'mtime': int(stream.mtime) if stream.mtime is not None else 0,
'username': stream.username or '',
'created_at': stream.created_at.isoformat() if stream.created_at else None,
'updated_at': stream.updated_at.isoformat() if stream.updated_at else None
}
# Debug messages disabled
yield f"data: {json.dumps(stream_data)}\n\n"
# Small delay to prevent overwhelming the client
await asyncio.sleep(0.1)
except Exception as e:
print(f"Error processing stream {stream.uid}: {str(e)}")
# Debug messages disabled
continue
# Send end of stream marker
# Debug messages disabled
yield f"data: {json.dumps({'end': True})}\n\n"
except Exception as e:
print(f"Error in list_streams_sse: {str(e)}")
# Debug messages disabled
yield f"data: {json.dumps({'error': True, 'message': str(e)})}\n\n"
@router.get("/streams")
def list_streams():
"""List all public streams from the database"""
# Use the database session context manager
with get_db() as db:
try:
# Use the query interface instead of execute
streams = db.query(PublicStream).order_by(PublicStream.mtime.desc()).all()
return {
"streams": [
{
'uid': stream.uid,
'size': stream.size,
'mtime': stream.mtime,
'created_at': stream.created_at.isoformat() if stream.created_at else None,
'updated_at': stream.updated_at.isoformat() if stream.updated_at else None
}
for stream in streams
]
}
except Exception as e:
# Debug messages disabled
return {"streams": []}

3
log.py
View File

@ -15,5 +15,6 @@ def log_violation(event: str, ip: str, uid: str, reason: str):
        f.write(log_entry)
    # If DEBUG mode, also print to stdout
    if os.getenv("DEBUG", "0") in ("1", "true", "True"):  # Set DEBUG=1 in .env to enable
        # Debug messages disabled
        pass

129
magic.py
View File

@ -12,58 +12,107 @@ import json
router = APIRouter()

@router.post("/magic-login")
async def magic_login(request: Request, response: Response, token: str = Form(...)):
    # Debug messages disabled

    # Use the database session context manager
    with get_db() as db:
        try:
            # Look up user by token
            user = db.query(User).filter(User.token == token).first()
            # Debug messages disabled
            if not user:
                # Debug messages disabled
                raise HTTPException(status_code=401, detail="Invalid or expired token")

            if datetime.utcnow() - user.token_created > timedelta(minutes=30):
                # Debug messages disabled
                raise HTTPException(status_code=401, detail="Token expired")

            # Mark user as confirmed if not already
            if not user.confirmed:
                user.confirmed = True
                user.ip = request.client.host
                db.add(user)
                # Debug messages disabled

            # Create a new session for the user (valid for 24 hours)
            session_token = secrets.token_urlsafe(32)
            expires_at = datetime.utcnow() + timedelta(hours=24)

            # Create new session
            session = DBSession(
                token=session_token,
                uid=user.email or user.username,  # Use email as UID
                ip_address=request.client.host or "",
                user_agent=request.headers.get("user-agent", ""),
                expires_at=expires_at,
                is_active=True
            )
            db.add(session)
            db.commit()

            # Store user data for use after the session is committed
            user_email = user.email or user.username
            username = user.username
        except Exception as e:
            db.rollback()
            # Debug messages disabled
            raise HTTPException(status_code=500, detail="Database error during login")

    # Determine if we're running in development (localhost) or production
    is_localhost = request.url.hostname == "localhost"

    # Prepare response data
    response_data = {
        "success": True,
        "message": "Login successful",
        "user": {
            "email": user_email,
            "username": username
        },
        "token": session_token  # Include the token in the JSON response
    }

    # Create the response
    response = JSONResponse(
        content=response_data,
        status_code=200
    )

    # Set cookies
    response.set_cookie(
        key="sessionid",
        value=session_token,
        httponly=True,
        secure=not is_localhost,
        samesite="lax" if is_localhost else "none",
        max_age=86400,  # 24 hours
        path="/"
    )
    response.set_cookie(
        key="uid",
        value=user_email,
        samesite="lax" if is_localhost else "none",
        secure=not is_localhost,
        max_age=86400,  # 24 hours
        path="/"
    )
    response.set_cookie(
        key="authToken",
        value=session_token,
        samesite="lax" if is_localhost else "none",
        secure=not is_localhost,
        max_age=86400,  # 24 hours
        path="/"
    )

    # Debug messages disabled
    return response

655
main.py
View File

@ -90,20 +90,30 @@ def get_current_user(request: Request, db: Session = Depends(get_db)):
from range_response import range_response

@app.get("/audio/{uid}/{filename}")
def get_audio(uid: str, filename: str, request: Request):
    # Allow public access ONLY to stream.opus
    # Use the database session context manager
    with get_db() as db:
        try:
            # Use email-based UID directly for file system access
            # If UID contains @, it's an email - use it directly
            if '@' in uid:
                from models import User
                user = db.query(User).filter(User.email == uid).first()
                if not user:
                    raise HTTPException(status_code=404, detail="User not found")
                filesystem_uid = uid  # Use email directly for directory
            else:
                # Legacy support for username-based UIDs - convert to email
                from models import User
                user = db.query(User).filter(User.username == uid).first()
                if not user:
                    raise HTTPException(status_code=404, detail="User not found")
                filesystem_uid = user.email  # Convert username to email for directory
        except Exception as e:
            db.rollback()
            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")

    user_dir = os.path.join("data", filesystem_uid)
    file_path = os.path.join(user_dir, filename)
@ -127,7 +137,8 @@ def get_audio(uid: str, filename: str, request: Request, db: Session = Depends(g
    return FileResponse(real_file_path, media_type="audio/ogg")

if debug_mode:
    # Debug messages disabled
    pass

# Global error handler to always return JSON
from slowapi.errors import RateLimitExceeded
@ -179,7 +190,7 @@ from register import router as register_router
from magic import router as magic_router
from upload import router as upload_router
from streams import router as streams_router
from auth_router import router as auth_router

app.include_router(streams_router)
@ -188,14 +199,100 @@ from list_streams import router as list_streams_router
from account_router import router as account_router

# Include all routers
app.include_router(auth_router, prefix="/api")
app.include_router(account_router)
app.include_router(register_router)
app.include_router(magic_router)
app.include_router(upload_router)
app.include_router(list_streams_router)
@app.get("/user-files/{uid}")
async def list_user_files(uid: str):
from pathlib import Path
# Get the user's directory and check for files first
user_dir = Path("data") / uid
if not user_dir.exists() or not user_dir.is_dir():
return {"files": []}
# Get all files that actually exist on disk
existing_files = {f.name for f in user_dir.iterdir() if f.is_file()}
# Use the database session context manager for all database operations
with get_db() as db:
# Verify the user exists
user_check = db.query(User).filter((User.username == uid) | (User.email == uid)).first()
if not user_check:
raise HTTPException(status_code=404, detail="User not found")
# Query the UploadLog table for this user
all_upload_logs = db.query(UploadLog).filter(
UploadLog.uid == uid
).order_by(UploadLog.created_at.desc()).all()
# Track processed files to avoid duplicates
processed_files = set()
files_metadata = []
for log in all_upload_logs:
# Skip if no processed filename
if not log.processed_filename:
continue
# Skip if we've already processed this file
if log.processed_filename in processed_files:
continue
# Skip stream.opus from uploads list (it's a special file)
if log.processed_filename == 'stream.opus':
continue
# Skip if file doesn't exist on disk
# Files are stored with the pattern: {upload_id}_{processed_filename}
expected_filename = f"{log.id}_{log.processed_filename}"
if expected_filename not in existing_files:
# Only delete records older than 5 minutes to avoid race conditions
from datetime import datetime, timedelta
cutoff_time = datetime.utcnow() - timedelta(minutes=5)
if log.created_at < cutoff_time:
print(f"[CLEANUP] Removing orphaned DB record (older than 5min): {expected_filename}")
db.delete(log)
continue
# Add to processed files to avoid duplicates
processed_files.add(log.processed_filename)
# Always use the original filename if present
display_name = log.filename if log.filename else log.processed_filename
# Only include files that exist on disk
# Files are stored with the pattern: {upload_id}_{processed_filename}
stored_filename = f"{log.id}_{log.processed_filename}"
file_path = user_dir / stored_filename
if file_path.exists() and file_path.is_file():
try:
# Get the actual file size in case it changed
actual_size = file_path.stat().st_size
files_metadata.append({
"original_name": display_name,
"stored_name": log.processed_filename,
"size": actual_size
})
except OSError:
# If we can't access the file, skip it
continue
# Commit any database changes (deletions of non-existent files)
try:
db.commit()
except Exception as e:
print(f"[ERROR] Failed to commit database changes: {e}")
db.rollback()
return {"files": files_metadata}
# Serve static files
app.mount("/static", StaticFiles(directory="static"), name="static")
@ -258,9 +355,9 @@ def serve_me():
@app.get("/admin/stats") @app.get("/admin/stats")
def admin_stats(request: Request, db: Session = Depends(get_db)): def admin_stats(request: Request, db: Session = Depends(get_db)):
from sqlmodel import select from sqlmodel import select
users = db.exec(select(User)).all() users = db.query(User).all()
users_count = len(users) users_count = len(users)
total_quota = db.exec(select(UserQuota)).all() total_quota = db.query(UserQuota).all()
total_quota_sum = sum(q.storage_bytes for q in total_quota) total_quota_sum = sum(q.storage_bytes for q in total_quota)
violations_log = 0 violations_log = 0
try: try:
@ -292,10 +389,224 @@ def debug(request: Request):
MAX_QUOTA_BYTES = 100 * 1024 * 1024

# Delete account endpoint - fallback implementation since account_router.py has loading issues
@app.post("/api/delete-account")
async def delete_account_fallback(request: Request, db: Session = Depends(get_db)):
    try:
        # Get request data
        data = await request.json()
        uid = data.get("uid")
        if not uid:
            raise HTTPException(status_code=400, detail="Missing UID")

        ip = request.client.host
        # Debug messages disabled

        # Find user by email or username
        user = None
        if '@' in uid:
            user = db.exec(select(User).where(User.email == uid)).first()
        if not user:
            user = db.exec(select(User).where(User.username == uid)).first()

        # If still not found, check if this UID exists in upload logs and try to find the associated user
        if not user:
            # Look for upload logs with this UID to find the real user
            upload_log = db.exec(select(UploadLog).where(UploadLog.uid == uid)).first()
            if upload_log:
                # Try to find a user that might be associated with this UID
                # Check if there's a user with the same IP or similar identifier
                all_users = db.exec(select(User)).all()
                for potential_user in all_users:
                    # Use the first confirmed user as fallback (for orphaned UIDs)
                    if potential_user.confirmed:
                        user = potential_user
                        # Debug messages disabled
                        break

        if not user:
            # Debug messages disabled
            raise HTTPException(status_code=404, detail="User not found")

        if user.ip != ip:
            raise HTTPException(status_code=403, detail="Unauthorized: IP address does not match")

        # Delete user data from database using the original UID
        # The original UID is what's stored in the database records

        # Delete upload logs for all possible UIDs (original UID, email, username)
        upload_logs_to_delete = []

        # Check for upload logs with original UID
        upload_logs_original = db.query(UploadLog).filter(UploadLog.uid == uid).all()
        if upload_logs_original:
            upload_logs_to_delete.extend(upload_logs_original)

        # Check for upload logs with user email
        upload_logs_email = db.query(UploadLog).filter(UploadLog.uid == user.email).all()
        if upload_logs_email:
            upload_logs_to_delete.extend(upload_logs_email)

        # Check for upload logs with username
        upload_logs_username = db.query(UploadLog).filter(UploadLog.uid == user.username).all()
        if upload_logs_username:
            upload_logs_to_delete.extend(upload_logs_username)

        # Delete all found upload log records
        for log in upload_logs_to_delete:
            try:
                db.delete(log)
            except Exception as e:
                # Debug messages disabled
                pass

        # Delete user quota for both the original UID and user email (to cover all cases)
        quota_original = db.get(UserQuota, uid)
        if quota_original:
            db.delete(quota_original)
        quota_email = db.get(UserQuota, user.email)
        if quota_email:
            db.delete(quota_email)

        # Delete user sessions
        sessions = db.query(DBSession).filter(DBSession.user_id == user.username).all()
        for session in sessions:
            db.delete(session)

        # Delete public stream entries for all possible UIDs
        # Use select() instead of get() to find all matching records
        public_streams_to_delete = []

        # Check for public stream with original UID
        public_stream_original = db.query(PublicStream).filter(PublicStream.uid == uid).first()
        if public_stream_original:
            public_streams_to_delete.append(public_stream_original)

        # Check for public stream with user email
        public_stream_email = db.query(PublicStream).filter(PublicStream.uid == user.email).first()
        if public_stream_email:
            public_streams_to_delete.append(public_stream_email)

        # Check for public stream with username
        public_stream_username = db.query(PublicStream).filter(PublicStream.uid == user.username).first()
        if public_stream_username:
            public_streams_to_delete.append(public_stream_username)

        # Delete all found public stream records
        for ps in public_streams_to_delete:
            try:
                db.delete(ps)
            except Exception as e:
                # Debug messages disabled
                pass

        # Delete user directory BEFORE deleting user record - check all possible locations
        import shutil

        # Try to delete directory with UID (email) - current standard
        uid_dir = os.path.join('data', uid)
        if os.path.exists(uid_dir):
            shutil.rmtree(uid_dir, ignore_errors=True)

        # Also try to delete directory with email (in case of different UID formats)
        email_dir = os.path.join('data', user.email)
        if os.path.exists(email_dir) and email_dir != uid_dir:
            shutil.rmtree(email_dir, ignore_errors=True)

        # Also try to delete directory with username (legacy format)
        username_dir = os.path.join('data', user.username)
        if os.path.exists(username_dir) and username_dir != uid_dir and username_dir != email_dir:
            shutil.rmtree(username_dir, ignore_errors=True)

        # Delete user account AFTER directory cleanup
        db.delete(user)
        db.commit()

        return {"status": "success", "message": "Account deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        # Debug messages disabled
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to delete account: {str(e)}")
# Cleanup endpoint for orphaned public streams
@app.post("/api/cleanup-streams")
async def cleanup_orphaned_streams(request: Request, db: Session = Depends(get_db)):
    try:
        # Get request data
        data = await request.json()
        admin_secret = data.get("admin_secret")

        # Verify admin access
        if admin_secret != ADMIN_SECRET:
            raise HTTPException(status_code=403, detail="Unauthorized")

        # Find orphaned public streams (streams without corresponding user accounts)
        all_streams = db.query(PublicStream).all()
        all_users = db.query(User).all()

        # Create sets of valid UIDs from user accounts
        valid_uids = set()
        for user in all_users:
            valid_uids.add(user.email)
            valid_uids.add(user.username)

        orphaned_streams = []
        for stream in all_streams:
            if stream.uid not in valid_uids:
                orphaned_streams.append(stream)

        # Delete orphaned streams
        deleted_count = 0
        for stream in orphaned_streams:
            try:
                print(f"[CLEANUP] Deleting orphaned stream: {stream.uid} (username: {stream.username})")
                db.delete(stream)
                deleted_count += 1
            except Exception as e:
                print(f"[CLEANUP] Error deleting stream {stream.uid}: {e}")

        db.commit()
        print(f"[CLEANUP] Deleted {deleted_count} orphaned public streams")

        return {
            "status": "success",
            "message": f"Deleted {deleted_count} orphaned public streams",
            "deleted_streams": [s.uid for s in orphaned_streams]
        }
    except HTTPException:
        raise
    except Exception as e:
        print(f"[CLEANUP] Error: {str(e)}")
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Cleanup failed: {str(e)}")
# Original delete account endpoint has been moved to account_router.py
@app.delete("/uploads/{uid}/{filename}")
async def delete_file(uid: str, filename: str, request: Request):
    """
    Delete a file for a specific user.
@ -319,26 +630,84 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D
    if user.ip != ip:
        raise HTTPException(status_code=403, detail="Device/IP mismatch. Please log in again.")

    # Set up user directory using email (matching upload logic)
    user_dir = os.path.join('data', user.email)
    os.makedirs(user_dir, exist_ok=True)

    # Decode URL-encoded filename
    from urllib.parse import unquote
    filename = unquote(filename)

    # Debug: Print the user info and filename being used
    # Debug messages disabled
    if os.path.exists(user_dir):
        # Debug messages disabled
        pass

    # Construct and validate target path
    target_path = os.path.join(user_dir, filename)
    real_target_path = os.path.realpath(target_path)
    real_user_dir = os.path.realpath(user_dir)

    # Debug: Print the constructed paths
    # Debug messages disabled

    # Security check: Ensure the target path is inside the user's directory
    if not real_target_path.startswith(real_user_dir + os.sep):
        # Debug messages disabled
        raise HTTPException(status_code=403, detail="Invalid file path")

    # Check if file exists
    if not os.path.isfile(real_target_path):
        # Debug: List files in the directory to help diagnose the issue
        try:
            # Debug messages disabled
            if os.path.exists(real_user_dir):
                files_in_dir = os.listdir(real_user_dir)
                # Debug messages disabled
                # Print detailed file info
                for f in files_in_dir:
                    full_path = os.path.join(real_user_dir, f)
                    try:
                        # Debug messages disabled
                        pass
                    except Exception as e:
                        # Debug messages disabled
                        pass
                # Try to find a matching file (case-insensitive, partial match)
                matching_files = [f for f in files_in_dir if filename.lower() in f.lower()]
                if matching_files:
                    # Use the first matching file
                    real_target_path = os.path.join(real_user_dir, matching_files[0])
                    # Debug messages disabled
                else:
                    # Debug messages disabled
                    raise HTTPException(status_code=404, detail=f"File not found: {filename}")
            else:
                # Debug messages disabled
                raise HTTPException(status_code=404, detail="User directory not found")
        except HTTPException:
            raise
        except Exception as e:
            # Debug messages disabled
            raise HTTPException(status_code=404, detail=f"File not found: {filename}")
    # Delete both the target file and its UUID-only variant
    deleted_files = []
@ -377,20 +746,23 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D
    # Clean up the database record for this file
    try:
        with get_db() as db:
            try:
                # Find and delete the upload log entry
                log_entry = db.query(UploadLog).filter(
                    UploadLog.uid == uid,
                    UploadLog.processed_filename == filename
                ).first()
                if log_entry:
                    db.delete(log_entry)
                    db.commit()
                    log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}")
            except Exception as e:
                db.rollback()
                raise e
    except Exception as e:
        log_violation("DB_CLEANUP_ERROR", ip, uid, f"Failed to clean up DB record: {str(e)}")
    # Regenerate stream.opus after file deletion
    try:
@ -405,14 +777,17 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D
    # Update user quota in a separate try-except to not fail the entire operation
    try:
        with get_db() as db:
            try:
                # Use verify_and_fix_quota to ensure consistency between disk and DB
                total_size = verify_and_fix_quota(db, user.username, user_dir)
                log_violation("QUOTA_UPDATE", ip, uid,
                              f"Updated quota: {total_size} bytes")
            except Exception as e:
                db.rollback()
                raise e
    except Exception as e:
        log_violation("QUOTA_ERROR", ip, uid, f"Quota update failed: {str(e)}")

    return {"status": "deleted"}
@ -444,11 +819,13 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int:
    if os.path.isfile(stream_opus_path):
        try:
            total_size = os.path.getsize(stream_opus_path)
            # Debug messages disabled
        except (OSError, FileNotFoundError) as e:
            # Debug messages disabled
            pass
    else:
        # Debug messages disabled
        pass

    # Update quota in database
    q = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
@ -456,123 +833,143 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int:
    db.add(q)

    # Clean up any database records for files that don't exist
    # BUT only for records older than 5 minutes to avoid race conditions with recent uploads
    from datetime import datetime, timedelta
    cutoff_time = datetime.utcnow() - timedelta(minutes=5)
    uploads = db.query(UploadLog).filter(
        UploadLog.uid == uid,
        UploadLog.created_at < cutoff_time  # Only check older records
    ).all()
    for upload in uploads:
        if upload.processed_filename:  # Only check if processed_filename exists
            stored_filename = f"{upload.id}_{upload.processed_filename}"
            file_path = os.path.join(user_dir, stored_filename)
            if not os.path.isfile(file_path):
                # Debug messages disabled
                db.delete(upload)
    try:
        db.commit()
        # Debug messages disabled
    except Exception as e:
        # Debug messages disabled
        db.rollback()
        raise
    return total_size
@app.get("/me/{uid}") @app.get("/me/{uid}")
def get_me(uid: str, request: Request, response: Response, db: Session = Depends(get_db)): def get_me(uid: str, request: Request, response: Response):
# Add headers to prevent caching # Add headers to prevent caching
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Pragma"] = "no-cache" response.headers["Pragma"] = "no-cache"
response.headers["Expires"] = "0" response.headers["Expires"] = "0"
print(f"[DEBUG] GET /me/{uid} - Client IP: {request.client.host}")
try:
# Get user info
user = get_user_by_uid(uid)
if not user:
print(f"[ERROR] User with UID {uid} not found")
raise HTTPException(status_code=404, detail="User not found")
# Only enforce IP check in production # Debug messages disabled
if not debug_mode:
if user.ip != request.client.host:
print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}")
# In production, we might want to be more strict
# But for now, we'll just log a warning in development
if not debug_mode:
raise HTTPException(status_code=403, detail="IP address mismatch")
# Get user directory # Use the database session context manager for all database operations
user_dir = os.path.join('data', uid) with get_db() as db:
os.makedirs(user_dir, exist_ok=True) try:
# Get user info
user = db.query(User).filter((User.username == uid) | (User.email == uid)).first()
if not user:
print(f"[ERROR] User with UID {uid} not found")
raise HTTPException(status_code=404, detail="User not found")
# Get all upload logs for this user # Only enforce IP check in production
upload_logs = db.exec( if not debug_mode:
select(UploadLog) if user.ip != request.client.host:
.where(UploadLog.uid == uid) print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}")
.order_by(UploadLog.created_at.desc()) # In production, we might want to be more strict
).all() if not debug_mode:
print(f"[DEBUG] Found {len(upload_logs)} upload logs for UID {uid}") raise HTTPException(status_code=403, detail="IP address mismatch")
# Build file list from database records, checking if files exist on disk # Get user directory
files = [] user_dir = os.path.join('data', uid)
seen_files = set() # Track seen files to avoid duplicates os.makedirs(user_dir, exist_ok=True)
print(f"[DEBUG] Processing {len(upload_logs)} upload logs for UID {uid}") # Get all upload logs for this user using the query interface
upload_logs = db.query(UploadLog).filter(
UploadLog.uid == uid
).order_by(UploadLog.created_at.desc()).all()
for i, log in enumerate(upload_logs): # Debug messages disabled
if not log.filename or not log.processed_filename:
print(f"[DEBUG] Skipping log entry {i}: missing filename or processed_filename")
continue
# The actual filename on disk has the log ID prepended # Build file list from database records, checking if files exist on disk
stored_filename = f"{log.id}_{log.processed_filename}" files = []
file_path = os.path.join(user_dir, stored_filename) seen_files = set() # Track seen files to avoid duplicates
# Skip if we've already seen this file # Debug messages disabled
if stored_filename in seen_files:
print(f"[DEBUG] Skipping duplicate file: {stored_filename}")
continue
seen_files.add(stored_filename) for i, log in enumerate(upload_logs):
if not log.filename or not log.processed_filename:
# Debug messages disabled
continue
# Only include the file if it exists on disk and is not stream.opus # The actual filename on disk has the log ID prepended
if os.path.isfile(file_path) and stored_filename != 'stream.opus': stored_filename = f"{log.id}_{log.processed_filename}"
try: file_path = os.path.join(user_dir, stored_filename)
# Get the actual file size in case it changed
file_size = os.path.getsize(file_path)
file_info = {
"name": stored_filename,
"original_name": log.filename,
"size": file_size
}
files.append(file_info)
print(f"[DEBUG] Added file {len(files)}: {log.filename} (stored as {stored_filename}, {file_size} bytes)")
except OSError as e:
print(f"[WARNING] Could not access file {stored_filename}: {e}")
else:
print(f"[DEBUG] File not found on disk or is stream.opus: {stored_filename}")
# Log all files being returned # Skip if we've already seen this file
print("[DEBUG] All files being returned:") if stored_filename in seen_files:
for i, file_info in enumerate(files, 1): # Debug messages disabled
print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)") continue
# Verify and fix quota based on actual files on disk seen_files.add(stored_filename)
total_size = verify_and_fix_quota(db, uid, user_dir)
quota_mb = round(total_size / (1024 * 1024), 2)
print(f"[DEBUG] Verified quota for UID {uid}: {quota_mb} MB")
response_data = { # Only include the file if it exists on disk and is not stream.opus
"files": files, if os.path.isfile(file_path) and stored_filename != 'stream.opus':
"quota": quota_mb try:
} # Get the actual file size in case it changed
print(f"[DEBUG] Returning {len(files)} files and quota info") file_size = os.path.getsize(file_path)
return response_data file_info = {
"name": stored_filename,
"original_name": log.filename,
"size": file_size
}
files.append(file_info)
# Debug messages disabled
except OSError as e:
print(f"[WARNING] Could not access file {stored_filename}: {e}")
else:
# Debug messages disabled
pass
except HTTPException: # Log all files being returned
# Re-raise HTTP exceptions as they are # Debug messages disabled
raise # for i, file_info in enumerate(files, 1):
except Exception as e: # print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)")
# Log the full traceback for debugging
import traceback # Verify and fix quota based on actual files on disk
error_trace = traceback.format_exc() total_size = verify_and_fix_quota(db, uid, user_dir)
print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}") quota_mb = round(total_size / (1024 * 1024), 2)
# Return a 500 error with a generic message max_quota_mb = round(MAX_QUOTA_BYTES / (1024 * 1024), 2)
raise HTTPException(status_code=500, detail="Internal server error") # Debug messages disabled
response_data = {
"files": files,
"quota": {
"used": quota_mb,
"max": max_quota_mb,
"used_bytes": total_size,
"max_bytes": MAX_QUOTA_BYTES,
"percentage": round((total_size / MAX_QUOTA_BYTES) * 100, 2) if MAX_QUOTA_BYTES > 0 else 0
}
}
# Debug messages disabled
return response_data
except HTTPException:
# Re-raise HTTP exceptions as they are
raise
except Exception as e:
# Log the full traceback for debugging
import traceback
error_trace = traceback.format_exc()
print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}")
# Rollback any database changes in case of error
db.rollback()
# Return a 500 error with a generic message
raise HTTPException(status_code=500, detail="Internal server error")

13
migrate_dbsession_fk.sql Normal file
View File

@ -0,0 +1,13 @@
-- Migration script to update DBSession foreign key to reference user.email
-- Run this when no active sessions exist to avoid deadlocks
BEGIN;
-- Step 1: Drop the existing foreign key constraint
ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey;
-- Step 2: Add the new foreign key constraint referencing user.email
ALTER TABLE dbsession ADD CONSTRAINT dbsession_user_id_fkey
FOREIGN KEY (user_id) REFERENCES "user"(email);
COMMIT;

168
migrate_uid_to_email.py Normal file
View File

@ -0,0 +1,168 @@
#!/usr/bin/env python3
"""
UID Migration Script - Complete migration from username-based to email-based UIDs
This script completes the UID migration by updating remaining username-based UIDs
in the database to use proper email format.
Based on previous migration history:
- devuser -> oib@bubuit.net (as per migration memory)
- oibchello -> oib@chello.at (already completed)
"""
import psycopg2
import sys
from datetime import datetime
# Database connection string
DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream"
def log_message(message):
"""Log message with timestamp"""
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"[{timestamp}] {message}")
def check_current_state(cursor):
"""Check current state of UID migration"""
log_message("Checking current UID state...")
# Check publicstream table
cursor.execute("SELECT uid, username FROM publicstream WHERE uid NOT LIKE '%@%'")
non_email_uids = cursor.fetchall()
if non_email_uids:
log_message(f"Found {len(non_email_uids)} non-email UIDs in publicstream:")
for uid, username in non_email_uids:
log_message(f" - UID: {uid}, Username: {username}")
else:
log_message("All UIDs in publicstream are already in email format")
# Check userquota table
cursor.execute("SELECT uid FROM userquota WHERE uid NOT LIKE '%@%'")
quota_non_email_uids = cursor.fetchall()
if quota_non_email_uids:
log_message(f"Found {len(quota_non_email_uids)} non-email UIDs in userquota:")
for (uid,) in quota_non_email_uids:
log_message(f" - UID: {uid}")
else:
log_message("All UIDs in userquota are already in email format")
return non_email_uids, quota_non_email_uids
def migrate_uids(cursor):
"""Migrate remaining username-based UIDs to email format"""
log_message("Starting UID migration...")
# Migration mapping based on previous migration history
uid_mapping = {
'devuser': 'oib@bubuit.net'
}
migration_count = 0
for old_uid, new_uid in uid_mapping.items():
log_message(f"Migrating UID: {old_uid} -> {new_uid}")
# Update publicstream table
cursor.execute(
"UPDATE publicstream SET uid = %s WHERE uid = %s",
(new_uid, old_uid)
)
publicstream_updated = cursor.rowcount
# Update userquota table
cursor.execute(
"UPDATE userquota SET uid = %s WHERE uid = %s",
(new_uid, old_uid)
)
userquota_updated = cursor.rowcount
# Update uploadlog table (if any records exist)
cursor.execute(
"UPDATE uploadlog SET uid = %s WHERE uid = %s",
(new_uid, old_uid)
)
uploadlog_updated = cursor.rowcount
log_message(f" - Updated {publicstream_updated} records in publicstream")
log_message(f" - Updated {userquota_updated} records in userquota")
log_message(f" - Updated {uploadlog_updated} records in uploadlog")
migration_count += publicstream_updated + userquota_updated + uploadlog_updated
return migration_count
def verify_migration(cursor):
"""Verify migration was successful"""
log_message("Verifying migration...")
# Check for any remaining non-email UIDs
cursor.execute("""
SELECT 'publicstream' as table_name, uid FROM publicstream WHERE uid NOT LIKE '%@%'
UNION ALL
SELECT 'userquota' as table_name, uid FROM userquota WHERE uid NOT LIKE '%@%'
UNION ALL
SELECT 'uploadlog' as table_name, uid FROM uploadlog WHERE uid NOT LIKE '%@%'
""")
remaining_non_email = cursor.fetchall()
if remaining_non_email:
log_message("WARNING: Found remaining non-email UIDs:")
for table_name, uid in remaining_non_email:
log_message(f" - {table_name}: {uid}")
return False
else:
log_message("SUCCESS: All UIDs are now in email format")
return True
def main():
"""Main migration function"""
log_message("Starting UID migration script")
conn = None
cursor = None
try:
# Connect to database
log_message("Connecting to database...")
conn = psycopg2.connect(DATABASE_URL)
cursor = conn.cursor()
# Check current state
non_email_uids, quota_non_email_uids = check_current_state(cursor)
if not non_email_uids and not quota_non_email_uids:
log_message("No migration needed - all UIDs are already in email format")
return
# Perform migration
migration_count = migrate_uids(cursor)
# Commit changes
conn.commit()
log_message(f"Migration committed - {migration_count} records updated")
# Verify migration
if verify_migration(cursor):
log_message("UID migration completed successfully!")
else:
log_message("UID migration completed with warnings - manual review needed")
except psycopg2.Error as e:
log_message(f"Database error: {e}")
if conn:
conn.rollback()
sys.exit(1)
except Exception as e:
log_message(f"Unexpected error: {e}")
if conn:
conn.rollback()
sys.exit(1)
finally:
if cursor:
cursor.close()
if conn:
conn.close()
log_message("Database connection closed")
if __name__ == "__main__":
main()
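Before committing the migration, a read-only pre-flight can show how many rows each mapping will touch. A minimal sketch under the same assumptions (connection string and uid_mapping as in the script above; not part of the commit):

import psycopg2

DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream"
uid_mapping = {'devuser': 'oib@bubuit.net'}

conn = psycopg2.connect(DATABASE_URL)
with conn.cursor() as cur:
    for old_uid in uid_mapping:
        for table in ('publicstream', 'userquota', 'uploadlog'):
            # Count only; nothing is modified
            cur.execute(f"SELECT COUNT(*) FROM {table} WHERE uid = %s", (old_uid,))
            print(table, old_uid, cur.fetchone()[0])
conn.close()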

View File

@@ -9,7 +9,6 @@ class User(SQLModel, table=True):
token_created: datetime = Field(default_factory=datetime.utcnow)
email: str = Field(primary_key=True)
username: str = Field(unique=True, index=True)
-display_name: str = Field(default="", nullable=True)
token: str
confirmed: bool = False
ip: str = Field(default="")
@@ -32,7 +31,7 @@ class UploadLog(SQLModel, table=True):
class DBSession(SQLModel, table=True):
token: str = Field(primary_key=True)
-user_id: str = Field(foreign_key="user.username")
+uid: str = Field(foreign_key="user.email")  # This references User.email (primary key)
ip_address: str
user_agent: str
created_at: datetime = Field(default_factory=datetime.utcnow)
@@ -45,7 +44,6 @@ class PublicStream(SQLModel, table=True):
"""Stores public stream metadata for all users"""
uid: str = Field(primary_key=True)
username: Optional[str] = Field(default=None, index=True)
-display_name: Optional[str] = Field(default=None)
storage_bytes: int = 0
mtime: int = Field(default_factory=lambda: int(datetime.utcnow().timestamp()))
last_updated: Optional[datetime] = Field(default_factory=datetime.utcnow)
@@ -55,26 +53,26 @@ class PublicStream(SQLModel, table=True):
def get_user_by_uid(uid: str) -> Optional[User]:
"""
-Retrieve a user by their UID (username).
+Retrieve a user by their UID (email).
-Note: In this application, the User model uses email as primary key,
-but we're using username as UID for API routes. This function looks up
-users by username.
+Note: In this application, UIDs are consistently email-based.
+The User model uses email as primary key, and all user references
+throughout the system use email format.
Args:
-uid: The username to look up
+uid: The email to look up
Returns:
User object if found, None otherwise
"""
with Session(engine) as session:
-# First try to find by username (which is what we're using as UID)
-statement = select(User).where(User.username == uid)
+# Primary lookup by email (which is what we're using as UID)
+statement = select(User).where(User.email == uid)
user = session.exec(statement).first()
-# If not found by username, try by email (for backward compatibility)
-if not user and '@' in uid:
-statement = select(User).where(User.email == uid)
+# Fallback: try by username for legacy compatibility
+if not user and '@' not in uid:
+statement = select(User).where(User.username == uid)
user = session.exec(statement).first()
return user
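A usage sketch of the new lookup order (the import path is assumed; adjust to the actual module name):

from models import get_user_by_uid

user = get_user_by_uid("oib@chello.at")   # primary path: email lookup
legacy = get_user_by_uid("oibchello")     # no '@' present, falls back to username lookup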
@@ -85,11 +83,10 @@ def verify_session(db: Session, token: str) -> DBSession:
from datetime import datetime
# Find the session
-session = db.exec(
-select(DBSession)
-.where(DBSession.token == token)
-.where(DBSession.is_active == True)  # noqa: E712
-.where(DBSession.expires_at > datetime.utcnow())
+session = db.query(DBSession).filter(
+DBSession.token == token,
+DBSession.is_active == True,  # noqa: E712
+DBSession.expires_at > datetime.utcnow()
).first()
if not session:

View File

@@ -16,27 +16,27 @@ MAGIC_FROM = "noreply@dicta2stream.net"
MAGIC_DOMAIN = "https://dicta2stream.net"
DATA_ROOT = Path("./data")
-def initialize_user_directory(username: str):
+def initialize_user_directory(uid: str):
"""Initialize user directory with a silent stream.opus file"""
try:
-user_dir = DATA_ROOT / username
+user_dir = DATA_ROOT / uid
default_stream_path = DATA_ROOT / "stream.opus"
-print(f"[DEBUG] Initializing user directory: {user_dir.absolute()}")
+# Debug messages disabled
# Create the directory if it doesn't exist
user_dir.mkdir(parents=True, exist_ok=True)
-print(f"[DEBUG] Directory created or already exists: {user_dir.exists()}")
+# Debug messages disabled
# Create stream.opus by copying the default stream.opus file
user_stream_path = user_dir / "stream.opus"
-print(f"[DEBUG] Creating stream.opus at: {user_stream_path.absolute()}")
+# Debug messages disabled
if not user_stream_path.exists():
if default_stream_path.exists():
import shutil
shutil.copy2(default_stream_path, user_stream_path)
-print(f"[DEBUG] Copied default stream.opus to {user_stream_path}")
+# Debug messages disabled
else:
print(f"[ERROR] Default stream.opus not found at {default_stream_path}")
# Fallback: create an empty file to prevent errors
@@ -45,62 +45,69 @@ def initialize_user_directory(username: str):
return True
except Exception as e:
-print(f"Error initializing user directory for {username}: {str(e)}")
+print(f"Error initializing user directory for {uid}: {str(e)}")
return False
@router.post("/register")
-def register(request: Request, email: str = Form(...), user: str = Form(...), db: Session = Depends(get_db)):
+def register(request: Request, email: str = Form(...), user: str = Form(...)):
from sqlalchemy.exc import IntegrityError
from datetime import datetime
-# Check if user exists by email
-existing_user_by_email = db.get(User, email)
-# Check if user exists by username
-stmt = select(User).where(User.username == user)
-existing_user_by_username = db.exec(stmt).first()
-token = str(uuid.uuid4())
-# Case 1: Email and username match in db - it's a login
-if existing_user_by_email and existing_user_by_username and existing_user_by_email.email == existing_user_by_username.email:
-# Update token for existing user (login)
-existing_user_by_email.token = token
-existing_user_by_email.token_created = datetime.utcnow()
-existing_user_by_email.confirmed = False
-existing_user_by_email.ip = request.client.host
-db.add(existing_user_by_email)
-try:
-db.commit()
-except Exception as e:
-db.rollback()
-raise HTTPException(status_code=500, detail=f"Database error: {e}")
-action = "login"
-# Case 2: Email matches but username does not - only one account per email
-elif existing_user_by_email and (not existing_user_by_username or existing_user_by_email.email != existing_user_by_username.email):
-raise HTTPException(status_code=409, detail="📧 This email is already registered with a different username.\nOnly one account per email is allowed.")
-# Case 3: Email does not match but username is in db - username already taken
-elif not existing_user_by_email and existing_user_by_username:
-raise HTTPException(status_code=409, detail="👤 This username is already taken.\nPlease choose a different username.")
-# Case 4: Neither email nor username exist - create new user
-elif not existing_user_by_email and not existing_user_by_username:
-# Register new user
-new_user = User(email=email, username=user, token=token, confirmed=False, ip=request.client.host)
-new_quota = UserQuota(uid=email)  # Use email as UID for quota tracking
-db.add(new_user)
-db.add(new_quota)
-try:
-# First commit the user to the database
-db.commit()
-# Only after successful commit, initialize the user directory
-initialize_user_directory(user)
+# Use the database session context manager
+with get_db() as db:
+try:
+# Check if user exists by email
+existing_user_by_email = db.get(User, email)
+# Check if user exists by username
+existing_user_by_username = db.query(User).filter(User.username == user).first()
+token = str(uuid.uuid4())
+action = None
+# Case 1: Email and username match in db - it's a login
+if existing_user_by_email and existing_user_by_username and existing_user_by_email.email == existing_user_by_username.email:
+# Update token for existing user (login)
+existing_user_by_email.token = token
+existing_user_by_email.token_created = datetime.utcnow()
+existing_user_by_email.confirmed = False
+existing_user_by_email.ip = request.client.host
+db.add(existing_user_by_email)
+db.commit()
+action = "login"
+# Case 2: Email matches but username does not - only one account per email
+elif existing_user_by_email and (not existing_user_by_username or existing_user_by_email.email != existing_user_by_username.email):
+raise HTTPException(status_code=409, detail="📧 This email is already registered with a different username.\nOnly one account per email is allowed.")
+# Case 3: Email does not match but username is in db - username already taken
+elif not existing_user_by_email and existing_user_by_username:
+raise HTTPException(status_code=409, detail="👤 This username is already taken.\nPlease choose a different username.")
+# Case 4: Neither email nor username exist - create new user
+elif not existing_user_by_email and not existing_user_by_username:
+# Register new user
+new_user = User(email=email, username=user, token=token, confirmed=False, ip=request.client.host)
+new_quota = UserQuota(uid=email)  # Use email as UID for quota tracking
+db.add(new_user)
+db.add(new_quota)
+db.commit()
+action = "register"
+# Initialize user directory after successful registration
+if not initialize_user_directory(email):
+print(f"[WARNING] Failed to initialize user directory for {email}")
+# If we get here, we've either logged in or registered successfully
+if action not in ["login", "register"]:
+raise HTTPException(status_code=400, detail="Invalid registration request")
+# Store the email for use after the session is committed
+user_email = email
+# Only after successful commit, initialize the user directory
+initialize_user_directory(email)
except Exception as e:
db.rollback()
if isinstance(e, IntegrityError):
@@ -118,15 +125,10 @@ def register(request: Request, email: str = Form(...), user: str = Form(...), db
else:
raise HTTPException(status_code=500, detail=f"Database error: {e}")
-action = "registration"
-else:
-# This should not happen, but handle it gracefully
-raise HTTPException(status_code=500, detail="Unexpected error during registration.")
-# Send magic link with appropriate message based on action
-msg = EmailMessage()
-msg["From"] = MAGIC_FROM
-msg["To"] = email
+# Send magic link with appropriate message based on action
+msg = EmailMessage()
+msg["From"] = MAGIC_FROM
+msg["To"] = email
if action == "login":
msg["Subject"] = "Your magic login link"
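A client-side sketch of the register/login flow (host, email, and username are placeholders; httpx is one possible HTTP client, not part of the commit):

import httpx

resp = httpx.post(
    "https://dicta2stream.net/register",
    data={"email": "user@example.com", "user": "exampleuser"},  # form fields expected by the route
)
resp.raise_for_status()  # a 409 here signals the email/username conflicts (cases 2 and 3 above)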

simple_db_cleanup.py Normal file

@@ -0,0 +1,107 @@
#!/usr/bin/env python3
"""
Simple Database Cleanup Script
Uses the provided connection string to fix legacy data issues
"""
import psycopg2
import sys
# Database connection string provided by user
DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream"
def execute_query(conn, query, description):
"""Execute a query and report results"""
print(f"\n{description}")
print(f"Query: {query}")
print("[DEBUG] Starting query execution...")
try:
print("[DEBUG] Creating cursor...")
with conn.cursor() as cur:
print("[DEBUG] Executing query...")
cur.execute(query)
print("[DEBUG] Query executed successfully")
if query.strip().upper().startswith('SELECT'):
print("[DEBUG] Fetching results...")
rows = cur.fetchall()
print(f"Result: {len(rows)} rows")
for row in rows:
print(f" {row}")
else:
print("[DEBUG] Committing transaction...")
conn.commit()
print(f"✅ Success: {cur.rowcount} rows affected")
print("[DEBUG] Query completed successfully")
return True
except Exception as e:
print(f"❌ Error: {e}")
print(f"[DEBUG] Error type: {type(e).__name__}")
print("[DEBUG] Rolling back transaction...")
conn.rollback()
return False
def main():
"""Execute database cleanup step by step"""
print("=== DATABASE LEGACY DATA CLEANUP ===")
print(f"Attempting to connect to: {DATABASE_URL}")
try:
print("[DEBUG] Creating database connection...")
conn = psycopg2.connect(DATABASE_URL)
print("✅ Connected to database successfully")
print(f"[DEBUG] Connection status: {conn.status}")
print(f"[DEBUG] Database info: {conn.get_dsn_parameters()}")
# Step 1: Check current state
print("\n=== STEP 1: Check Current State ===")
execute_query(conn, 'SELECT email, username, display_name FROM "user"', "Check user table")
execute_query(conn, 'SELECT COUNT(*) as expired_active FROM dbsession WHERE expires_at < NOW() AND is_active = true', "Check expired sessions")
# Step 2: Mark expired sessions as inactive (this was successful before)
print("\n=== STEP 2: Fix Expired Sessions ===")
execute_query(conn, 'UPDATE dbsession SET is_active = false WHERE expires_at < NOW() AND is_active = true', "Mark expired sessions inactive")
# Step 3: Handle foreign key constraint by dropping it temporarily
print("\n=== STEP 3: Handle Foreign Key Constraint ===")
execute_query(conn, 'ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey', "Drop foreign key constraint")
# Step 4: Update user table
print("\n=== STEP 4: Update User Table ===")
execute_query(conn, """UPDATE "user"
SET username = email,
display_name = CASE
WHEN display_name = '' OR display_name IS NULL
THEN split_part(email, '@', 1)
ELSE display_name
END
WHERE email = 'oib@chello.at'""", "Update user username to email")
# Step 5: Update session references
print("\n=== STEP 5: Update Session References ===")
execute_query(conn, "UPDATE dbsession SET user_id = 'oib@chello.at' WHERE user_id = 'oibchello'", "Update session user_id")
# Step 6: Recreate foreign key constraint
print("\n=== STEP 6: Recreate Foreign Key ===")
execute_query(conn, 'ALTER TABLE dbsession ADD CONSTRAINT dbsession_user_id_fkey FOREIGN KEY (user_id) REFERENCES "user"(username)', "Recreate foreign key")
# Step 7: Final verification
print("\n=== STEP 7: Final Verification ===")
execute_query(conn, 'SELECT email, username, display_name FROM "user"', "Verify user table")
execute_query(conn, 'SELECT DISTINCT user_id FROM dbsession', "Verify session user_id")
execute_query(conn, 'SELECT uid, username FROM publicstream', "Check publicstream")
print("\n✅ Database cleanup completed successfully!")
except Exception as e:
print(f"❌ Database connection error: {e}")
return 1
finally:
if 'conn' in locals():
conn.close()
return 0
if __name__ == "__main__":
sys.exit(main())
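A follow-up check (sketch, same assumed connection string; not part of the commit) for sessions still referencing a non-email user_id:

import psycopg2

DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream"

conn = psycopg2.connect(DATABASE_URL)
with conn.cursor() as cur:
    # Any remaining legacy references should show up here
    cur.execute("SELECT DISTINCT user_id FROM dbsession WHERE user_id NOT LIKE '%@%'")
    print(cur.fetchall())  # expected after cleanup: []
conn.close()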

View File

@@ -16,6 +16,14 @@ export class AudioPlayer {
this.lastPlayTime = 0;
this.isLoading = false;
this.loadTimeout = null; // For tracking loading timeouts
this.retryCount = 0;
this.maxRetries = 3;
this.retryDelay = 3000; // 3 seconds
this.buffering = false;
this.bufferRetryTimeout = null;
this.lastLoadTime = 0;
this.minLoadInterval = 2000; // 2 seconds between loads
this.pendingLoad = false;
// Create a single audio element that we'll reuse
this.audioElement = new Audio();
@@ -26,6 +34,14 @@ export class AudioPlayer {
this.loadAndPlay = this.loadAndPlay.bind(this);
this.stop = this.stop.bind(this);
this.cleanup = this.cleanup.bind(this);
this.handlePlayError = this.handlePlayError.bind(this);
this.handleStalled = this.handleStalled.bind(this);
this.handleWaiting = this.handleWaiting.bind(this);
this.handlePlaying = this.handlePlaying.bind(this);
this.handleEnded = this.handleEnded.bind(this);
// Set up event listeners
this.setupEventListeners();
// Register with global audio manager to handle stop requests from other players
globalAudioManager.addListener('personal', () => {
@@ -63,14 +79,41 @@
}
async loadAndPlay(uid, button) {
const now = Date.now();
// Prevent rapid successive load attempts
if (this.pendingLoad || (now - this.lastLoadTime < this.minLoadInterval)) {
console.log('[AudioPlayer] Skipping duplicate load request');
return;
}
// Validate UID exists and is in correct format
if (!uid) {
this.handleError(button, 'No UID provided for audio playback');
return;
}
-if (!this.isValidUuid(uid)) {
-this.handleError(button, `Invalid UID format: ${uid}. Expected UUID v4 format.`);
+// For logging purposes
+const requestId = Math.random().toString(36).substr(2, 8);
console.log(`[AudioPlayer] Load request ${requestId} for UID: ${uid}`);
this.pendingLoad = true;
this.lastLoadTime = now;
// If we're in the middle of loading, check if it's for the same UID
if (this.isLoading) {
// If same UID, ignore duplicate request
if (this.currentUid === uid) {
console.log(`[AudioPlayer] Already loading this UID, ignoring duplicate request: ${uid}`);
this.pendingLoad = false;
return;
}
// If different UID, queue the new request
console.log(`[AudioPlayer] Already loading, queuing request for UID: ${uid}`);
setTimeout(() => {
this.pendingLoad = false;
this.loadAndPlay(uid, button);
}, 500);
return;
}
@@ -170,8 +213,10 @@
// Set the source URL with proper encoding and cache-busting timestamp
// Using the format: /audio/{uid}/stream.opus?t={timestamp}
-const timestamp = new Date().getTime();
+// Only update timestamp if we're loading a different UID or after a retry
+const timestamp = this.retryCount > 0 ? new Date().getTime() : this.lastLoadTime;
this.audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus?t=${timestamp}`;
console.log(`[AudioPlayer] Loading audio from URL: ${this.audioUrl} (attempt ${this.retryCount + 1}/${this.maxRetries})`);
console.log('Loading audio from URL:', this.audioUrl);
this.audioElement.src = this.audioUrl;
@@ -312,10 +357,150 @@
}
}
/**
* Set up event listeners for the audio element
*/
setupEventListeners() {
if (!this.audioElement) return;
// Remove any existing listeners to prevent duplicates
this.audioElement.removeEventListener('error', this.handlePlayError);
this.audioElement.removeEventListener('stalled', this.handleStalled);
this.audioElement.removeEventListener('waiting', this.handleWaiting);
this.audioElement.removeEventListener('playing', this.handlePlaying);
this.audioElement.removeEventListener('ended', this.handleEnded);
// Add new listeners
this.audioElement.addEventListener('error', this.handlePlayError);
this.audioElement.addEventListener('stalled', this.handleStalled);
this.audioElement.addEventListener('waiting', this.handleWaiting);
this.audioElement.addEventListener('playing', this.handlePlaying);
this.audioElement.addEventListener('ended', this.handleEnded);
}
/**
* Handle play errors
*/
handlePlayError(event) {
console.error('[AudioPlayer] Playback error:', {
event: event.type,
error: this.audioElement.error,
currentTime: this.audioElement.currentTime,
readyState: this.audioElement.readyState,
networkState: this.audioElement.networkState,
src: this.audioElement.src
});
this.isPlaying = false;
this.buffering = false;
this.pendingLoad = false;
if (this.currentButton) {
this.updateButtonState(this.currentButton, 'error');
}
// Auto-retry logic
if (this.retryCount < this.maxRetries) {
this.retryCount++;
console.log(`Retrying playback (attempt ${this.retryCount}/${this.maxRetries})...`);
setTimeout(() => {
if (this.currentUid && this.currentButton) {
this.loadAndPlay(this.currentUid, this.currentButton);
}
}, this.retryDelay);
} else {
console.error('Max retry attempts reached');
this.retryCount = 0; // Reset for next time
}
}
/**
* Handle stalled audio (buffering issues)
*/
handleStalled() {
console.log('[AudioPlayer] Playback stalled, attempting to recover...');
this.buffering = true;
if (this.bufferRetryTimeout) {
clearTimeout(this.bufferRetryTimeout);
}
this.bufferRetryTimeout = setTimeout(() => {
if (this.buffering) {
console.log('[AudioPlayer] Buffer recovery timeout, attempting to reload...');
if (this.currentUid && this.currentButton) {
// Only retry if we're still supposed to be playing
if (this.isPlaying) {
this.retryCount++;
if (this.retryCount <= this.maxRetries) {
console.log(`[AudioPlayer] Retry ${this.retryCount}/${this.maxRetries} for UID: ${this.currentUid}`);
this.loadAndPlay(this.currentUid, this.currentButton);
} else {
console.error('[AudioPlayer] Max retry attempts reached');
this.retryCount = 0;
this.updateButtonState(this.currentButton, 'error');
}
}
}
}
}, 5000); // 5 second buffer recovery timeout
}
/**
* Handle waiting event (buffering)
*/
handleWaiting() {
console.log('Audio waiting for data...');
this.buffering = true;
if (this.currentButton) {
this.updateButtonState(this.currentButton, 'loading');
}
}
/**
* Handle playing event (playback started/resumed)
*/
handlePlaying() {
console.log('Audio playback started/resumed');
this.buffering = false;
this.retryCount = 0; // Reset retry counter on successful playback
if (this.bufferRetryTimeout) {
clearTimeout(this.bufferRetryTimeout);
this.bufferRetryTimeout = null;
}
if (this.currentButton) {
this.updateButtonState(this.currentButton, 'playing');
}
}
/**
* Handle ended event (playback completed)
*/
handleEnded() {
console.log('Audio playback ended');
this.isPlaying = false;
this.buffering = false;
if (this.currentButton) {
this.updateButtonState(this.currentButton, 'paused');
}
}
/**
* Clean up resources
*/
cleanup() {
// Clear any pending timeouts
if (this.loadTimeout) {
clearTimeout(this.loadTimeout);
this.loadTimeout = null;
}
if (this.bufferRetryTimeout) {
clearTimeout(this.bufferRetryTimeout);
this.bufferRetryTimeout = null;
}
// Update button state if we have a reference to the current button
if (this.currentButton) {
this.updateButtonState(this.currentButton, 'paused');
@@ -324,6 +509,13 @@
// Pause the audio and store the current time
if (this.audioElement) {
try {
// Remove event listeners to prevent memory leaks
this.audioElement.removeEventListener('error', this.handlePlayError);
this.audioElement.removeEventListener('stalled', this.handleStalled);
this.audioElement.removeEventListener('waiting', this.handleWaiting);
this.audioElement.removeEventListener('playing', this.handlePlaying);
this.audioElement.removeEventListener('ended', this.handleEnded);
try {
this.audioElement.pause();
this.lastPlayTime = this.audioElement.currentTime;
@@ -357,6 +549,8 @@
this.currentButton = null;
this.audioUrl = '';
this.isPlaying = false;
this.buffering = false;
this.retryCount = 0;
// Notify global audio manager that personal player has stopped
globalAudioManager.stopPlayback('personal');

static/auth-manager.js Normal file

@@ -0,0 +1,688 @@
/**
* Centralized Authentication Manager
*
* This module consolidates all authentication logic from auth.js, magic-login.js,
* and cleanup-auth.js into a single, maintainable module.
*/
import { showToast } from './toast.js';
class AuthManager {
constructor() {
this.DEBUG_AUTH_STATE = false;
this.AUTH_CHECK_DEBOUNCE = 1000; // 1 second
this.AUTH_CHECK_INTERVAL = 30000; // 30 seconds
this.CACHE_TTL = 5000; // 5 seconds
// Authentication state cache
this.authStateCache = {
timestamp: 0,
value: null,
ttl: this.CACHE_TTL
};
// Track auth check calls
this.lastAuthCheckTime = 0;
this.authCheckCounter = 0;
this.wasAuthenticated = null;
// Bind all methods that will be used as event handlers
this.checkAuthState = this.checkAuthState.bind(this);
this.handleMagicLoginRedirect = this.handleMagicLoginRedirect.bind(this);
this.logout = this.logout.bind(this);
this.deleteAccount = this.deleteAccount.bind(this);
this.handleStorageEvent = this.handleStorageEvent.bind(this);
this.handleVisibilityChange = this.handleVisibilityChange.bind(this);
// Initialize
this.initialize = this.initialize.bind(this);
}
/**
* Validate UID format - must be a valid email address
*/
validateUidFormat(uid) {
if (!uid || typeof uid !== 'string') {
// Debug messages disabled
return false;
}
// Email regex pattern - RFC 5322 compliant basic validation
const emailRegex = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/;
const isValid = emailRegex.test(uid);
if (!isValid) {
// Debug messages disabled
} else {
// Debug messages disabled
}
return isValid;
}
/**
* Sanitize and validate UID - ensures consistent format
*/
sanitizeUid(uid) {
if (!uid || typeof uid !== 'string') {
// Debug messages disabled
return null;
}
// Trim whitespace and convert to lowercase
const sanitized = uid.trim().toLowerCase();
// Validate the sanitized UID
if (!this.validateUidFormat(sanitized)) {
// Debug messages disabled
return null;
}
// Debug messages disabled
return sanitized;
}
/**
* Check if current stored UID is valid and fix if needed
*/
validateStoredUid() {
const storedUid = localStorage.getItem('uid');
if (!storedUid) {
// Debug messages disabled
return null;
}
const sanitizedUid = this.sanitizeUid(storedUid);
if (!sanitizedUid) {
// Debug messages disabled
this.clearAuthState();
return null;
}
// Update stored UID if sanitization changed it
if (sanitizedUid !== storedUid) {
// Debug messages disabled
localStorage.setItem('uid', sanitizedUid);
// Update cookies as well
document.cookie = `uid=${sanitizedUid}; path=/; SameSite=Lax; Secure`;
}
return sanitizedUid;
}
/**
* Get cookie value by name
*/
getCookieValue(name) {
const value = `; ${document.cookie}`;
const parts = value.split(`; ${name}=`);
if (parts.length === 2) {
return parts.pop().split(';').shift();
}
return null;
}
/**
* Initialize the authentication manager
*/
async initialize() {
// Debug messages disabled
// Validate stored UID format and fix if needed
const validUid = this.validateStoredUid();
if (validUid) {
// Debug messages disabled
} else {
// Debug messages disabled
}
// Handle magic link login if present
await this.handleMagicLoginRedirect();
// Setup authentication state polling
this.setupAuthStatePolling();
// Setup event listeners
document.addEventListener('visibilitychange', this.handleVisibilityChange);
this.setupEventListeners();
// Debug messages disabled
}
/**
* Fetch user information from the server
*/
async fetchUserInfo() {
try {
// Get the auth token from cookies
const authToken = this.getCookieValue('authToken') || localStorage.getItem('authToken');
// Debug messages disabled
const headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
};
// Add Authorization header if we have a token
if (authToken) {
headers['Authorization'] = `Bearer ${authToken}`;
// Debug messages disabled
} else {
// Debug messages disabled
}
// Debug messages disabled
const response = await fetch('/api/me', {
method: 'GET',
credentials: 'include',
headers: headers
});
// Debug messages disabled
if (response.ok) {
const contentType = response.headers.get('content-type');
// Debug messages disabled
if (contentType && contentType.includes('application/json')) {
const userInfo = await response.json();
// Debug messages disabled
return userInfo;
} else {
const text = await response.text();
// Debug messages disabled
}
} else {
const errorText = await response.text();
// Debug messages disabled
}
return null;
} catch (error) {
// Debug messages disabled
return null;
}
}
/**
* Set authentication state in localStorage and cookies
*/
setAuthState(userEmail, username, authToken = null) {
// Debug messages disabled
// Validate and sanitize the UID (email)
const sanitizedUid = this.sanitizeUid(userEmail);
if (!sanitizedUid) {
// Debug messages disabled
throw new Error(`Invalid UID format: ${userEmail}. UID must be a valid email address.`);
}
// Validate username (basic check)
if (!username || typeof username !== 'string' || username.trim().length === 0) {
// Debug messages disabled
throw new Error(`Invalid username: ${username}. Username cannot be empty.`);
}
const sanitizedUsername = username.trim();
// Generate auth token if not provided
if (!authToken) {
authToken = 'token-' + Math.random().toString(36).substring(2, 15);
}
// Debug messages disabled
// Set localStorage for client-side access (not sent to server)
localStorage.setItem('uid', sanitizedUid); // Primary UID is email
localStorage.setItem('username', sanitizedUsername); // Username for display
localStorage.setItem('uid_time', Date.now().toString());
// Set cookies for server authentication (sent with requests)
document.cookie = `uid=${encodeURIComponent(sanitizedUid)}; path=/; SameSite=Lax`;
document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`;
// Note: isAuthenticated is determined by presence of valid authToken, no need to duplicate
// Clear cache to force refresh
this.authStateCache.timestamp = 0;
}
/**
* Clear authentication state
*/
clearAuthState() {
// Debug messages disabled
// Clear localStorage (client-side data only)
const authKeys = ['uid', 'username', 'uid_time'];
authKeys.forEach(key => localStorage.removeItem(key));
// Clear cookies
document.cookie.split(';').forEach(cookie => {
const eqPos = cookie.indexOf('=');
const name = eqPos > -1 ? cookie.substr(0, eqPos).trim() : cookie.trim();
document.cookie = `${name}=;expires=Thu, 01 Jan 1970 00:00:00 GMT;path=/; SameSite=Lax`;
});
// Clear cache
this.authStateCache.timestamp = 0;
}
/**
* Check if user is currently authenticated
*/
isAuthenticated() {
const now = Date.now();
// Use cached value if still valid
if (this.authStateCache.timestamp > 0 &&
(now - this.authStateCache.timestamp) < this.authStateCache.ttl) {
return this.authStateCache.value;
}
// Check authentication state - simplified approach
const hasUid = !!(document.cookie.includes('uid=') || localStorage.getItem('uid'));
const hasAuthToken = !!document.cookie.includes('authToken=');
const isAuth = hasUid && hasAuthToken;
// Update cache
this.authStateCache.timestamp = now;
this.authStateCache.value = isAuth;
return isAuth;
}
/**
* Get current user data
*/
getCurrentUser() {
if (!this.isAuthenticated()) {
return null;
}
return {
uid: localStorage.getItem('uid'),
email: localStorage.getItem('uid'), // uid is the email
username: localStorage.getItem('username'),
authToken: this.getCookieValue('authToken') // authToken is in cookies
};
}
/**
* Handle magic link login redirect
*/
async handleMagicLoginRedirect() {
const params = new URLSearchParams(window.location.search);
// Handle secure token-based magic login only
const token = params.get('token');
if (token) {
// Debug messages disabled
// Clean up URL immediately
const url = new URL(window.location.href);
url.searchParams.delete('token');
window.history.replaceState({}, document.title, url.pathname + url.search);
await this.processTokenLogin(token);
return true;
}
return false;
}
/**
* Process token-based login
*/
async processTokenLogin(token) {
try {
// Debug messages disabled
const formData = new FormData();
formData.append('token', token);
// Debug messages disabled
const response = await fetch('/magic-login', {
method: 'POST',
body: formData,
});
// Debug messages disabled
// Handle successful token login response
const contentType = response.headers.get('content-type');
// Debug messages disabled
if (contentType && contentType.includes('application/json')) {
const data = await response.json();
// Debug messages disabled
if (data && data.success && data.user) {
// Debug messages disabled
// Use the user data and token from the response
const { email, username } = data.user;
const authToken = data.token; // Get token from JSON response
// Debug messages disabled
// Set auth state with the token from the response
this.setAuthState(email, username, authToken);
this.updateUIState(true);
await this.initializeUserSession(username, email);
showToast('✅ Login successful!');
this.navigateToProfile();
return;
} else {
// Debug messages disabled
throw new Error('Invalid user data received from server');
}
} else {
const text = await response.text();
// Debug messages disabled
throw new Error(`Unexpected response format: ${text || 'No details available'}`);
}
} catch (error) {
// Debug messages disabled
showToast(`Login failed: ${error.message}`, 'error');
}
}
/**
* Initialize user session after login
*/
async initializeUserSession(username, userEmail) {
// Initialize dashboard
if (window.initDashboard) {
await window.initDashboard(username);
} else {
// Debug messages disabled
}
// Fetch and display file list
if (window.fetchAndDisplayFiles) {
// Debug messages disabled
await window.fetchAndDisplayFiles(userEmail);
} else {
// Debug messages disabled
}
}
/**
* Navigate to user profile
*/
navigateToProfile() {
if (window.showOnly) {
// Debug messages disabled
window.showOnly('me-page');
} else if (window.location.hash !== '#me-page') {
window.location.hash = '#me-page';
}
}
/**
* Update UI state based on authentication
*/
updateUIState(isAuthenticated) {
if (isAuthenticated) {
document.body.classList.add('authenticated');
document.body.classList.remove('guest');
// Note: Removed auto-loading of profile stream to prevent auto-play on page load
// Profile stream will only play when user clicks the play button
} else {
document.body.classList.remove('authenticated');
document.body.classList.add('guest');
}
this.updateAccountDeletionVisibility(isAuthenticated);
// Force reflow
void document.body.offsetHeight;
}
/**
* Update account deletion section visibility
*/
updateAccountDeletionVisibility(isAuthenticated) {
const accountDeletionSection = document.getElementById('account-deletion-section');
const deleteAccountFromPrivacy = document.getElementById('delete-account-from-privacy');
if (isAuthenticated) {
this.showElement(accountDeletionSection);
this.showElement(deleteAccountFromPrivacy);
} else {
this.hideElement(accountDeletionSection);
this.hideElement(deleteAccountFromPrivacy);
}
}
showElement(element) {
if (element) {
element.style.display = 'block';
element.style.visibility = 'visible';
}
}
hideElement(element) {
if (element) {
element.style.display = 'none';
}
}
/**
* Check authentication state with caching and debouncing
*/
checkAuthState(force = false) {
const now = Date.now();
// Debounce frequent calls
if (!force && (now - this.lastAuthCheckTime) < this.AUTH_CHECK_DEBOUNCE) {
return this.authStateCache.value;
}
this.lastAuthCheckTime = now;
this.authCheckCounter++;
if (this.DEBUG_AUTH_STATE) {
// Debug messages disabled
}
const isAuthenticated = this.isAuthenticated();
// Only update UI if state changed or forced
if (force || this.wasAuthenticated !== isAuthenticated) {
if (this.DEBUG_AUTH_STATE) {
// Debug messages disabled
}
// Handle logout detection
if (this.wasAuthenticated === true && isAuthenticated === false) {
// Debug messages disabled
this.logout();
return false;
}
this.updateUIState(isAuthenticated);
this.wasAuthenticated = isAuthenticated;
}
return isAuthenticated;
}
/**
* Setup authentication state polling
*/
setupAuthStatePolling() {
// Initial check
this.checkAuthState(true);
// Periodic checks
setInterval(() => {
this.checkAuthState(!document.hidden);
}, this.AUTH_CHECK_INTERVAL);
// Storage event listener
window.addEventListener('storage', this.handleStorageEvent);
// Visibility change listener
document.addEventListener('visibilitychange', this.handleVisibilityChange);
}
/**
* Handle storage events
*/
handleStorageEvent(e) {
if (['isAuthenticated', 'authToken', 'uid'].includes(e.key)) {
this.checkAuthState(true);
}
}
/**
* Handle visibility change events
*/
handleVisibilityChange() {
if (!document.hidden) {
this.checkAuthState(true);
}
}
/**
* Setup event listeners
*/
setupEventListeners() {
document.addEventListener('click', (e) => {
// Delete account buttons
if (e.target.closest('#delete-account') || e.target.closest('#delete-account-from-privacy')) {
this.deleteAccount(e);
return;
}
});
}
/**
* Delete user account
*/
async deleteAccount(e) {
if (e) e.preventDefault();
if (this.deleteAccount.inProgress) return;
if (!confirm('Are you sure you want to delete your account?\nThis action is permanent.')) {
return;
}
this.deleteAccount.inProgress = true;
const deleteBtn = e?.target.closest('button');
const originalText = deleteBtn?.textContent;
if (deleteBtn) {
deleteBtn.disabled = true;
deleteBtn.textContent = 'Deleting...';
}
try {
const response = await fetch('/api/delete-account', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
credentials: 'include',
body: JSON.stringify({ uid: localStorage.getItem('uid') })
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({ detail: 'Failed to delete account.' }));
throw new Error(errorData.detail);
}
showToast('Account deleted successfully.', 'success');
this.logout();
} catch (error) {
// Debug messages disabled
showToast(error.message, 'error');
} finally {
this.deleteAccount.inProgress = false;
if (deleteBtn) {
deleteBtn.disabled = false;
deleteBtn.textContent = originalText;
}
}
}
/**
* Logout user
*/
logout() {
// Debug messages disabled
this.clearAuthState();
window.location.href = '/';
}
/**
* Cleanup authentication state (for migration/debugging)
*/
async cleanupAuthState(manualEmail = null) {
// Debug messages disabled
let userEmail = manualEmail;
// Try to get email from server if not provided
if (!userEmail) {
const userInfo = await this.fetchUserInfo();
userEmail = userInfo?.email;
if (!userEmail) {
userEmail = prompt('Please enter your email address (e.g., oib@chello.at):');
if (!userEmail || !userEmail.includes('@')) {
// Debug messages disabled
return { success: false, error: 'Invalid email' };
}
}
}
if (!userEmail) {
// Debug messages disabled
return { success: false, error: 'No email available' };
}
// Get current username for reference
const currentUsername = localStorage.getItem('username') || localStorage.getItem('uid');
// Clear and reset authentication state
this.clearAuthState();
this.setAuthState(userEmail, currentUsername || userEmail);
// Debug messages disabled
// Debug messages disabled
// Refresh if on profile page
if (window.location.hash === '#me-page') {
window.location.reload();
}
return {
email: userEmail,
username: currentUsername,
success: true
};
}
/**
* Destroy the authentication manager
*/
destroy() {
window.removeEventListener('storage', this.handleStorageEvent);
document.removeEventListener('visibilitychange', this.handleVisibilityChange);
}
}
// Create and export singleton instance
const authManager = new AuthManager();
// Export for global access
window.authManager = authManager;
export default authManager;
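checkAuthState() combines a short-lived cache with debouncing so repeated callers do not hammer the underlying check. The same pattern sketched in Python for illustration only (names invented; not part of the commit):

import time

class CachedCheck:
    def __init__(self, check, ttl=5.0, debounce=1.0):
        self.check = check          # the expensive predicate to wrap
        self.ttl = ttl              # cache lifetime in seconds (CACHE_TTL analogue)
        self.debounce = debounce    # minimum spacing between checks (AUTH_CHECK_DEBOUNCE analogue)
        self.cached_at = 0.0
        self.value = None
        self.last_call = 0.0

    def __call__(self, force=False):
        now = time.monotonic()
        if not force and self.value is not None and now - self.cached_at < self.ttl:
            return self.value        # serve from cache while fresh
        if not force and now - self.last_call < self.debounce:
            return self.value        # debounce rapid successive callers
        self.last_call = now
        self.value = self.check()    # recompute and refresh the cache
        self.cached_at = now
        return self.value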

View File

@@ -1,252 +1,31 @@
-import { showToast } from './toast.js';
+/**
+ * Simplified Authentication Module
+ *
+ * This file now uses the centralized AuthManager for all authentication logic.
+ * Legacy code has been replaced with the new consolidated approach.
+ */
+import authManager from './auth-manager.js';
import { loadProfileStream } from './personal-player.js';
+// Initialize authentication manager when DOM is ready
+document.addEventListener('DOMContentLoaded', async () => {
+// Debug messages disabled
+// Initialize the centralized auth manager
+await authManager.initialize();
+// Make loadProfileStream available globally for auth manager
+window.loadProfileStream = loadProfileStream;
+// Debug messages disabled
document.addEventListener('DOMContentLoaded', () => {
// Track previous authentication state
let wasAuthenticated = null;
// Debug flag - set to false to disable auth state change logs
const DEBUG_AUTH_STATE = false;
// Track auth check calls and cache state
let lastAuthCheckTime = 0;
let authCheckCounter = 0;
const AUTH_CHECK_DEBOUNCE = 1000; // 1 second
let authStateCache = {
timestamp: 0,
value: null,
ttl: 5000 // Cache TTL in milliseconds
};
// Handle magic link login redirect
function handleMagicLoginRedirect() {
const params = new URLSearchParams(window.location.search);
if (params.get('login') === 'success' && params.get('confirmed_uid')) {
const username = params.get('confirmed_uid');
console.log('Magic link login detected for user:', username);
// Update authentication state
localStorage.setItem('uid', username);
localStorage.setItem('confirmed_uid', username);
localStorage.setItem('uid_time', Date.now().toString());
document.cookie = `uid=${encodeURIComponent(username)}; path=/; SameSite=Lax`;
// Update UI state
document.body.classList.add('authenticated');
document.body.classList.remove('guest');
// Update local storage and cookies
localStorage.setItem('isAuthenticated', 'true');
document.cookie = `isAuthenticated=true; path=/; SameSite=Lax`;
// Update URL and history without reloading
window.history.replaceState({}, document.title, window.location.pathname);
// Update navigation
if (typeof injectNavigation === 'function') {
console.log('Updating navigation after magic link login');
injectNavigation(true);
} else {
console.warn('injectNavigation function not available after magic link login');
}
// Navigate to user's profile page
if (window.showOnly) {
console.log('Navigating to me-page');
window.showOnly('me-page');
} else if (window.location.hash !== '#me') {
window.location.hash = '#me';
}
// Auth state will be updated by the polling mechanism
}
}
// Update the visibility of the account deletion section based on authentication state
function updateAccountDeletionVisibility(isAuthenticated) {
const authOnlyWrapper = document.querySelector('#privacy-page .auth-only');
const accountDeletionSection = document.getElementById('account-deletion');
const showElement = (element) => {
if (!element) return;
element.classList.remove('hidden', 'auth-only-hidden');
element.style.display = 'block';
};
const hideElement = (element) => {
if (!element) return;
element.style.display = 'none';
};
if (isAuthenticated) {
const isPrivacyPage = window.location.hash === '#privacy-page';
if (isPrivacyPage) {
if (authOnlyWrapper) showElement(authOnlyWrapper);
if (accountDeletionSection) showElement(accountDeletionSection);
} else {
if (accountDeletionSection) hideElement(accountDeletionSection);
if (authOnlyWrapper) hideElement(authOnlyWrapper);
}
} else {
if (accountDeletionSection) hideElement(accountDeletionSection);
if (authOnlyWrapper) {
const hasOtherContent = Array.from(authOnlyWrapper.children).some(
child => child.id !== 'account-deletion' && child.offsetParent !== null
);
if (!hasOtherContent) {
hideElement(authOnlyWrapper);
}
}
}
}
// Check authentication state and update UI with caching and debouncing
function checkAuthState(force = false) {
const now = Date.now();
if (!force && authStateCache.value !== null && now - authStateCache.timestamp < authStateCache.ttl) {
return authStateCache.value;
}
if (now - lastAuthCheckTime < AUTH_CHECK_DEBOUNCE && !force) {
return wasAuthenticated;
}
lastAuthCheckTime = now;
authCheckCounter++;
const isAuthenticated =
(document.cookie.includes('isAuthenticated=true') || localStorage.getItem('isAuthenticated') === 'true') &&
(document.cookie.includes('uid=') || localStorage.getItem('uid')) &&
!!localStorage.getItem('authToken');
authStateCache = {
timestamp: now,
value: isAuthenticated,
ttl: isAuthenticated ? 30000 : 5000
};
if (isAuthenticated !== wasAuthenticated) {
if (DEBUG_AUTH_STATE) {
console.log('Auth state changed, updating UI...');
}
if (!isAuthenticated && wasAuthenticated) {
console.log('User was authenticated, but is no longer. Triggering logout.');
basicLogout();
return; // Stop further processing after logout
}
if (isAuthenticated) {
document.body.classList.add('authenticated');
document.body.classList.remove('guest');
const uid = localStorage.getItem('uid');
if (uid && (window.location.hash === '#me-page' || window.location.hash === '#me' || window.location.pathname.startsWith('/~'))) {
loadProfileStream(uid);
}
} else {
document.body.classList.remove('authenticated');
document.body.classList.add('guest');
}
updateAccountDeletionVisibility(isAuthenticated);
wasAuthenticated = isAuthenticated;
void document.body.offsetHeight; // Force reflow
}
return isAuthenticated;
}
// Periodically check authentication state with optimized polling
function setupAuthStatePolling() {
checkAuthState(true);
const checkAndUpdate = () => {
checkAuthState(!document.hidden);
};
const AUTH_CHECK_INTERVAL = 30000;
setInterval(checkAndUpdate, AUTH_CHECK_INTERVAL);
const handleStorageEvent = (e) => {
if (['isAuthenticated', 'authToken', 'uid'].includes(e.key)) {
checkAuthState(true);
}
};
window.addEventListener('storage', handleStorageEvent);
const handleVisibilityChange = () => {
if (!document.hidden) {
checkAuthState(true);
}
};
document.addEventListener('visibilitychange', handleVisibilityChange);
return () => {
window.removeEventListener('storage', handleStorageEvent);
document.removeEventListener('visibilitychange', handleVisibilityChange);
};
}
// --- ACCOUNT DELETION ---
const deleteAccount = async (e) => {
if (e) e.preventDefault();
if (deleteAccount.inProgress) return;
if (!confirm('Are you sure you want to delete your account?\nThis action is permanent.')) return;
deleteAccount.inProgress = true;
const deleteBtn = e?.target.closest('button');
const originalText = deleteBtn?.textContent;
if (deleteBtn) {
deleteBtn.disabled = true;
deleteBtn.textContent = 'Deleting...';
}
try {
const response = await fetch('/api/delete-account', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
credentials: 'include',
body: JSON.stringify({ uid: localStorage.getItem('uid') })
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({ detail: 'Failed to delete account.' }));
throw new Error(errorData.detail);
}
showToast('Account deleted successfully.', 'success');
// Perform a full client-side logout and redirect
basicLogout();
} catch (error) {
showToast(error.message, 'error');
} finally {
deleteAccount.inProgress = false;
if (deleteBtn) {
deleteBtn.disabled = false;
deleteBtn.textContent = originalText;
}
}
};
// --- LOGOUT ---
function basicLogout() {
['isAuthenticated', 'uid', 'confirmed_uid', 'uid_time', 'authToken'].forEach(k => localStorage.removeItem(k));
document.cookie.split(';').forEach(c => document.cookie = c.replace(/^ +/, '').replace(/=.*/, `=;expires=${new Date().toUTCString()};path=/`));
window.location.href = '/';
}
// --- DELEGATED EVENT LISTENERS ---
document.addEventListener('click', (e) => {
// Delete Account Buttons
if (e.target.closest('#delete-account') || e.target.closest('#delete-account-from-privacy')) {
deleteAccount(e);
return;
}
});
// --- INITIALIZATION ---
handleMagicLoginRedirect();
setupAuthStatePolling();
});
// Export auth manager for other modules to use
export { authManager };
// Legacy compatibility - expose some functions globally
window.getCurrentUser = () => authManager.getCurrentUser();
window.isAuthenticated = () => authManager.isAuthenticated();
window.logout = () => authManager.logout();
window.cleanupAuthState = (email) => authManager.cleanupAuthState(email);

static/cleanup-auth.js Normal file

@@ -0,0 +1,38 @@
/**
* Simplified Authentication Cleanup Module
*
* This file now uses the centralized AuthManager for authentication cleanup.
* The cleanup logic has been moved to the AuthManager.
*/
import authManager from './auth-manager.js';
/**
* Clean up authentication state - now delegated to AuthManager
* This function is kept for backward compatibility.
*/
async function cleanupAuthState(manualEmail = null) {
console.log('[CLEANUP] Starting authentication state cleanup via AuthManager...');
// Delegate to the centralized AuthManager
return await authManager.cleanupAuthState(manualEmail);
}
// Auto-run cleanup if this script is loaded directly
if (typeof window !== 'undefined') {
// Export function for manual use
window.cleanupAuthState = cleanupAuthState;
// Auto-run if URL contains cleanup parameter
const urlParams = new URLSearchParams(window.location.search);
if (urlParams.get('cleanup') === 'auth') {
cleanupAuthState().then(result => {
if (result && result.success) {
console.log('[CLEANUP] Auto-cleanup completed successfully');
}
});
}
}
// Export for ES6 modules
export { cleanupAuthState };

View File

@@ -34,8 +34,7 @@
#file-list li {
display: flex;
-justify-content: space-between;
-align-items: center;
+flex-direction: column;
padding: 0.75rem 1rem;
margin: 0.5rem 0;
background-color: var(--surface);
@@ -97,36 +96,58 @@
.file-info {
display: flex;
-align-items: center;
+align-items: flex-start;
flex: 1;
-min-width: 0; /* Allows text truncation */
+min-width: 0;
+flex-direction: column;
+gap: 0.25rem;
}
-.file-icon {
-margin-right: 0.75rem;
-font-size: 1.2em;
-flex-shrink: 0;
+.file-header {
+display: flex;
+align-items: flex-start;
+justify-content: space-between;
+width: 100%;
+gap: 0.75rem;
}
.file-name {
-color: var(--primary);
-text-decoration: none;
-white-space: nowrap;
-overflow: hidden;
-text-overflow: ellipsis;
-margin-right: 0.5rem;
-}
-.file-name:hover {
-text-decoration: underline;
+color: var(--text-color);
+word-break: break-word;
+overflow-wrap: break-word;
+line-height: 1.3;
+flex: 1;
+font-size: 0.95em;
}
.file-size {
color: var(--text-muted);
-font-size: 0.85em;
-margin-left: 0.5rem;
+font-size: 0.8em;
white-space: nowrap;
flex-shrink: 0;
+font-style: italic;
+align-self: flex-start;
+}
+.delete-file {
+align-self: center;
+background: none;
+border: none;
+font-size: 1.1em;
+cursor: pointer;
+padding: 0.3rem 0.5rem;
+border-radius: 4px;
+transition: all 0.2s ease;
+color: var(--text-muted);
+margin-top: 0.2rem;
+}
+.delete-file:hover {
+background-color: var(--error);
+color: white;
+transform: scale(1.1);
}
.file-actions {

View File

@ -12,13 +12,14 @@ function getCookie(name) {
// Global state // Global state
let isLoggingOut = false; let isLoggingOut = false;
let dashboardInitialized = false;
async function handleLogout(event) { async function handleLogout(event) {
console.log('[LOGOUT] Logout initiated'); // Debug messages disabled
// Prevent multiple simultaneous logout attempts // Prevent multiple simultaneous logout attempts
if (isLoggingOut) { if (isLoggingOut) {
console.log('[LOGOUT] Logout already in progress'); // Debug messages disabled
return; return;
} }
isLoggingOut = true; isLoggingOut = true;
@ -34,11 +35,11 @@ async function handleLogout(event) {
const authToken = localStorage.getItem('authToken'); const authToken = localStorage.getItem('authToken');
// 1. Clear all client-side state first (most important) // 1. Clear all client-side state first (most important)
console.log('[LOGOUT] Clearing all client-side state'); // Debug messages disabled
// Clear localStorage and sessionStorage // Clear localStorage and sessionStorage
const storageKeys = [ const storageKeys = [
'uid', 'uid_time', 'confirmed_uid', 'last_page', 'uid', 'uid_time', 'last_page',
'isAuthenticated', 'authToken', 'user', 'token', 'sessionid', 'sessionId' 'isAuthenticated', 'authToken', 'user', 'token', 'sessionid', 'sessionId'
]; ];
@ -49,22 +50,22 @@ async function handleLogout(event) {
// Get all current cookies for debugging // Get all current cookies for debugging
const allCookies = document.cookie.split(';'); const allCookies = document.cookie.split(';');
console.log('[LOGOUT] Current cookies before clearing:', allCookies); // Debug messages disabled
// Clear ALL cookies (aggressive approach) // Clear ALL cookies (aggressive approach)
allCookies.forEach(cookie => { allCookies.forEach(cookie => {
const [name] = cookie.trim().split('='); const [name] = cookie.trim().split('=');
if (name) { if (name) {
const cookieName = name.trim(); const cookieName = name.trim();
console.log(`[LOGOUT] Clearing cookie: ${cookieName}`); // Debug messages disabled
// Try multiple clearing strategies to ensure cookies are removed // Try multiple clearing strategies to ensure cookies are removed
const clearStrategies = [ const clearStrategies = [
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; SameSite=Lax;`,
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname};`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname}; SameSite=Lax;`,
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname};`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname}; SameSite=Lax;`,
`${cookieName}=; max-age=0; path=/;`, `${cookieName}=; max-age=0; path=/; SameSite=Lax;`,
`${cookieName}=; max-age=0; path=/; domain=${window.location.hostname};` `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname}; SameSite=Lax;`
]; ];
clearStrategies.forEach(strategy => { clearStrategies.forEach(strategy => {
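For reference, the five strategies above can be wrapped in a single helper. A minimal sketch, assuming the same SameSite=Lax convention the new code adopts (clearCookieEverywhere is an illustrative name, not part of this commit):
// Browsers only delete a cookie when the path/domain of the deletion
// matches how the cookie was originally set, hence the multiple variants.
function clearCookieEverywhere(name) {
    const host = window.location.hostname;
    const expired = 'expires=Thu, 01 Jan 1970 00:00:00 UTC';
    [
        `${name}=; ${expired}; path=/; SameSite=Lax;`,
        `${name}=; ${expired}; path=/; domain=${host}; SameSite=Lax;`,
        `${name}=; ${expired}; path=/; domain=.${host}; SameSite=Lax;`,
        `${name}=; max-age=0; path=/; SameSite=Lax;`,
        `${name}=; max-age=0; path=/; domain=${host}; SameSite=Lax;`
    ].forEach(cookieString => { document.cookie = cookieString; });
}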
@ -75,7 +76,7 @@ async function handleLogout(event) {
// Verify cookies are cleared // Verify cookies are cleared
const remainingCookies = document.cookie.split(';').filter(c => c.trim()); const remainingCookies = document.cookie.split(';').filter(c => c.trim());
console.log('[LOGOUT] Remaining cookies after clearing:', remainingCookies); // Debug messages disabled
// Update UI state // Update UI state
document.body.classList.remove('authenticated', 'logged-in'); document.body.classList.remove('authenticated', 'logged-in');
@ -84,7 +85,7 @@ async function handleLogout(event) {
// 2. Try to invalidate server session (non-blocking) // 2. Try to invalidate server session (non-blocking)
if (authToken) { if (authToken) {
try { try {
console.log('[LOGOUT] Attempting to invalidate server session'); // Debug messages disabled
const controller = new AbortController(); const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 2000); const timeoutId = setTimeout(() => controller.abort(), 2000);
@ -99,18 +100,18 @@ async function handleLogout(event) {
}); });
clearTimeout(timeoutId); clearTimeout(timeoutId);
console.log('[LOGOUT] Server session invalidation completed'); // Debug messages disabled
} catch (error) { } catch (error) {
console.warn('[LOGOUT] Server session invalidation failed (non-critical):', error); // Debug messages disabled
} }
} }
// 3. Final redirect // 3. Final redirect
console.log('[LOGOUT] Redirecting to home page'); // Debug messages disabled
window.location.href = '/?logout=' + Date.now(); window.location.href = '/?logout=' + Date.now();
} catch (error) { } catch (error) {
console.error('[LOGOUT] Unexpected error during logout:', error); // Debug messages disabled
if (window.showToast) { if (window.showToast) {
showToast('Logout failed. Please try again.'); showToast('Logout failed. Please try again.');
} }
@ -138,7 +139,7 @@ async function handleDeleteAccount() {
} }
// Show loading state // Show loading state
const deleteButton = document.getElementById('delete-account-button'); const deleteButton = document.getElementById('delete-account-from-privacy');
const originalText = deleteButton.textContent; const originalText = deleteButton.textContent;
deleteButton.disabled = true; deleteButton.disabled = true;
deleteButton.textContent = 'Deleting...'; deleteButton.textContent = 'Deleting...';
@ -162,7 +163,7 @@ async function handleDeleteAccount() {
// Clear all authentication-related data from localStorage // Clear all authentication-related data from localStorage
const keysToRemove = [ const keysToRemove = [
'uid', 'uid_time', 'confirmed_uid', 'last_page', 'uid', 'uid_time', 'last_page',
'isAuthenticated', 'authToken', 'user', 'token', 'sessionid' 'isAuthenticated', 'authToken', 'user', 'token', 'sessionid'
]; ];
@ -180,11 +181,11 @@ async function handleDeleteAccount() {
// Clear all cookies using multiple strategies // Clear all cookies using multiple strategies
const clearCookie = (cookieName) => { const clearCookie = (cookieName) => {
const clearStrategies = [ const clearStrategies = [
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; SameSite=Lax;`,
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname};`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname}; SameSite=Lax;`,
`${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname};`, `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname}; SameSite=Lax;`,
`${cookieName}=; max-age=0; path=/;`, `${cookieName}=; max-age=0; path=/; SameSite=Lax;`,
`${cookieName}=; max-age=0; path=/; domain=${window.location.hostname};` `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname}; SameSite=Lax;`
]; ];
clearStrategies.forEach(strategy => { clearStrategies.forEach(strategy => {
@ -224,7 +225,7 @@ async function handleDeleteAccount() {
showToast(`Failed to delete account: ${error.message}`); showToast(`Failed to delete account: ${error.message}`);
// Reset button state // Reset button state
const deleteButton = document.getElementById('delete-account-button'); const deleteButton = document.getElementById('delete-account-from-privacy');
if (deleteButton) { if (deleteButton) {
deleteButton.disabled = false; deleteButton.disabled = false;
deleteButton.textContent = '🗑️ Delete Account'; deleteButton.textContent = '🗑️ Delete Account';
@ -251,33 +252,37 @@ function debugElementVisibility(elementId) {
parentDisplay: el.parentElement ? window.getComputedStyle(el.parentElement).display : 'no-parent', parentDisplay: el.parentElement ? window.getComputedStyle(el.parentElement).display : 'no-parent',
parentVisibility: el.parentElement ? window.getComputedStyle(el.parentElement).visibility : 'no-parent', parentVisibility: el.parentElement ? window.getComputedStyle(el.parentElement).visibility : 'no-parent',
rect: el.getBoundingClientRect() rect: el.getBoundingClientRect()
}; }
} }
// Make updateQuotaDisplay available globally
window.updateQuotaDisplay = updateQuotaDisplay;
/** /**
* Initialize the dashboard and handle authentication state * Initialize the dashboard and handle authentication state
*/ */
async function initDashboard() { async function initDashboard(uid = null) {
console.log('[DASHBOARD] Initializing dashboard...'); // Debug messages disabled
try { try {
const guestDashboard = document.getElementById('guest-dashboard'); const guestDashboard = document.getElementById('guest-dashboard');
const userDashboard = document.getElementById('user-dashboard'); const userDashboard = document.getElementById('user-dashboard');
const userUpload = document.getElementById('user-upload-area'); const userUpload = document.getElementById('user-upload-area');
const logoutButton = document.getElementById('logout-button'); const logoutButton = document.getElementById('logout-button');
const deleteAccountButton = document.getElementById('delete-account-button'); const deleteAccountButton = document.getElementById('delete-account-from-privacy');
const fileList = document.getElementById('file-list'); const fileList = document.getElementById('file-list');
if (logoutButton) { // Only attach event listeners once to prevent duplicates
logoutButton.addEventListener('click', handleLogout); if (!dashboardInitialized) {
} if (logoutButton) {
if (deleteAccountButton) { logoutButton.addEventListener('click', handleLogout);
deleteAccountButton.addEventListener('click', (e) => { }
e.preventDefault(); // Delete account button is handled by auth.js delegated event listener
handleDeleteAccount(); // Removed duplicate event listener to prevent double confirmation dialogs
}); dashboardInitialized = true;
} }
const isAuthenticated = (document.cookie.includes('isAuthenticated=true') || localStorage.getItem('isAuthenticated') === 'true'); const effectiveUid = uid || getCookie('uid') || localStorage.getItem('uid');
const isAuthenticated = !!effectiveUid;
if (isAuthenticated) { if (isAuthenticated) {
document.body.classList.add('authenticated'); document.body.classList.add('authenticated');
@ -286,9 +291,11 @@ async function initDashboard() {
if (userUpload) userUpload.style.display = 'block'; if (userUpload) userUpload.style.display = 'block';
if (guestDashboard) guestDashboard.style.display = 'none'; if (guestDashboard) guestDashboard.style.display = 'none';
const uid = getCookie('uid') || localStorage.getItem('uid'); if (window.fetchAndDisplayFiles) {
if (uid && window.fetchAndDisplayFiles) { // Use email-based UID for file operations if available, fallback to effectiveUid
await window.fetchAndDisplayFiles(uid); const fileOperationUid = localStorage.getItem('uid') || effectiveUid; // uid is now email-based
// Debug messages disabled
await window.fetchAndDisplayFiles(fileOperationUid);
} }
} else { } else {
document.body.classList.remove('authenticated'); document.body.classList.remove('authenticated');
@ -297,7 +304,7 @@ async function initDashboard() {
if (userDashboard) userDashboard.style.display = 'none'; if (userDashboard) userDashboard.style.display = 'none';
if (userUpload) userUpload.style.display = 'none'; if (userUpload) userUpload.style.display = 'none';
if (fileList) { if (fileList) {
fileList.innerHTML = `<li class="error-message">Please <a href="/#login" class="login-link">log in</a> to view your files.</li>`; fileList.innerHTML = `<li>Please <a href="/#login" class="login-link">log in</a> to view your files.</li>`;
} }
} }
} catch (e) { } catch (e) {
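The authentication check above no longer relies on an isAuthenticated cookie; the presence of a resolvable uid is the signal. A minimal sketch of that resolution order, assuming the getCookie helper from this file (resolveUid is an illustrative name):
// Resolution order matches initDashboard: explicit argument, then
// cookie, then localStorage; any hit counts as authenticated.
function resolveUid(explicitUid = null) {
    return explicitUid || getCookie('uid') || localStorage.getItem('uid') || null;
}
const isAuthenticated = !!resolveUid();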
@ -326,11 +333,11 @@ async function fetchAndDisplayFiles(uid) {
const fileList = document.getElementById('file-list'); const fileList = document.getElementById('file-list');
if (!fileList) { if (!fileList) {
console.error('[FILES] File list element not found'); // Debug messages disabled
return; return;
} }
console.log(`[FILES] Fetching files for user: ${uid}`); // Debug messages disabled
fileList.innerHTML = '<li class="loading-message">Loading your files...</li>'; fileList.innerHTML = '<li class="loading-message">Loading your files...</li>';
// Prepare headers with auth token if available // Prepare headers with auth token if available
@ -344,44 +351,44 @@ async function fetchAndDisplayFiles(uid) {
headers['Authorization'] = `Bearer ${authToken}`; headers['Authorization'] = `Bearer ${authToken}`;
} }
console.log('[FILES] Making request to /me with headers:', headers); // Debug messages disabled
try { try {
// The backend should handle authentication via session cookies // The backend should handle authentication via session cookies
// We include the auth token in headers if available, but don't rely on it for auth // We include the auth token in headers if available, but don't rely on it for auth
console.log(`[FILES] Making request to /me/${uid} with credentials...`); // Debug messages disabled
const response = await fetch(`/me/${uid}`, { const response = await fetch(`/user-files/${uid}`, {
method: 'GET', method: 'GET',
credentials: 'include', // Important: include cookies for session auth credentials: 'include', // Important: include cookies for session auth
headers: headers headers: headers
}); });
console.log('[FILES] Response status:', response.status); // Debug messages disabled
console.log('[FILES] Response headers:', Object.fromEntries([...response.headers.entries()])); // Debug messages disabled
// Get response as text first to handle potential JSON parsing errors // Get response as text first to handle potential JSON parsing errors
const responseText = await response.text(); const responseText = await response.text();
console.log('[FILES] Raw response text:', responseText); // Debug messages disabled
// Parse the JSON response // Parse the JSON response
let responseData = {}; let responseData = {};
if (responseText && responseText.trim() !== '') { if (responseText && responseText.trim() !== '') {
try { try {
responseData = JSON.parse(responseText); responseData = JSON.parse(responseText);
console.log('[FILES] Successfully parsed JSON response:', responseData); // Debug messages disabled
} catch (e) { } catch (e) {
console.error('[FILES] Failed to parse JSON response. Response text:', responseText); // Debug messages disabled
console.error('[FILES] Error details:', e); // Debug messages disabled
// If we have a non-JSON response but the status is 200, try to handle it // If we have a non-JSON response but the status is 200, try to handle it
if (response.ok) { if (response.ok) {
console.warn('[FILES] Non-JSON response with 200 status, treating as empty response'); // Debug messages disabled
} else { } else {
throw new Error(`Invalid JSON response from server: ${e.message}`); throw new Error(`Invalid JSON response from server: ${e.message}`);
} }
} }
} else { } else {
console.log('[FILES] Empty response received, using empty object'); // Debug messages disabled
} }
// Note: Authentication is handled by the parent component // Note: Authentication is handled by the parent component
@ -390,13 +397,13 @@ async function fetchAndDisplayFiles(uid) {
if (response.ok) { if (response.ok) {
// Check if the response has the expected format // Check if the response has the expected format
if (!responseData || !Array.isArray(responseData.files)) { if (!responseData || !Array.isArray(responseData.files)) {
console.error('[FILES] Invalid response format, expected {files: [...]}:', responseData); // Debug messages disabled
fileList.innerHTML = '<li>Error: Invalid response from server</li>'; fileList.innerHTML = '<li>Error: Invalid response from server</li>';
return; return;
} }
const files = responseData.files; const files = responseData.files;
console.log('[FILES] Files array:', files); // Debug messages disabled
if (files.length === 0) { if (files.length === 0) {
fileList.innerHTML = '<li class="no-files">No files uploaded yet.</li>'; fileList.innerHTML = '<li class="no-files">No files uploaded yet.</li>';
@ -406,68 +413,9 @@ async function fetchAndDisplayFiles(uid) {
// Clear the loading message // Clear the loading message
fileList.innerHTML = ''; fileList.innerHTML = '';
// Track displayed files to prevent duplicates using stored filenames as unique identifiers // Use the new global function to render the files
const displayedFiles = new Set(); window.displayUserFiles(uid, files);
// Add each file to the list
files.forEach(file => {
// Get the stored filename (with UUID) - this is our unique identifier
const storedFileName = file.stored_name || file.name || file;
// Skip if we've already displayed this file
if (displayedFiles.has(storedFileName)) {
console.log(`[FILES] Skipping duplicate file with stored name: ${storedFileName}`);
return;
}
displayedFiles.add(storedFileName);
const fileExt = storedFileName.split('.').pop().toLowerCase();
const fileUrl = `/data/${uid}/${encodeURIComponent(storedFileName)}`;
const fileSize = file.size ? formatFileSize(file.size) : 'N/A';
const listItem = document.createElement('li');
listItem.className = 'file-item';
listItem.setAttribute('data-uid', uid);
// Create file icon based on file extension
let fileIcon = '📄'; // Default icon
if (['mp3', 'wav', 'ogg', 'm4a', 'opus'].includes(fileExt)) {
fileIcon = '🎵';
} else if (['jpg', 'jpeg', 'png', 'gif', 'webp'].includes(fileExt)) {
fileIcon = '🖼️';
} else if (['pdf', 'doc', 'docx', 'txt'].includes(fileExt)) {
fileIcon = '📄';
}
// Use original_name if available, otherwise use the stored filename for display
const displayName = file.original_name || storedFileName;
listItem.innerHTML = `
<div class="file-info">
<span class="file-icon">${fileIcon}</span>
<a href="${fileUrl}" class="file-name" target="_blank" rel="noopener noreferrer">
${displayName}
</a>
<span class="file-size">${fileSize}</span>
</div>
<div class="file-actions">
<a href="${fileUrl}" class="download-button" download>
<span class="button-icon">⬇️</span>
<span class="button-text">Download</span>
</a>
<button class="delete-file" data-filename="${storedFileName}" data-original-name="${displayName}">
<span class="button-icon">🗑️</span>
<span class="button-text">Delete</span>
</button>
</div>
`;
// Delete button handler will be handled by event delegation
// No need to add individual event listeners here
fileList.appendChild(listItem);
});
} else { } else {
// Handle non-OK responses // Handle non-OK responses
if (response.status === 401) { if (response.status === 401) {
@ -482,10 +430,10 @@ async function fetchAndDisplayFiles(uid) {
Error loading files (${response.status}). Please try again later. Error loading files (${response.status}). Please try again later.
</li>`; </li>`;
} }
console.error('[FILES] Server error:', response.status, response.statusText); // Debug messages disabled
} }
} catch (error) { } catch (error) {
console.error('[FILES] Error fetching files:', error); // Debug messages disabled
const fileList = document.getElementById('file-list'); const fileList = document.getElementById('file-list');
if (fileList) { if (fileList) {
fileList.innerHTML = ` fileList.innerHTML = `
@ -496,6 +444,69 @@ async function fetchAndDisplayFiles(uid) {
} }
} }
// Function to update the quota display
async function updateQuotaDisplay(uid) {
// Debug messages disabled
try {
const authToken = localStorage.getItem('authToken');
const headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
};
if (authToken) {
headers['Authorization'] = `Bearer ${authToken}`;
}
// Debug messages disabled
// Fetch user info which includes quota
const response = await fetch(`/me/${uid}`, {
method: 'GET',
credentials: 'include',
headers: headers
});
// Debug messages disabled
if (response.ok) {
const userData = await response.json();
// Debug messages disabled
// Update the quota display
const quotaText = document.getElementById('quota-text');
const quotaBar = document.getElementById('quota-bar');
// Debug messages disabled
// Debug messages disabled
if (quotaText && userData.quota) {
const usedMB = (userData.quota.used_bytes / (1024 * 1024)).toFixed(2);
const maxMB = (userData.quota.max_bytes / (1024 * 1024)).toFixed(2);
const percentage = userData.quota.percentage || 0;
// Debug messages disabled
const quotaDisplayText = `${usedMB} MB of ${maxMB} MB (${percentage}%)`;
quotaText.textContent = quotaDisplayText;
// Debug messages disabled
if (quotaBar) {
quotaBar.value = percentage;
// Debug messages disabled
}
} else {
// Debug messages disabled
}
} else {
// Debug messages disabled
}
} catch (error) {
// Debug messages disabled
}
}
// Make fetchAndDisplayFiles globally accessible
window.fetchAndDisplayFiles = fetchAndDisplayFiles;
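A worked example of the quota arithmetic in updateQuotaDisplay, assuming the quota object shape read above (the byte values are illustrative):
// 44564480 bytes is exactly 42.5 MiB; 104857600 bytes is exactly 100 MiB.
const quota = { used_bytes: 44564480, max_bytes: 104857600, percentage: 42.5 };
const usedMB = (quota.used_bytes / (1024 * 1024)).toFixed(2);  // "42.50"
const maxMB = (quota.max_bytes / (1024 * 1024)).toFixed(2);    // "100.00"
console.log(`${usedMB} MB of ${maxMB} MB (${quota.percentage}%)`);
// -> "42.50 MB of 100.00 MB (42.5%)"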
// Function to handle file deletion // Function to handle file deletion
async function deleteFile(uid, fileName, listItem, displayName = '') { async function deleteFile(uid, fileName, listItem, displayName = '') {
const fileToDelete = displayName || fileName; const fileToDelete = displayName || fileName;
@ -519,7 +530,7 @@ async function deleteFile(uid, fileName, listItem, displayName = '') {
throw new Error('User not authenticated. Please log in again.'); throw new Error('User not authenticated. Please log in again.');
} }
console.log(`[DELETE] Attempting to delete file: ${fileName} for user: ${uid}`); // Debug messages disabled
const authToken = localStorage.getItem('authToken'); const authToken = localStorage.getItem('authToken');
const headers = { 'Content-Type': 'application/json' }; const headers = { 'Content-Type': 'application/json' };
@ -553,7 +564,7 @@ async function deleteFile(uid, fileName, listItem, displayName = '') {
fileList.innerHTML = '<li class="no-files">No files uploaded yet.</li>'; fileList.innerHTML = '<li class="no-files">No files uploaded yet.</li>';
} }
} catch (error) { } catch (error) {
console.error('[DELETE] Error deleting file:', error); // Debug messages disabled
showToast(`Error deleting "${fileToDelete}": ${error.message}`, 'error'); showToast(`Error deleting "${fileToDelete}": ${error.message}`, 'error');
// Reset the button state if there was an error // Reset the button state if there was an error
@ -575,7 +586,7 @@ function initFileUpload() {
const fileInput = document.getElementById('fileInputUser'); const fileInput = document.getElementById('fileInputUser');
if (!uploadArea || !fileInput) { if (!uploadArea || !fileInput) {
console.warn('[UPLOAD] Required elements not found for file upload'); // Debug messages disabled
return; return;
} }
@ -630,7 +641,7 @@ function initFileUpload() {
} }
} catch (error) { } catch (error) {
console.error('[UPLOAD] Error uploading file:', error); // Debug messages disabled
showToast(`Upload failed: ${error.message}`, 'error'); showToast(`Upload failed: ${error.message}`, 'error');
} finally { } finally {
// Reset file input and restore upload area text // Reset file input and restore upload area text
@ -679,9 +690,15 @@ function initFileUpload() {
} }
// Main initialization when the DOM is fully loaded // Main initialization when the DOM is fully loaded
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', async () => {
// Initialize dashboard components // Initialize dashboard components
initDashboard(); // initFileUpload is called from within initDashboard await initDashboard(); // initFileUpload is called from within initDashboard
// Update quota display if user is logged in
const uid = localStorage.getItem('uid');
if (uid) {
updateQuotaDisplay(uid);
}
// Delegated event listener for clicks on the document // Delegated event listener for clicks on the document
document.addEventListener('click', (e) => { document.addEventListener('click', (e) => {
@ -701,10 +718,10 @@ document.addEventListener('DOMContentLoaded', () => {
const listItem = deleteButton.closest('.file-item'); const listItem = deleteButton.closest('.file-item');
if (!listItem) return; if (!listItem) return;
const uid = localStorage.getItem('uid') || localStorage.getItem('confirmed_uid'); const uid = localStorage.getItem('uid');
if (!uid) { if (!uid) {
showToast('You need to be logged in to delete files', 'error'); showToast('You need to be logged in to delete files', 'error');
console.error('[DELETE] No UID found in localStorage'); // Debug messages disabled
return; return;
} }
@ -715,8 +732,9 @@ document.addEventListener('DOMContentLoaded', () => {
} }
}); });
// Make fetchAndDisplayFiles available globally // Make dashboard functions available globally
window.fetchAndDisplayFiles = fetchAndDisplayFiles; window.fetchAndDisplayFiles = fetchAndDisplayFiles;
window.initDashboard = initDashboard;
// Login/Register (guest) // Login/Register (guest)
const regForm = document.getElementById('register-form'); const regForm = document.getElementById('register-form');
@ -757,7 +775,7 @@ document.addEventListener('DOMContentLoaded', () => {
regForm.reset(); regForm.reset();
} else { } else {
showToast(`Error: ${data.detail || 'Unknown error occurred'}`, 'error'); showToast(`Error: ${data.detail || 'Unknown error occurred'}`, 'error');
console.error('Registration failed:', data); // Debug messages disabled
} }
} catch (parseError) { } catch (parseError) {
console.error('Error parsing response:', parseError); console.error('Error parsing response:', parseError);

static/file-display.js Normal file
View File

@ -0,0 +1,220 @@
// This function is responsible for rendering the list of files to the DOM.
// It is globally accessible via window.displayUserFiles.
window.displayUserFiles = function(uid, files) {
const fileList = document.getElementById('file-list');
if (!fileList) {
// Debug messages disabled
return;
}
if (!files || files.length === 0) {
fileList.innerHTML = '<li>You have no uploaded files yet.</li>';
return;
}
const fragment = document.createDocumentFragment();
const displayedFiles = new Set();
files.forEach(file => {
// Use original_name for display, stored_name for operations.
let displayName = file.original_name || file.stored_name || 'Unnamed File';
const storedFileName = file.stored_name || file.original_name;
// No UUID pattern replacement: always show the original_name from backend.
// Skip if no valid identifier is found or if it's a duplicate.
if (!storedFileName || displayedFiles.has(storedFileName)) {
return;
}
displayedFiles.add(storedFileName);
const listItem = document.createElement('li');
const fileUrl = `/user-uploads/${uid}/${encodeURIComponent(storedFileName)}`;
const fileSize = file.size ? (file.size / 1024 / 1024).toFixed(2) + ' MB' : 'N/A';
let fileIcon = '🎵'; // Default icon
const fileExt = displayName.split('.').pop().toLowerCase();
if (['mp3', 'wav', 'ogg', 'flac', 'm4a'].includes(fileExt)) {
fileIcon = '🎵';
} else if (['jpg', 'jpeg', 'png', 'gif', 'svg'].includes(fileExt)) {
fileIcon = '🖼️';
} else if (['pdf', 'doc', 'docx', 'txt'].includes(fileExt)) {
fileIcon = '📄';
}
listItem.innerHTML = `
<div class="file-info">
<div class="file-header">
<span class="file-name">${displayName}</span>
<span class="file-size">${fileSize}</span>
</div>
</div>
<button class="delete-file" title="Delete file" data-filename="${storedFileName}" data-display-name="${displayName}">🗑️</button>
`;
fragment.appendChild(listItem);
});
fileList.appendChild(fragment);
};
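An illustrative invocation of the renderer, assuming the file-object shape the code above reads (original_name, stored_name, size in bytes); the uid and filenames are sample values:
const sampleFiles = [
    { original_name: 'demo.opus', stored_name: '12-demo.opus', size: 1048576 }
];
// Renders one <li> showing "demo.opus" and "1.00 MB" into #file-list.
window.displayUserFiles('user@example.com', sampleFiles);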
// Function to handle file deletion
async function deleteFile(uid, fileName, listItem, displayName = '') {
const fileToDelete = displayName || fileName;
if (!confirm(`Are you sure you want to delete "${fileToDelete}"?`)) {
return;
}
// Show loading state
if (listItem) {
listItem.style.opacity = '0.6';
listItem.style.pointerEvents = 'none';
const deleteButton = listItem.querySelector('.delete-file');
if (deleteButton) {
deleteButton.disabled = true;
deleteButton.textContent = '⏳';
}
}
try {
if (!uid) {
throw new Error('User not authenticated. Please log in again.');
}
// Debug messages disabled
const authToken = localStorage.getItem('authToken');
const headers = { 'Content-Type': 'application/json' };
if (authToken) {
headers['Authorization'] = `Bearer ${authToken}`;
}
// Get the email from localStorage (it's the UID)
const email = localStorage.getItem('uid');
if (!email) {
throw new Error('User not authenticated');
}
// The backend expects the full email as the UID in the path
// We need to ensure it's properly encoded for the URL
const username = email;
// Debug messages disabled
// Check if the filename is just a UUID (without log ID prefix)
const uuidPattern = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\.\w+$/i;
let fileToDelete = fileName;
// If the filename is just a UUID, try to find the actual file with log ID prefix
if (uuidPattern.test(fileName)) {
// Debug messages disabled
try {
// First try to get the list of files to find the one with the matching UUID
const filesResponse = await fetch(`/user-files/${uid}`, {
method: 'GET',
headers: headers,
credentials: 'include'
});
if (filesResponse.ok) {
const filesData = await filesResponse.json();
if (filesData.files && Array.isArray(filesData.files)) {
// Look for a file that contains our UUID in its name
const matchingFile = filesData.files.find(f =>
f.stored_name && f.stored_name.includes(fileName)
);
if (matchingFile && matchingFile.stored_name) {
// Debug messages disabled
fileToDelete = matchingFile.stored_name;
}
}
}
} catch (e) {
// Debug messages disabled
// Continue with the original filename if there's an error
}
}
// Use the username in the URL with the correct filename
// Debug messages disabled
const response = await fetch(`/uploads/${username}/${encodeURIComponent(fileToDelete)}`, {
method: 'DELETE',
headers: headers,
credentials: 'include'
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || `HTTP error! status: ${response.status}`);
}
// Remove the file from the UI immediately
if (listItem && listItem.parentNode) {
listItem.parentNode.removeChild(listItem);
}
// Show success message
window.showToast(`Successfully deleted "${fileToDelete}"`, 'success');
// If the file list is now empty, show a message
const fileList = document.getElementById('file-list');
if (fileList && fileList.children.length === 0) {
fileList.innerHTML = '<li class="no-files">No files uploaded yet.</li>';
}
// Refresh the file list and stream
const uid_current = localStorage.getItem('uid');
if (window.fetchAndDisplayFiles) {
// Use email-based UID for file operations if available, fallback to uid_current
const fileOperationUid = localStorage.getItem('uid') || uid_current; // uid is now email-based
// Debug messages disabled
await window.fetchAndDisplayFiles(fileOperationUid);
}
if (window.loadProfileStream) {
await window.loadProfileStream(uid_current);
}
} catch (error) {
// Debug messages disabled
window.showToast(`Error deleting "${fileToDelete}": ${error.message}`, 'error');
// Reset the button state if there was an error
if (listItem) {
listItem.style.opacity = '';
listItem.style.pointerEvents = '';
const deleteButton = listItem.querySelector('.delete-file');
if (deleteButton) {
deleteButton.disabled = false;
deleteButton.textContent = '🗑️';
}
}
}
}
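The UUID-to-stored-name lookup inside deleteFile above can be read as a standalone step: when the caller only knows the bare UUID filename, the file list is consulted to recover the log-ID-prefixed stored name. A sketch under the same assumptions (resolveStoredName is an illustrative name, not part of this commit):
async function resolveStoredName(uid, fileName, headers) {
    const uuidPattern = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\.\w+$/i;
    if (!uuidPattern.test(fileName)) return fileName; // already a full stored name
    const res = await fetch(`/user-files/${uid}`, { headers, credentials: 'include' });
    if (!res.ok) return fileName; // fall back to the original name on error
    const data = await res.json();
    const match = (data.files || []).find(f => f.stored_name && f.stored_name.includes(fileName));
    return (match && match.stored_name) || fileName;
}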
// Add event delegation for delete buttons
document.addEventListener('DOMContentLoaded', () => {
const fileList = document.getElementById('file-list');
if (fileList) {
fileList.addEventListener('click', (e) => {
const deleteButton = e.target.closest('.delete-file');
if (deleteButton) {
e.preventDefault();
e.stopPropagation();
const listItem = deleteButton.closest('li');
if (!listItem) return;
const uid = localStorage.getItem('uid');
if (!uid) {
window.showToast('You need to be logged in to delete files', 'error');
// Debug messages disabled
return;
}
const fileName = deleteButton.getAttribute('data-filename');
const displayName = deleteButton.getAttribute('data-display-name') || fileName;
deleteFile(uid, fileName, listItem, displayName);
}
});
}
});

View File

@ -23,7 +23,7 @@ class GlobalAudioManager {
* @param {Object} playerInstance - Reference to the player instance * @param {Object} playerInstance - Reference to the player instance
*/ */
startPlayback(playerType, uid, playerInstance = null) { startPlayback(playerType, uid, playerInstance = null) {
console.log(`[GlobalAudioManager] startPlayback called by: ${playerType} for UID: ${uid}`); // Debug messages disabled
// If the same player is already playing the same UID, allow it // If the same player is already playing the same UID, allow it
if (this.currentPlayer === playerType && this.currentUid === uid) { if (this.currentPlayer === playerType && this.currentUid === uid) {
return true; return true;
@ -38,7 +38,7 @@ class GlobalAudioManager {
this.currentPlayer = playerType; this.currentPlayer = playerType;
this.currentUid = uid; this.currentUid = uid;
console.log(`Global Audio Manager: ${playerType} player started playing UID: ${uid}`); // Debug messages disabled
return true; return true;
} }
@ -48,7 +48,7 @@ class GlobalAudioManager {
*/ */
stopPlayback(playerType) { stopPlayback(playerType) {
if (this.currentPlayer === playerType) { if (this.currentPlayer === playerType) {
console.log(`Global Audio Manager: ${playerType} player stopped`); // Debug messages disabled
this.currentPlayer = null; this.currentPlayer = null;
this.currentUid = null; this.currentUid = null;
} }
@ -93,7 +93,7 @@ class GlobalAudioManager {
* Notify a specific player type to stop * Notify a specific player type to stop
*/ */
notifyStop(playerType) { notifyStop(playerType) {
console.log(`Global Audio Manager: Notifying ${playerType} player to stop`); // Debug messages disabled
this.listeners.forEach(listener => { this.listeners.forEach(listener => {
if (listener.playerType === playerType) { if (listener.playerType === playerType) {
try { try {

View File

@ -21,9 +21,11 @@
} }
</style> </style>
<link rel="modulepreload" href="/static/sound.js" /> <link rel="modulepreload" href="/static/sound.js" />
<script src="/static/file-display.js?v=3"></script>
<script type="module" src="/static/dashboard.js?v=7"></script>
<script src="/static/streams-ui.js?v=3" type="module"></script> <script src="/static/streams-ui.js?v=3" type="module"></script>
<script src="/static/auth.js?v=2" type="module"></script> <script src="/static/auth.js?v=5" type="module"></script>
<script src="/static/app.js?v=5" type="module"></script> <script src="/static/app.js?v=6" type="module"></script>
</head> </head>
<body> <body>
<header> <header>
@ -66,12 +68,12 @@
<button id="logout-button" class="button">🚪 Log Out</button> <button id="logout-button" class="button">🚪 Log Out</button>
</article> </article>
<section id="quota-meter" class="auth-only"> <section id="uploaded-files" class="auth-only">
<p class="quota-meter">Quota: <progress id="quota-bar" value="0" max="100"></progress> <span id="quota-text">0 MB</span></p> <h3>Uploaded Files</h3>
<h4>Uploaded Files</h4>
<ul id="file-list" class="file-list"> <ul id="file-list" class="file-list">
<li>Loading files...</li> <li>Loading files...</li>
</ul> </ul>
<p class="quota-meter">Quota: <progress id="quota-bar" value="0" max="100"></progress> <span id="quota-text">0 MB</span></p>
</section> </section>
<!-- Account Deletion Section --> <!-- Account Deletion Section -->
@ -194,7 +196,6 @@
</p> </p>
</footer> </footer>
<script type="module" src="/static/dashboard.js?v=5"></script>
<!-- Load public streams UI logic --> <!-- Load public streams UI logic -->
<script type="module" src="/static/streams-ui.js?v=3"></script> <script type="module" src="/static/streams-ui.js?v=3"></script>
<!-- Load upload functionality --> <!-- Load upload functionality -->

View File

@ -1,90 +1,43 @@
// static/magic-login.js — handles magic-link token UI /**
* Simplified Magic Login Module
*
* This file now uses the centralized AuthManager for authentication logic.
* The token-based magic login is handled by the AuthManager.
*/
import authManager from './auth-manager.js';
import { showSection } from './nav.js'; import { showSection } from './nav.js';
let magicLoginSubmitted = false; let magicLoginSubmitted = false;
/**
* Initialize magic login - now delegated to AuthManager
* This function is kept for backward compatibility but the actual
* magic login logic is handled by the AuthManager during initialization.
*/
export async function initMagicLogin() { export async function initMagicLogin() {
console.debug('[magic-login] initMagicLogin called'); // Debug messages disabled
// The AuthManager handles both URL-based and token-based magic login
// during its initialization, so we just need to ensure it's initialized
if (!window.authManager) {
// Debug messages disabled
await authManager.initialize();
}
// Check if there was a magic login processed
const params = new URLSearchParams(location.search); const params = new URLSearchParams(location.search);
const token = params.get('token'); const token = params.get('token');
if (!token) {
console.debug('[magic-login] No token in URL');
return;
}
// Remove token from URL immediately to prevent loops
const url = new URL(window.location.href);
url.searchParams.delete('token');
window.history.replaceState({}, document.title, url.pathname + url.search);
try {
const formData = new FormData();
formData.append('token', token);
const res = await fetch('/magic-login', {
method: 'POST',
body: formData,
});
if (res.redirected) {
// If redirected, backend should set cookie; but set localStorage for SPA
const url = new URL(res.url);
const confirmedUid = url.searchParams.get('confirmed_uid');
if (confirmedUid) {
// Generate a simple auth token (in a real app, this would come from the server)
const authToken = 'token-' + Math.random().toString(36).substring(2, 15);
// Set cookies and localStorage for SPA session logic if (token) {
document.cookie = `uid=${encodeURIComponent(confirmedUid)}; path=/; SameSite=Lax`; // Debug messages disabled
document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; } else {
// Debug messages disabled
// Store in localStorage for client-side access
localStorage.setItem('uid', confirmedUid);
localStorage.setItem('confirmed_uid', confirmedUid);
localStorage.setItem('authToken', authToken);
localStorage.setItem('uid_time', Date.now().toString());
}
window.location.href = res.url;
return;
}
// If not redirected, show error (shouldn't happen in normal flow)
let data;
const contentType = res.headers.get('content-type');
if (contentType && contentType.includes('application/json')) {
data = await res.json();
if (data && data.confirmed_uid) {
// Generate a simple auth token (in a real app, this would come from the server)
const authToken = 'token-' + Math.random().toString(36).substring(2, 15);
// Set cookies and localStorage for SPA session logic
document.cookie = `uid=${encodeURIComponent(data.confirmed_uid)}; path=/; SameSite=Lax`;
document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`;
// Store in localStorage for client-side access
localStorage.setItem('uid', data.confirmed_uid);
localStorage.setItem('confirmed_uid', data.confirmed_uid);
localStorage.setItem('authToken', authToken);
localStorage.setItem('uid_time', Date.now().toString());
import('./toast.js').then(({ showToast }) => {
showToast('✅ Login successful!');
// Update UI state after login
const guestDashboard = document.getElementById('guest-dashboard');
const userDashboard = document.getElementById('user-dashboard');
const registerPage = document.getElementById('register-page');
if (guestDashboard) guestDashboard.style.display = 'none';
if (userDashboard) userDashboard.style.display = 'block';
if (registerPage) registerPage.style.display = 'none';
// Show the user's stream page
if (typeof showSection === 'function') {
showSection('me-page');
}
});
return;
}
alert(data.detail || 'Login failed.');
} else {
const text = await res.text();
alert(text || 'Login failed.');
}
} catch (err) {
alert('Network error: ' + err);
} }
} }
// Export for backward compatibility
export { magicLoginSubmitted };
// Make showSection available globally for AuthManager
window.showSection = showSection;
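For reference, a sketch of the URL-token step the AuthManager is assumed to perform, based on the code removed above rather than on AuthManager's actual implementation (it would run inside an async initializer):
const params = new URLSearchParams(location.search);
const token = params.get('token');
if (token) {
    // Strip the token from the URL immediately to prevent redirect loops.
    const url = new URL(window.location.href);
    url.searchParams.delete('token');
    window.history.replaceState({}, document.title, url.pathname + url.search);
    const formData = new FormData();
    formData.append('token', token);
    await fetch('/magic-login', { method: 'POST', body: formData });
}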

View File

@ -1,81 +1,57 @@
import { showToast } from "./toast.js"; import { showToast } from "./toast.js";
import { globalAudioManager } from './global-audio-manager.js'; import { SharedAudioPlayer } from './shared-audio-player.js';
// Module-level state for the personal player function getPersonalStreamUrl(uid) {
let audio = null; return `/audio/${encodeURIComponent(uid)}/stream.opus`;
}
function updatePlayPauseButton(button, isPlaying) {
if (button) button.textContent = isPlaying ? '⏸️' : '▶️';
// Optionally, update other UI elements here
}
const personalPlayer = new SharedAudioPlayer({
playerType: 'personal',
getStreamUrl: getPersonalStreamUrl,
onUpdateButton: updatePlayPauseButton
});
/** /**
* Finds or creates the audio element for the personal stream. * Finds or creates the audio element for the personal stream.
* @returns {HTMLAudioElement | null} * @returns {HTMLAudioElement | null}
*/ */
function getOrCreateAudioElement() { function cleanupPersonalAudio() {
if (audio) { if (audioElement) {
return audio; try {
} if (audioElement._eventHandlers) {
const { onPlay, onPause, onEnded, onError } = audioElement._eventHandlers;
audio = document.createElement('audio'); if (onPlay) audioElement.removeEventListener('play', onPlay);
audio.id = 'me-audio'; if (onPause) audioElement.removeEventListener('pause', onPause);
audio.preload = 'metadata'; if (onEnded) audioElement.removeEventListener('ended', onEnded);
audio.crossOrigin = 'use-credentials'; if (onError) audioElement.removeEventListener('error', onError);
document.body.appendChild(audio);
// --- Setup Event Listeners (only once) ---
audio.addEventListener('error', (e) => {
console.error('Personal Player: Audio Element Error', e);
const error = audio.error;
let errorMessage = 'An unknown audio error occurred.';
if (error) {
switch (error.code) {
case error.MEDIA_ERR_ABORTED:
errorMessage = 'Audio playback was aborted.';
break;
case error.MEDIA_ERR_NETWORK:
errorMessage = 'A network error caused the audio to fail.';
break;
case error.MEDIA_ERR_DECODE:
errorMessage = 'The audio could not be decoded.';
break;
case error.MEDIA_ERR_SRC_NOT_SUPPORTED:
errorMessage = 'The audio format is not supported by your browser.';
break;
default:
errorMessage = `An unexpected error occurred (Code: ${error.code}).`;
break;
} }
audioElement.pause();
audioElement.removeAttribute('src');
audioElement.load();
if (audioElement._eventHandlers) delete audioElement._eventHandlers;
// Remove from DOM
if (audioElement.parentNode) audioElement.parentNode.removeChild(audioElement);
} catch (e) {
console.warn('[personal-player.js] Error cleaning up audio element:', e);
} }
showToast(errorMessage, 'error'); audioElement = null;
});
audio.addEventListener('play', () => updatePlayPauseButton(true));
audio.addEventListener('pause', () => updatePlayPauseButton(false));
audio.addEventListener('ended', () => updatePlayPauseButton(false));
// The canplaythrough listener is removed as it violates autoplay policies.
// The user will perform a second click to play the media after it's loaded.
return audio;
}
/**
* Updates the play/pause button icon based on audio state.
* @param {boolean} isPlaying - Whether the audio is currently playing.
*/
function updatePlayPauseButton(isPlaying) {
const playPauseBtn = document.querySelector('#me-page .play-pause-btn');
if (playPauseBtn) {
playPauseBtn.textContent = isPlaying ? '⏸️' : '▶️';
} }
} }
/**
* Loads the user's personal audio stream into the player.
* @param {string} uid - The user's unique ID. // Use the shared player for loading and playing the personal stream
*/ export function loadProfileStream(uid, playPauseBtn) {
export async function loadProfileStream(uid) { if (!uid) {
const audioElement = getOrCreateAudioElement(); showToast('No UID provided for profile stream', 'error');
const audioSrc = `/audio/${uid}/stream.opus?t=${Date.now()}`; return;
console.log(`[personal-player.js] Setting personal audio source to: ${audioSrc}`); }
audioElement.src = audioSrc; personalPlayer.play(uid, playPauseBtn);
} }
/** /**
@ -91,50 +67,19 @@ export function initPersonalPlayer() {
if (!playPauseBtn) return; if (!playPauseBtn) return;
e.stopPropagation(); e.stopPropagation();
const audio = getOrCreateAudioElement(); const uid = localStorage.getItem('uid');
if (!audio) return; if (!uid) {
showToast('Please log in to play audio.', 'error');
try { return;
if (audio.paused) { }
if (!audio.src || audio.src.endsWith('/#')) { // Toggle play/pause
showToast('No audio file available. Please upload one first.', 'info'); if (personalPlayer.audioElement && !personalPlayer.audioElement.paused && !personalPlayer.audioElement.ended) {
return; personalPlayer.pause();
} } else {
loadProfileStream(uid, playPauseBtn);
console.log('Attempting to play...');
globalAudioManager.startPlayback('personal', localStorage.getItem('uid') || 'personal');
const playPromise = audio.play();
if (playPromise !== undefined) {
playPromise.catch(error => {
console.error(`Initial play() failed: ${error.name}. This is expected on first load.`);
// If play fails, it's because the content isn't loaded.
// The recovery is to call load(). The user will need to click play again.
console.log('Calling load() to fetch media...');
audio.load();
showToast('Stream is loading. Please click play again in a moment.', 'info');
});
}
} else {
console.log('Attempting to pause...');
audio.pause();
}
} catch (err) {
console.error('A synchronous error occurred in handlePlayPause:', err);
showToast('An unexpected error occurred with the audio player.', 'error');
} }
}); });
// Listen for stop requests from the global manager // Make loadProfileStream globally accessible for upload.js
globalAudioManager.addListener('personal', () => { window.loadProfileStream = loadProfileStream;
console.log('[personal-player.js] Received stop request from global audio manager.');
const audio = getOrCreateAudioElement();
if (audio && !audio.paused) {
console.log('[personal-player.js] Pausing personal audio player.');
audio.pause();
}
});
// Initial setup
getOrCreateAudioElement();
} }

View File

@ -0,0 +1,70 @@
/**
* Cleanup Script: Remove Redundant confirmed_uid from localStorage
*
* This script removes the redundant confirmed_uid field from localStorage
* for users who might have it stored from the old authentication system.
*/
(function() {
'use strict';
console.log('[CONFIRMED_UID_CLEANUP] Starting cleanup of redundant confirmed_uid field...');
// Check if confirmed_uid exists in localStorage
const confirmedUid = localStorage.getItem('confirmed_uid');
const currentUid = localStorage.getItem('uid');
if (confirmedUid) {
console.log(`[CONFIRMED_UID_CLEANUP] Found confirmed_uid: ${confirmedUid}`);
console.log(`[CONFIRMED_UID_CLEANUP] Current uid: ${currentUid}`);
// Verify that uid exists and is properly set
if (!currentUid) {
console.warn('[CONFIRMED_UID_CLEANUP] No uid found, setting uid from confirmed_uid');
localStorage.setItem('uid', confirmedUid);
} else if (currentUid !== confirmedUid) {
console.warn(`[CONFIRMED_UID_CLEANUP] UID mismatch - uid: ${currentUid}, confirmed_uid: ${confirmedUid}`);
console.log('[CONFIRMED_UID_CLEANUP] Keeping current uid value');
}
// Remove the redundant confirmed_uid
localStorage.removeItem('confirmed_uid');
console.log('[CONFIRMED_UID_CLEANUP] Removed redundant confirmed_uid from localStorage');
// Log the cleanup action
console.log('[CONFIRMED_UID_CLEANUP] Cleanup completed successfully');
} else {
console.log('[CONFIRMED_UID_CLEANUP] No confirmed_uid found, no cleanup needed');
}
// Also check for any other potential redundant fields
const redundantFields = [
'confirmed_uid', // Main target
'confirmedUid', // Camel case variant
'confirmed-uid' // Hyphenated variant
];
let removedCount = 0;
redundantFields.forEach(field => {
if (localStorage.getItem(field)) {
localStorage.removeItem(field);
removedCount++;
console.log(`[CONFIRMED_UID_CLEANUP] Removed redundant field: ${field}`);
}
});
if (removedCount > 0) {
console.log(`[CONFIRMED_UID_CLEANUP] Removed ${removedCount} redundant authentication fields`);
}
console.log('[CONFIRMED_UID_CLEANUP] Cleanup process completed');
})();
// Export for manual execution if needed
if (typeof window !== 'undefined') {
window.removeConfirmedUidCleanup = function() {
const script = document.createElement('script');
script.src = '/static/remove-confirmed-uid.js';
document.head.appendChild(script);
};
}
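The file also exports a hook for running the cleanup again by hand, e.g. from the browser console:
// Re-injects the script; the IIFE above runs once more on load.
window.removeConfirmedUidCleanup();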

View File

@ -0,0 +1,162 @@
// shared-audio-player.js
// Unified audio player logic for both streams and personal player
import { globalAudioManager } from './global-audio-manager.js';
export class SharedAudioPlayer {
constructor({ playerType, getStreamUrl, onUpdateButton }) {
this.playerType = playerType; // 'streams' or 'personal'
this.getStreamUrl = getStreamUrl; // function(uid) => url
this.onUpdateButton = onUpdateButton; // function(button, isPlaying)
this.audioElement = null;
this.currentUid = null;
this.isPlaying = false;
this.currentButton = null;
this._eventHandlers = {};
// Register stop listener
globalAudioManager.addListener(playerType, () => {
this.stop();
});
}
pause() {
if (this.audioElement && !this.audioElement.paused && !this.audioElement.ended) {
this.audioElement.pause();
this.isPlaying = false;
if (this.onUpdateButton && this.currentButton) {
this.onUpdateButton(this.currentButton, false);
}
}
}
async play(uid, button) {
const ctx = `[SharedAudioPlayer][${this.playerType}]${uid ? `[${uid}]` : ''}`;
const isSameUid = this.currentUid === uid;
const isActive = this.audioElement && !this.audioElement.paused && !this.audioElement.ended;
// Guard: If already playing the requested UID and not paused/ended, do nothing
if (isSameUid && isActive) {
if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, true);
return;
}
// If same UID but paused, resume
if (isSameUid && this.audioElement && this.audioElement.paused && !this.audioElement.ended) {
try {
await this.audioElement.play();
this.isPlaying = true;
if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, true);
globalAudioManager.startPlayback(this.playerType, uid);
} catch (err) {
this.isPlaying = false;
if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, false);
console.error(`${ctx} play() resume failed:`, err);
}
return;
}
// Otherwise, stop current and start new
if (!isSameUid && this.audioElement) {
} else {
}
this.stop();
this.currentUid = uid;
this.currentButton = button;
const url = this.getStreamUrl(uid);
this.audioElement = new Audio(url);
this.audioElement.preload = 'auto';
this.audioElement.crossOrigin = 'anonymous';
this.audioElement.style.display = 'none';
document.body.appendChild(this.audioElement);
this._attachEventHandlers();
try {
await this.audioElement.play();
this.isPlaying = true;
if (this.onUpdateButton) this.onUpdateButton(button, true);
globalAudioManager.startPlayback(this.playerType, uid);
} catch (err) {
this.isPlaying = false;
if (this.onUpdateButton) this.onUpdateButton(button, false);
console.error(`${ctx} play() failed:`, err);
}
}
stop() {
if (this.audioElement) {
this._removeEventHandlers();
try {
this.audioElement.pause();
this.audioElement.removeAttribute('src');
this.audioElement.load();
if (this.audioElement.parentNode) {
this.audioElement.parentNode.removeChild(this.audioElement);
}
} catch (e) {
console.warn('[shared-audio-player] Error cleaning up audio element:', e);
}
this.audioElement = null;
}
this.isPlaying = false;
this.currentUid = null;
if (this.currentButton && this.onUpdateButton) {
this.onUpdateButton(this.currentButton, false);
}
this.currentButton = null;
}
_attachEventHandlers() {
if (!this.audioElement) return;
const ctx = `[SharedAudioPlayer][${this.playerType}]${this.currentUid ? `[${this.currentUid}]` : ''}`;
const logEvent = (event) => {
// Debug logging disabled
};
// Core handlers
const onPlay = (e) => {
logEvent(e);
this.isPlaying = true;
if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, true);
};
const onPause = (e) => {
logEvent(e);
// console.trace(`${ctx} Audio pause stack trace:`);
this.isPlaying = false;
if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false);
};
const onEnded = (e) => {
logEvent(e);
this.isPlaying = false;
if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false);
};
const onError = (e) => {
logEvent(e);
this.isPlaying = false;
if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false);
console.error(`${ctx} Audio error:`, e);
};
// Attach handlers
this.audioElement.addEventListener('play', onPlay);
this.audioElement.addEventListener('pause', onPause);
this.audioElement.addEventListener('ended', onEnded);
this.audioElement.addEventListener('error', onError);
// Attach debug logging for all relevant events
const debugEvents = [
'abort','canplay','canplaythrough','durationchange','emptied','encrypted','loadeddata','loadedmetadata',
'loadstart','playing','progress','ratechange','seeked','seeking','stalled','suspend','timeupdate','volumechange','waiting'
];
debugEvents.forEach(evt => {
this.audioElement.addEventListener(evt, logEvent);
}); // Logging now disabled
this._eventHandlers = { onPlay, onPause, onEnded, onError, debugEvents, logEvent };
}
_removeEventHandlers() {
if (!this.audioElement || !this._eventHandlers) return;
const { onPlay, onPause, onEnded, onError } = this._eventHandlers;
if (onPlay) this.audioElement.removeEventListener('play', onPlay);
if (onPause) this.audioElement.removeEventListener('pause', onPause);
if (onEnded) this.audioElement.removeEventListener('ended', onEnded);
if (onError) this.audioElement.removeEventListener('error', onError);
this._eventHandlers = {};
}
}
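A minimal usage sketch; both concrete players in this commit (personal-player.js and streams-ui.js) follow exactly this pattern:
const streamsPlayer = new SharedAudioPlayer({
    playerType: 'streams',
    getStreamUrl: uid => `/audio/${encodeURIComponent(uid)}/stream.opus`,
    onUpdateButton: (button, isPlaying) => {
        if (button) button.textContent = isPlaying ? '⏸️' : '▶️';
    }
});
// streamsPlayer.play(uid, buttonEl) starts, resumes, or no-ops as appropriate;
// globalAudioManager stops any other registered player type before playback begins.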

View File

@ -1,17 +1,30 @@
// sound.js — reusable Web Audio beep // sound.js — reusable Web Audio beep
export function playBeep(frequency = 432, duration = 0.2, type = 'sine') { export function playBeep(frequency = 432, duration = 0.2, type = 'sine') {
const ctx = new (window.AudioContext || window.webkitAudioContext)(); try {
const osc = ctx.createOscillator(); // Validate parameters to prevent audio errors
const gain = ctx.createGain(); if (!Number.isFinite(frequency) || frequency <= 0) {
frequency = 432; // fallback to default
}
if (!Number.isFinite(duration) || duration <= 0) {
duration = 0.2; // fallback to default
}
osc.type = type; const ctx = new (window.AudioContext || window.webkitAudioContext)();
osc.frequency.value = frequency; const osc = ctx.createOscillator();
const gain = ctx.createGain();
osc.connect(gain); osc.type = type;
gain.connect(ctx.destination); osc.frequency.value = frequency;
gain.gain.setValueAtTime(0.1, ctx.currentTime); // subtle volume osc.connect(gain);
osc.start(); gain.connect(ctx.destination);
osc.stop(ctx.currentTime + duration);
gain.gain.setValueAtTime(0.1, ctx.currentTime); // subtle volume
osc.start();
osc.stop(ctx.currentTime + duration);
} catch (error) {
// Silently handle audio errors to prevent breaking upload flow
console.warn('[SOUND] Audio beep failed:', error.message);
}
} }
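Usage examples; the guards above mean bad arguments fall back to the defaults instead of throwing inside the Web Audio calls:
playBeep();          // 432 Hz sine for 0.2 s
playBeep(880, 0.1);  // short, higher beep
playBeep(NaN, -1);   // sanitized to the 432 Hz / 0.2 s defaults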

View File

@ -28,7 +28,7 @@ export function initStreamsUI() {
// Register with global audio manager to handle stop requests from other players // Register with global audio manager to handle stop requests from other players
globalAudioManager.addListener('streams', () => { globalAudioManager.addListener('streams', () => {
console.log('[streams-ui] Received stop request from global audio manager'); // Debug messages disabled
stopPlayback(); stopPlayback();
}); });
} }
@ -79,10 +79,10 @@ document.addEventListener('DOMContentLoaded', () => {
function loadAndRenderStreams() { function loadAndRenderStreams() {
const ul = document.getElementById('stream-list'); const ul = document.getElementById('stream-list');
if (!ul) { if (!ul) {
console.error('[STREAMS-UI] Stream list element not found'); // Debug messages disabled
return; return;
} }
console.log('[STREAMS-UI] loadAndRenderStreams called, shouldForceRefresh:', shouldForceRefresh); // Debug messages disabled
// Don't start a new connection if one is already active and we're not forcing a refresh // Don't start a new connection if one is already active and we're not forcing a refresh
  if (activeSSEConnection && !shouldForceRefresh) {
@ -140,7 +140,7 @@ function loadAndRenderStreams() {
      window.location.hostname === '127.0.0.1';
    if (isLocalDevelopment || window.DEBUG_STREAMS) {
      const duration = Date.now() - connectionStartTime;
-     console.group('[streams-ui] Connection timeout reached');
+     // Debug messages disabled
      console.log(`Duration: ${duration}ms`);
      console.log('Current time:', new Date().toISOString());
      console.log('Streams received:', streams.length);
@ -203,18 +203,18 @@ function loadAndRenderStreams() {
  // Process the stream
  function processStream({ done, value }) {
-   console.log('[STREAMS-UI] processStream called with done:', done);
+   // Debug messages disabled
    if (done) {
-     console.log('[STREAMS-UI] Stream processing complete');
+     // Debug messages disabled
      // Process any remaining data in the buffer
      if (buffer.trim()) {
-       console.log('[STREAMS-UI] Processing remaining buffer data');
+       // Debug messages disabled
        try {
          const data = JSON.parse(buffer);
-         console.log('[STREAMS-UI] Parsed data from buffer:', data);
+         // Debug messages disabled
          processSSEEvent(data);
        } catch (e) {
-         console.error('[STREAMS-UI] Error parsing buffer data:', e);
+         // Debug messages disabled
        }
      }
      return;
@ -237,7 +237,7 @@ function loadAndRenderStreams() {
        const data = JSON.parse(dataMatch[1]);
        processSSEEvent(data);
      } catch (e) {
-       console.error('[streams-ui] Error parsing event data:', e, 'Event:', event);
+       // Debug messages disabled
      }
    }
  }
@ -298,7 +298,7 @@ function loadAndRenderStreams() {
  // Function to process SSE events
  function processSSEEvent(data) {
-   console.log('[STREAMS-UI] Processing SSE event:', data);
+   // Debug messages disabled
    if (data.end) {
      if (streams.length === 0) {
        ul.innerHTML = '<li>No active streams.</li>';
@ -356,7 +356,7 @@ function loadAndRenderStreams() {
  // Function to handle SSE errors
  function handleSSEError(error) {
-   console.error('[streams-ui] SSE error:', error);
+   // Debug messages disabled
    // Only show error if we haven't already loaded any streams
    if (streams.length === 0) {
@ -386,11 +386,11 @@ function loadAndRenderStreams() {
  export function renderStreamList(streams) {
    const ul = document.getElementById('stream-list');
    if (!ul) {
-     console.warn('[STREAMS-UI] renderStreamList: #stream-list not found');
+     // Debug messages disabled
      return;
    }
-   console.log('[STREAMS-UI] Rendering stream list with', streams.length, 'streams');
-   console.debug('[STREAMS-UI] Streams data:', streams);
+   // Debug messages disabled
+   // Debug messages disabled
    if (Array.isArray(streams)) {
      if (streams.length) {
        // Sort by mtime descending (most recent first)
@ -409,10 +409,10 @@ export function renderStreamList(streams) {
      }
    } else {
      ul.innerHTML = '<li>Error: Invalid stream data.</li>';
-     console.error('[streams-ui] renderStreamList: streams is not an array', streams);
+     // Debug messages disabled
    }
    highlightActiveProfileLink();
-   console.debug('[streams-ui] renderStreamList complete');
+   // Debug messages disabled
  }
  export function highlightActiveProfileLink() {
@ -463,12 +463,7 @@ function escapeHtml(unsafe) {
      .replace(/'/g, "&#039;");
  }
- // Function to update play/pause button state
- function updatePlayPauseButton(button, isPlaying) {
-   if (!button) return;
-   button.textContent = isPlaying ? '⏸️' : '▶️';
-   button.setAttribute('aria-label', isPlaying ? 'Pause' : 'Play');
- }
  // Audio context for Web Audio API
  let audioContext = null;
@ -492,7 +487,7 @@ function getAudioContext() {
  // Stop current playback completely
  function stopPlayback() {
-   console.log('[streams-ui] Stopping playback');
+   // Debug messages disabled
    // Stop Web Audio API if active
    if (audioSource) {
@ -561,120 +556,28 @@ function stopPlayback() {
    currentlyPlayingAudio = null;
  }
- // Load and play audio using HTML5 Audio element for Opus
- async function loadAndPlayAudio(uid, playPauseBtn) {
-   // If we already have an audio element for this UID and it's paused, just resume it
-   if (audioElement && currentUid === uid && audioElement.paused) {
-     try {
-       await audioElement.play();
-       isPlaying = true;
-       updatePlayPauseButton(playPauseBtn, true);
-       return;
-     } catch (error) {
-       // Fall through to reload if resume fails
-     }
-   }
-   // Stop any current playback
-   stopPlayback();
-   // Notify global audio manager that streams player is starting
-   globalAudioManager.startPlayback('streams', uid);
-   // Update UI
-   updatePlayPauseButton(playPauseBtn, true);
-   currentlyPlayingButton = playPauseBtn;
-   currentUid = uid;
-   try {
-     // Create a new audio element with the correct MIME type
-     const audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus`;
-     // Create a new audio element with a small delay to prevent race conditions
-     await new Promise(resolve => setTimeout(resolve, 50));
-     audioElement = new Audio(audioUrl);
-     audioElement.preload = 'auto';
-     audioElement.crossOrigin = 'anonymous'; // Important for CORS
-     // Set up event handlers with proper binding
-     const onPlay = () => {
-       isPlaying = true;
-       updatePlayPauseButton(playPauseBtn, true);
-     };
-     const onPause = () => {
-       isPlaying = false;
-       updatePlayPauseButton(playPauseBtn, false);
-     };
-     const onEnded = () => {
-       isPlaying = false;
-       cleanupAudio();
-     };
-     const onError = (e) => {
-       // Ignore errors from previous audio elements that were cleaned up
-       if (!audioElement || audioElement.readyState === 0) {
-         return;
-       }
-       isPlaying = false;
-       updatePlayPauseButton(playPauseBtn, false);
-       // Don't show error to user for aborted requests
-       if (audioElement.error && audioElement.error.code === MediaError.MEDIA_ERR_ABORTED) {
-         return;
-       }
-       // Show error to user for other errors
-       if (typeof showToast === 'function') {
-         showToast('Error playing audio. The format may not be supported.', 'error');
-       }
-     };
-     // Add event listeners
-     audioElement.addEventListener('play', onPlay, { once: true });
-     audioElement.addEventListener('pause', onPause);
-     audioElement.addEventListener('ended', onEnded, { once: true });
-     audioElement.addEventListener('error', onError);
-     // Store references for cleanup
-     audioElement._eventHandlers = { onPlay, onPause, onEnded, onError };
-     // Start playback with error handling
-     try {
-       const playPromise = audioElement.play();
-       if (playPromise !== undefined) {
-         await playPromise.catch(error => {
-           // Ignore abort errors when switching between streams
-           if (error.name !== 'AbortError') {
-             throw error;
-           }
-         });
-       }
-       isPlaying = true;
-     } catch (error) {
-       // Only log unexpected errors
-       if (error.name !== 'AbortError') {
-         console.error('[streams-ui] Error during playback:', error);
-         throw error;
-       }
-     }
-   } catch (error) {
-     console.error('[streams-ui] Error loading/playing audio:', error);
-     if (playPauseBtn) {
-       updatePlayPauseButton(playPauseBtn, false);
-     }
-     // Only show error if it's not an abort error
-     if (error.name !== 'AbortError' && typeof showToast === 'function') {
-       showToast('Error playing audio. Please try again.', 'error');
-     }
-   }
- }
+ // --- Shared Audio Player Integration ---
+ import { SharedAudioPlayer } from './shared-audio-player.js';
+ function getStreamUrl(uid) {
+   return `/audio/${encodeURIComponent(uid)}/stream.opus`;
+ }
+ function updatePlayPauseButton(button, isPlaying) {
+   if (button) button.textContent = isPlaying ? '⏸️' : '▶️';
+   // Optionally, update other UI elements here
+ }
+ // Only this definition should remain; remove any other updatePlayPauseButton functions.
+ const streamsPlayer = new SharedAudioPlayer({
+   playerType: 'streams',
+   getStreamUrl,
+   onUpdateButton: updatePlayPauseButton
+ });
+ // Load and play audio using SharedAudioPlayer
+ function loadAndPlayAudio(uid, playPauseBtn) {
+   streamsPlayer.play(uid, playPauseBtn);
+ }
  // Handle audio ended event
@ -688,7 +591,7 @@ function handleAudioEnded() {
  // Clean up audio resources
  function cleanupAudio() {
-   console.log('[streams-ui] Cleaning up audio resources');
+   // Debug messages disabled
    // Clean up Web Audio API resources if they exist
    if (audioSource) {
@ -756,32 +659,14 @@ if (streamList) {
      e.preventDefault();
      const uid = playPauseBtn.dataset.uid;
-     if (!uid) {
-       return;
-     }
-     // If clicking the currently playing button, toggle pause/play
-     if (currentUid === uid) {
-       if (isPlaying) {
-         await audioElement.pause();
-         isPlaying = false;
-         updatePlayPauseButton(playPauseBtn, false);
-       } else {
-         try {
-           await audioElement.play();
-           isPlaying = true;
-           updatePlayPauseButton(playPauseBtn, true);
-         } catch (error) {
-           // If resume fails, try reloading the audio
-           await loadAndPlayAudio(uid, playPauseBtn);
-         }
-       }
-       return;
-     }
-     // If a different stream is playing, stop it and start the new one
-     stopPlayback();
-     await loadAndPlayAudio(uid, playPauseBtn);
+     if (!uid) return;
+     // Toggle play/pause using SharedAudioPlayer
+     if (streamsPlayer.currentUid === uid && streamsPlayer.audioElement && !streamsPlayer.audioElement.paused && !streamsPlayer.audioElement.ended) {
+       streamsPlayer.pause();
+     } else {
+       await loadAndPlayAudio(uid, playPauseBtn);
+     }
    });
  }
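The rewrite above delegates the whole audio-element lifecycle to SharedAudioPlayer, imported from ./shared-audio-player.js, which is not part of this diff. A minimal sketch of the interface streams-ui.js relies on (play, pause, currentUid, audioElement, and the onUpdateButton callback) could look like the following; this is an assumption about that module, not its actual implementation:

// Hypothetical sketch of ./shared-audio-player.js (assumed, not from this commit).
export class SharedAudioPlayer {
  constructor({ playerType, getStreamUrl, onUpdateButton }) {
    this.playerType = playerType;       // e.g. 'streams'
    this.getStreamUrl = getStreamUrl;   // uid -> stream URL
    this.onUpdateButton = onUpdateButton;
    this.audioElement = null;
    this.currentUid = null;
    this.currentButton = null;
  }
  async play(uid, button) {
    // Reuse the element when resuming the same stream; otherwise start fresh
    if (this.currentUid !== uid || !this.audioElement) {
      this.stop();
      this.audioElement = new Audio(this.getStreamUrl(uid));
      this.currentUid = uid;
    }
    this.currentButton = button;
    await this.audioElement.play();
    this.onUpdateButton(button, true);
  }
  pause() {
    if (this.audioElement) {
      this.audioElement.pause();
      this.onUpdateButton(this.currentButton, false);
    }
  }
  stop() {
    // Release the element so the browser can abort any pending fetch
    if (this.audioElement) {
      this.audioElement.pause();
      this.audioElement.src = '';
      this.audioElement = null;
      this.currentUid = null;
    }
  }
}

This matches how the click handler uses it: streamsPlayer.currentUid and streamsPlayer.audioElement.paused/.ended decide between pause() and a fresh play().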
@ -490,7 +490,7 @@ nav#guest-dashboard.dashboard-nav {
    box-shadow: 0 4px 20px rgba(0, 0, 0, 0.4), 0 0 0 1px rgba(255, 255, 255, 0.1);
    margin-top: 0.8em;
    opacity: 0;
-   animation: fadeInOut 3.5s both;
+   animation: fadeInOut 15s both;
    font-size: 1.1em;
    pointer-events: auto;
    border: 1px solid rgba(255, 255, 255, 0.1);
@ -580,7 +580,7 @@ nav#guest-dashboard.dashboard-nav {
  }
  /* Quota meter and uploaded files section */
- #quota-meter {
+ #uploaded-files {
    background: var(--surface); /* Match article background */
    border: 1px solid var(--border);
    border-radius: 8px;
@ -593,19 +593,19 @@ nav#guest-dashboard.dashboard-nav {
    color: var(--text-light);
  }
- #quota-meter {
+ #uploaded-files {
    transition: all 0.2s ease;
  }
- #quota-meter h4 {
+ #uploaded-files h3 {
    font-weight: 400;
    text-align: center;
-   margin: 1.5rem 0 0.75rem;
+   margin: 0 0 27px 0;
    color: var(--text);
  }
- #quota-meter > h4 {
-   margin-top: 1.5rem;
+ #uploaded-files > h3 {
+   margin: 0 0 27px 0;
    text-align: center;
    font-weight: 400;
    color: var(--text);
@ -732,7 +732,7 @@ nav#guest-dashboard.dashboard-nav {
    border-bottom: none;
  }
- #quota-meter:hover {
+ #uploaded-files:hover {
    transform: translateY(-2px);
    box-shadow: 0 6px 16px rgba(0, 0, 0, 0.15);
  }
@ -740,7 +740,7 @@ nav#guest-dashboard.dashboard-nav {
  .quota-meter {
    font-size: 0.9em;
    color: var(--text-muted);
-   margin: 0 0 1rem 0;
+   margin: 1rem 0 0 0;
  }
  #file-list {
@ -14,6 +14,6 @@ export function showToast(message) {
    setTimeout(() => {
      toast.remove();
      // Do not remove the container; let it persist for stacking
-   }, 3500);
+   }, 15000);
  }
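Note that this 15000 ms removal timeout and the CSS fadeInOut 15s animation above were changed in lockstep. One way to keep the two values from drifting apart is to drive both from a single constant; a sketch only, where the #toast-container id and .toast class are assumptions rather than names taken from this diff:

// Sketch: keep the JS timeout and the CSS animation duration in sync.
const TOAST_DURATION_MS = 15000;
function showToastSynced(message) {
  const toast = document.createElement('div');
  toast.className = 'toast';                     // assumed class name
  toast.textContent = message;
  toast.style.animationDuration = `${TOAST_DURATION_MS / 1000}s`; // overrides the CSS value
  document.getElementById('toast-container')?.appendChild(toast); // assumed container id
  setTimeout(() => toast.remove(), TOAST_DURATION_MS);
}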
static/uid-validator.js (new file):
@ -0,0 +1,169 @@
/**
* UID Validation Utility
*
* Provides comprehensive UID format validation and sanitization
* to ensure all UIDs are properly formatted as email addresses.
*/
export class UidValidator {
constructor() {
// RFC 5322 compliant email regex (basic validation)
this.emailRegex = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/;
// Common invalid patterns to check against
this.invalidPatterns = [
/^devuser$/i, // Legacy username pattern
/^user\d+$/i, // Generic user patterns
/^test$/i, // Test user
/^admin$/i, // Admin user
/^\d+$/, // Pure numeric
/^[a-zA-Z]+$/, // Pure alphabetic (no @ symbol)
];
}
/**
* Validate UID format - must be a valid email address
*/
isValidFormat(uid) {
if (!uid || typeof uid !== 'string') {
return {
valid: false,
error: 'UID must be a non-empty string',
code: 'INVALID_TYPE'
};
}
const trimmed = uid.trim();
if (trimmed.length === 0) {
return {
valid: false,
error: 'UID cannot be empty',
code: 'EMPTY_UID'
};
}
// Check against invalid patterns
for (const pattern of this.invalidPatterns) {
if (pattern.test(trimmed)) {
return {
valid: false,
error: `UID matches invalid pattern: ${pattern}`,
code: 'INVALID_PATTERN'
};
}
}
// Validate email format
if (!this.emailRegex.test(trimmed)) {
return {
valid: false,
error: 'UID must be a valid email address',
code: 'INVALID_EMAIL_FORMAT'
};
}
return {
valid: true,
sanitized: trimmed.toLowerCase()
};
}
/**
* Sanitize and validate UID - ensures consistent format
*/
sanitize(uid) {
const validation = this.isValidFormat(uid);
if (!validation.valid) {
console.error('[UID-VALIDATOR] Validation failed:', validation.error, { uid });
return null;
}
return validation.sanitized;
}
/**
* Validate and throw error if invalid
*/
validateOrThrow(uid, context = 'UID') {
const validation = this.isValidFormat(uid);
if (!validation.valid) {
throw new Error(`${context} validation failed: ${validation.error} (${validation.code})`);
}
return validation.sanitized;
}
/**
* Check if a UID needs migration (legacy format)
*/
needsMigration(uid) {
if (!uid || typeof uid !== 'string') {
return false;
}
const trimmed = uid.trim();
// Check if it's already a valid email
if (this.emailRegex.test(trimmed)) {
return false;
}
// Check if it matches known legacy patterns
for (const pattern of this.invalidPatterns) {
if (pattern.test(trimmed)) {
return true;
}
}
return true; // Any non-email format needs migration
}
/**
* Get validation statistics for debugging
*/
getValidationStats(uids) {
const stats = {
total: uids.length,
valid: 0,
invalid: 0,
needsMigration: 0,
errors: {}
};
uids.forEach(uid => {
const validation = this.isValidFormat(uid);
if (validation.valid) {
stats.valid++;
} else {
stats.invalid++;
const code = validation.code || 'UNKNOWN';
stats.errors[code] = (stats.errors[code] || 0) + 1;
}
if (this.needsMigration(uid)) {
stats.needsMigration++;
}
});
return stats;
}
}
// Create singleton instance
export const uidValidator = new UidValidator();
// Legacy exports for backward compatibility
export function validateUidFormat(uid) {
return uidValidator.isValidFormat(uid).valid;
}
export function sanitizeUid(uid) {
return uidValidator.sanitize(uid);
}
export function validateUidOrThrow(uid, context) {
return uidValidator.validateOrThrow(uid, context);
}
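For illustration, here is how the validator behaves on a few representative inputs (a usage sketch only; the results follow directly from the class above):

// Example usage of the UID validator.
import { uidValidator, sanitizeUid } from './uid-validator.js';

const result = uidValidator.isValidFormat('DevUser');
// -> { valid: false, error: 'UID matches invalid pattern: /^devuser$/i', code: 'INVALID_PATTERN' }

const clean = sanitizeUid('  Alice@Example.COM  ');
// -> 'alice@example.com' (trimmed and lowercased)

if (uidValidator.needsMigration('user123')) {
  // legacy, non-email UID: route it through the migration path
}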
@ -1,266 +1,178 @@
+ // upload.js — Frontend file upload handler
  import { showToast } from "./toast.js";
  import { playBeep } from "./sound.js";
- import { logToServer } from "./logger.js";
+ // Initialize upload system when DOM is loaded
  document.addEventListener('DOMContentLoaded', () => {
+   // This module handles the file upload functionality, including drag-and-drop,
+   // progress indication, and post-upload actions like refreshing the file list.
+   // DOM elements are fetched once the DOM is ready
    const dropzone = document.getElementById("user-upload-area");
-   if (dropzone) {
-     dropzone.setAttribute("aria-label", "Upload area. Click or drop an audio file to upload.");
-   }
    const fileInput = document.getElementById("fileInputUser");
-   const fileInfo = document.createElement("div");
-   fileInfo.id = "file-info";
-   fileInfo.style.textAlign = "center";
-   if (fileInput) {
-     fileInput.parentNode.insertBefore(fileInfo, fileInput.nextSibling);
-   }
-   const streamInfo = document.getElementById("stream-info");
-   const streamUrlEl = document.getElementById("streamUrl");
-   const spinner = document.getElementById("spinner") || { style: { display: 'none' } };
-   let abortController;
-   // Upload function
-   const upload = async (file) => {
-     if (abortController) abortController.abort();
-     abortController = new AbortController();
-     fileInfo.innerText = `📁 ${file.name}${(file.size / 1024 / 1024).toFixed(2)} MB`;
-     if (file.size > 100 * 1024 * 1024) {
-       showToast("❌ File too large. Please upload a file smaller than 100MB.");
-       return;
-     }
-     spinner.style.display = "block";
-     showToast('📡 Uploading…');
-     fileInput.disabled = true;
-     dropzone.classList.add("uploading");
-     const formData = new FormData();
-     const sessionUid = localStorage.getItem("uid");
-     formData.append("uid", sessionUid);
-     formData.append("file", file);
-     const res = await fetch("/upload", {
-       signal: abortController.signal,
-       method: "POST",
-       body: formData,
-     });
-     let data, parseError;
-     try {
-       data = await res.json();
-     } catch (e) {
-       parseError = e;
-     }
-     if (!data) {
-       showToast("❌ Upload failed: " + (parseError && parseError.message ? parseError.message : "Unknown error"));
-       spinner.style.display = "none";
-       fileInput.disabled = false;
-       dropzone.classList.remove("uploading");
-       return;
-     }
-     if (res.ok) {
-       if (data.quota && data.quota.used_mb !== undefined) {
-         const bar = document.getElementById("quota-bar");
-         const text = document.getElementById("quota-text");
-         const quotaSec = document.getElementById("quota-meter");
-         if (bar && text && quotaSec) {
-           quotaSec.hidden = false;
-           const used = parseFloat(data.quota.used_mb);
-           bar.value = used;
-           bar.max = 100;
-           text.textContent = `${used.toFixed(1)} MB used`;
-         }
-       }
-       spinner.style.display = "none";
-       fileInput.disabled = false;
-       dropzone.classList.remove("uploading");
-       showToast("✅ Upload successful.");
-       // Refresh the audio player and file list
-       const uid = localStorage.getItem("uid");
-       if (uid) {
-         try {
-           if (window.loadProfileStream) {
-             await window.loadProfileStream(uid);
-           }
-           // Refresh the file list
-           if (window.fetchAndDisplayFiles) {
-             await window.fetchAndDisplayFiles(uid);
-           }
-           // Refresh the stream list to update the last update time
-           if (window.refreshStreamList) {
-             await window.refreshStreamList();
-           }
-         } catch (e) {
-           console.error('Failed to refresh:', e);
-         }
-       }
-       playBeep(432, 0.25, "sine");
-     } else {
-       if (streamInfo) streamInfo.hidden = true;
-       if (spinner) spinner.style.display = "none";
-       if ((data.detail || data.error || "").includes("music")) {
-         showToast("🎵 Upload rejected: singing or music detected.");
-       } else {
-         showToast(`❌ Upload failed: ${data.detail || data.error}`);
-       }
-       if (fileInput) fileInput.value = null;
-       if (dropzone) dropzone.classList.remove("uploading");
-       if (fileInput) fileInput.disabled = false;
-       if (streamInfo) streamInfo.classList.remove("visible", "slide-in");
-     }
-   };
-   // Function to fetch and display uploaded files
-   async function fetchAndDisplayFiles(uidFromParam) {
-     console.log('[UPLOAD] fetchAndDisplayFiles called with uid:', uidFromParam);
-     // Get the file list element
-     const fileList = document.getElementById('file-list');
-     if (!fileList) {
-       const errorMsg = 'File list element not found in DOM';
-       console.error(errorMsg);
-       return showErrorInUI(errorMsg);
-     }
-     // Get UID from parameter, localStorage, or cookie
-     const uid = uidFromParam || localStorage.getItem('uid') || getCookie('uid');
-     const authToken = localStorage.getItem('authToken');
-     const headers = {
-       'Accept': 'application/json',
-     };
-     // Include auth token in headers if available, but don't fail if it's not
-     // The server should handle both token-based and UID-based auth
-     if (authToken) {
-       headers['Authorization'] = `Bearer ${authToken}`;
-     } else {
-       console.debug('[UPLOAD] No auth token available, using UID-only authentication');
-     }
-     console.log('[UPLOAD] Auth state - UID:', uid, 'Token exists:', !!authToken);
-     if (!uid) {
-       console.error('[UPLOAD] No UID found in any source');
-       fileList.innerHTML = '<li class="error-message">User session expired. Please refresh the page.</li>';
-       return;
-     }
-     // Log the authentication method being used
-     if (!authToken) {
-       console.debug('[UPLOAD] No auth token found, using UID-only authentication');
-     } else {
-       console.debug('[UPLOAD] Using token-based authentication');
-     }
-     // Show loading state
-     fileList.innerHTML = '<li class="loading-message">Loading files...</li>';
-     try {
-       console.log(`[DEBUG] Fetching files for user: ${uid}`);
-       const response = await fetch(`/me/${uid}`, {
-         headers: {
-           'Authorization': authToken ? `Bearer ${authToken}` : '',
-           'Content-Type': 'application/json',
-         },
-       });
-       console.log('[DEBUG] Response status:', response.status, response.statusText);
-       if (!response.ok) {
-         const errorText = await response.text();
-         const errorMsg = `Failed to fetch files: ${response.status} ${response.statusText} - ${errorText}`;
-         console.error(`[ERROR] ${errorMsg}`);
-         throw new Error(errorMsg);
-       }
-       const data = await response.json();
-       console.log('[DEBUG] Received files data:', data);
-       if (!data.files) {
-         throw new Error('Invalid response format: missing files array');
-       }
-       if (data.files.length > 0) {
-         // Sort files by name
-         const sortedFiles = [...data.files].sort((a, b) => a.name.localeCompare(b.name));
-         fileList.innerHTML = sortedFiles.map(file => {
-           const sizeMB = (file.size / (1024 * 1024)).toFixed(2);
-           const displayName = file.original_name || file.name;
-           const isRenamed = file.original_name && file.original_name !== file.name;
-           return `
-             <li class="file-item" data-filename="${file.name}">
-               <div class="file-name" title="${isRenamed ? `Stored as: ${file.name}` : displayName}">
-                 ${displayName}
-                 ${isRenamed ? `<div class="stored-as"><button class="delete-file" data-filename="${file.name}" data-original-name="${file.original_name}" title="Delete file">🗑️</button></div>` :
-                 `<button class="delete-file" data-filename="${file.name}" data-original-name="${file.original_name}" title="Delete file">🗑️</button>`}
-               </div>
-               <span class="file-size">${sizeMB} MB</span>
-             </li>
-           `;
-         }).join('');
-       } else {
-         fileList.innerHTML = '<li class="empty-message">No files uploaded yet</li>';
-       }
-       // Delete button handling is now managed by dashboard.js
-       // Update quota display if available
-       if (data.quota !== undefined) {
-         const bar = document.getElementById('quota-bar');
-         const text = document.getElementById('quota-text');
-         const quotaSec = document.getElementById('quota-meter');
-         if (bar && text && quotaSec) {
-           quotaSec.hidden = false;
-           bar.value = data.quota;
-           bar.max = 100;
-           text.textContent = `${data.quota.toFixed(1)} MB`;
-         }
-       }
-     } catch (error) {
-       const errorMessage = `Error loading file list: ${error.message || 'Unknown error'}`;
-       console.error('[ERROR]', errorMessage, error);
-       showErrorInUI(errorMessage, fileList);
-     }
-   }
-   // Helper function to show error messages in the UI
-   function showErrorInUI(message, targetElement = null) {
-     const errorHtml = `
-       <div style="
-         padding: 10px;
-         margin: 5px 0;
-         background: #2a0f0f;
-         border-left: 3px solid #f55;
-         color: var(--error-hover);
-         font-family: monospace;
-         font-size: 0.9em;
-         white-space: pre-wrap;
-         word-break: break-word;
-       ">
-         <div style="font-weight: bold; color: var(--error);">Error loading files</div>
-         <div style="margin-top: 5px;">${message}</div>
-         <div style="margin-top: 10px; font-size: 0.8em; color: var(--text-muted);">
-           Check browser console for details
-         </div>
-       </div>
-     `;
-     if (targetElement) {
-       targetElement.innerHTML = errorHtml;
-     } else {
-       // If no target element, try to find it
-       const fileList = document.getElementById('file-list');
-       if (fileList) fileList.innerHTML = errorHtml;
-     }
-   }
-   // Helper function to get cookie value by name
+   const fileList = document.getElementById("file-list");
+   // Early exit if critical UI elements are missing
+   if (!dropzone || !fileInput || !fileList) {
+     // Debug messages disabled
+     return;
+   }
+   // Attach all event listeners
+   initializeUploadListeners();
+   /**
+    * Main upload function
+    * @param {File} file - The file to upload
+    */
+   async function upload(file) {
+     // Get user ID from localStorage or cookie
+     const uid = localStorage.getItem('uid') || getCookie('uid');
+     if (!uid) {
+       // Debug messages disabled
+       showToast("You must be logged in to upload files.", "error");
+       return;
+     }
+     // Debug messages disabled
+     // Create and display the upload status indicator
+     const statusDiv = createStatusIndicator(file.name);
+     fileList.prepend(statusDiv);
+     const progressBar = statusDiv.querySelector('.progress-bar');
+     const statusText = statusDiv.querySelector('.status-text');
+     const formData = new FormData();
+     formData.append("file", file);
+     formData.append("uid", uid);
+     try {
+       const response = await fetch(`/upload`, {
+         method: "POST",
+         body: formData,
+         headers: {
+           'Accept': 'application/json',
+         },
+       });
+       if (!response.ok) {
+         const errorData = await response.json().catch(() => ({ detail: 'Upload failed with non-JSON response.' }));
+         throw new Error(errorData.detail || 'Unknown upload error');
+       }
+       const result = await response.json();
+       // Debug messages disabled
+       playBeep(800, 0.2); // Success beep - higher frequency
+       // Update UI to show success
+       statusText.textContent = 'Success!';
+       progressBar.style.width = '100%';
+       progressBar.style.backgroundColor = 'var(--success-color)';
+       // Remove the status indicator after a short delay
+       setTimeout(() => {
+         statusDiv.remove();
+       }, 2000);
+       // --- Post-Upload Actions ---
+       await postUploadActions(uid);
+     } catch (error) {
+       // Debug messages disabled
+       playBeep(200, 0.5); // Error beep - lower frequency, longer duration
+       statusText.textContent = `Error: ${error.message}`;
+       progressBar.style.backgroundColor = 'var(--error-color)';
+       statusDiv.classList.add('upload-error');
+     }
+   }
+   /**
+    * Actions to perform after a successful upload.
+    * @param {string} uid - The user's ID
+    */
+   async function postUploadActions(uid) {
+     // 1. Refresh the user's personal stream if the function is available
+     if (window.loadProfileStream) {
+       await window.loadProfileStream(uid);
+     }
+     // 2. Refresh the file list by re-fetching and then displaying.
+     if (window.fetchAndDisplayFiles) {
+       // Use email-based UID for file operations if available, fallback to uid
+       const fileOperationUid = localStorage.getItem('uid') || uid; // uid is now email-based
+       // Debug messages disabled
+       await window.fetchAndDisplayFiles(fileOperationUid);
+     }
+     // 3. Update quota display after upload
+     if (window.updateQuotaDisplay) {
+       const quotaUid = localStorage.getItem('uid') || uid;
+       // Debug messages disabled
+       await window.updateQuotaDisplay(quotaUid);
+     }
+     // 4. Refresh the public stream list to update the last update time
+     if (window.refreshStreamList) {
+       await window.refreshStreamList();
+     }
+   }
+   /**
+    * Creates the DOM element for the upload status indicator.
+    * @param {string} fileName - The name of the file being uploaded.
+    * @returns {HTMLElement}
+    */
+   function createStatusIndicator(fileName) {
+     const statusDiv = document.createElement('div');
+     statusDiv.className = 'upload-status-indicator';
+     statusDiv.innerHTML = `
+       <div class="file-info">
+         <span class="file-name">${fileName}</span>
+         <span class="status-text">Uploading...</span>
+       </div>
+       <div class="progress-container">
+         <div class="progress-bar"></div>
+       </div>
+     `;
+     return statusDiv;
+   }
+   /**
+    * Initializes all event listeners for the upload UI.
+    */
+   function initializeUploadListeners() {
+     dropzone.addEventListener("click", () => {
+       fileInput.click();
+     });
+     dropzone.addEventListener("dragover", (e) => {
+       e.preventDefault();
+       dropzone.classList.add("dragover");
+     });
+     dropzone.addEventListener("dragleave", () => {
+       dropzone.classList.remove("dragover");
+     });
+     dropzone.addEventListener("drop", (e) => {
+       e.preventDefault();
+       dropzone.classList.remove("dragover");
+       const file = e.dataTransfer.files[0];
+       if (file) {
+         upload(file);
+       }
+     });
+     fileInput.addEventListener("change", (e) => {
+       const file = e.target.files[0];
+       if (file) {
+         upload(file);
+       }
+     });
+   }
+   /**
+    * Helper function to get a cookie value by name.
+    * @param {string} name - The name of the cookie.
+    * @returns {string|null}
+    */
  function getCookie(name) {
    const value = `; ${document.cookie}`;
    const parts = value.split(`; ${name}=`);
@ -268,35 +180,6 @@ document.addEventListener('DOMContentLoaded', () => {
    return null;
  }
- // Export functions for use in other modules
+ // Make the upload function globally accessible if needed by other scripts
  window.upload = upload;
- window.fetchAndDisplayFiles = fetchAndDisplayFiles;
- if (dropzone && fileInput) {
-   dropzone.addEventListener("click", () => {
-     console.log("[DEBUG] Dropzone clicked");
-     fileInput.click();
-     console.log("[DEBUG] fileInput.click() called");
-   });
-   dropzone.addEventListener("dragover", (e) => {
-     e.preventDefault();
-     dropzone.classList.add("dragover");
-     dropzone.style.transition = "background-color 0.3s ease";
-   });
-   dropzone.addEventListener("dragleave", () => {
-     dropzone.classList.remove("dragover");
-   });
-   dropzone.addEventListener("drop", (e) => {
-     dropzone.classList.add("pulse");
-     setTimeout(() => dropzone.classList.remove("pulse"), 400);
-     e.preventDefault();
-     dropzone.classList.remove("dragover");
-     const file = e.dataTransfer.files[0];
-     if (file) upload(file);
-   });
-   fileInput.addEventListener("change", (e) => {
-     const file = e.target.files[0];
-     if (file) upload(file);
-   });
- }
  });
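postUploadActions() above only invokes hooks that other modules have registered on window. The contract it assumes looks roughly like this; the property names are the ones checked in the diff, while the bodies here are illustrative placeholders:

// Sketch of the global hooks postUploadActions() looks for. Each module
// registers its own refresher on window (bodies are assumptions).
window.loadProfileStream = async (uid) => { /* reload the /audio/<uid>/stream.opus player */ };
window.fetchAndDisplayFiles = async (uid) => { /* re-fetch /me/<uid> and render #file-list */ };
window.updateQuotaDisplay = async (uid) => { /* refresh the quota meter */ };
window.refreshStreamList = async () => { /* re-pull the public stream list */ };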
upload.py:
@ -23,7 +23,8 @@ DATA_ROOT = Path("./data")
  @limiter.limit("5/minute")
  @router.post("/upload")
- async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)):
+ def upload(request: Request, uid: str = Form(...), file: UploadFile = Form(...)):
+     # Import here to avoid circular imports
      from log import log_violation
      import time
@ -32,183 +33,259 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f
      log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Starting upload of {file.filename}")
      try:
-         # First, verify the user exists and is confirmed
-         user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
-         if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
-             user = user[0]
-         log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}")
-         if not user or not hasattr(user, "confirmed") or not user.confirmed:
-             raise HTTPException(status_code=403, detail="Account not confirmed")
-         # Check quota before doing any file operations
-         quota = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
-         if quota.storage_bytes >= 100 * 1024 * 1024:
-             raise HTTPException(status_code=400, detail="Quota exceeded")
-         # Create user directory if it doesn't exist
-         user_dir = DATA_ROOT / uid
-         user_dir.mkdir(parents=True, exist_ok=True)
-         # Generate a unique filename for the processed file first
-         import uuid
-         unique_name = f"{uuid.uuid4()}.opus"
-         raw_ext = file.filename.split(".")[-1].lower()
-         raw_path = user_dir / ("raw." + raw_ext)
-         processed_path = user_dir / unique_name
-         # Clean up any existing raw files first (except the one we're about to create)
-         for old_file in user_dir.glob('raw.*'):
-             try:
-                 if old_file != raw_path:  # Don't delete the file we're about to create
-                     old_file.unlink(missing_ok=True)
-                     log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}")
-             except Exception as e:
-                 log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}")
-         # Save the uploaded file temporarily
-         log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}")
-         try:
-             with open(raw_path, "wb") as f:
-                 content = await file.read()
-                 if not content:
-                     raise ValueError("Uploaded file is empty")
-                 f.write(content)
-             log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}")
-         except Exception as e:
-             log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}")
-             raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}")
-         # Ollama music/singing check is disabled for this release
-         log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled")
-         try:
-             convert_to_opus(str(raw_path), str(processed_path))
-         except Exception as e:
-             raw_path.unlink(missing_ok=True)
-             raise HTTPException(status_code=500, detail=str(e))
-         original_size = raw_path.stat().st_size
-         raw_path.unlink(missing_ok=True)  # cleanup
-         # First, verify the file was created and has content
-         if not processed_path.exists() or processed_path.stat().st_size == 0:
-             raise HTTPException(status_code=500, detail="Failed to process audio file")
-         # Concatenate all .opus files in random order to stream.opus for public playback
-         # This is now done after the file is in its final location with log ID
-         from concat_opus import concat_opus_files
-         def update_stream_opus():
-             try:
-                 concat_opus_files(user_dir, user_dir / "stream.opus")
-             except Exception as e:
-                 # fallback: just use the latest processed file if concat fails
-                 import shutil
-                 stream_path = user_dir / "stream.opus"
-                 shutil.copy2(processed_path, stream_path)
-                 log_violation("STREAM_UPDATE", request.client.host, uid,
-                     f"[fallback] Updated stream.opus with {processed_path}")
-         # We'll call this after the file is in its final location
-         # Get the final file size
-         size = processed_path.stat().st_size
-         # Start a transaction
-         try:
-             # Create a log entry with the original filename
-             log = UploadLog(
-                 uid=uid,
-                 ip=request.client.host,
-                 filename=file.filename,  # Store original filename
-                 processed_filename=unique_name,  # Store the processed filename
-                 size_bytes=size
-             )
-             db.add(log)
-             db.flush()  # Get the log ID without committing
-             # Rename the processed file to include the log ID for better tracking
-             processed_with_id = user_dir / f"{log.id}_{unique_name}"
-             if processed_path.exists():
-                 # First check if there's already a file with the same UUID but different prefix
-                 for existing_file in user_dir.glob(f"*_{unique_name}"):
-                     if existing_file != processed_path:
-                         log_violation("CLEANUP", request.client.host, uid,
-                             f"[UPLOAD] Removing duplicate file: {existing_file}")
-                         existing_file.unlink(missing_ok=True)
-                 # Now do the rename
-                 if processed_path != processed_with_id:
-                     if processed_with_id.exists():
-                         processed_with_id.unlink(missing_ok=True)
-                     processed_path.rename(processed_with_id)
-                     processed_path = processed_with_id
-             # Only clean up raw.* files, not previously uploaded opus files
-             for old_temp_file in user_dir.glob('raw.*'):
-                 try:
-                     old_temp_file.unlink(missing_ok=True)
-                     log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}")
-                 except Exception as e:
-                     log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}")
-             # Get or create quota
-             quota = db.query(UserQuota).filter(UserQuota.uid == uid).first()
-             if not quota:
-                 quota = UserQuota(uid=uid, storage_bytes=0)
-                 db.add(quota)
-             # Update quota with the new file size
-             quota.storage_bytes = sum(
-                 f.stat().st_size
-                 for f in user_dir.glob('*.opus')
-                 if f.name != 'stream.opus' and f != processed_path
-             ) + size
-             # Update public streams
-             update_public_streams(uid, quota.storage_bytes, db)
-             # Commit the transaction
-             db.commit()
-             # Now that the transaction is committed and files are in their final location,
-             # update the stream.opus file to include all files
-             update_stream_opus()
-         except Exception as e:
-             db.rollback()
-             # Clean up the processed file if something went wrong
-             if processed_path.exists():
-                 processed_path.unlink(missing_ok=True)
-             raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
-         return {
-             "filename": file.filename,
-             "original_size": round(original_size / 1024, 1),
-             "quota": {
-                 "used_mb": round(quota.storage_bytes / (1024 * 1024), 2)
-             }
-         }
-     except HTTPException as e:
-         # Already a JSON response, just re-raise
-         raise e
-     except Exception as e:
-         import traceback
-         tb = traceback.format_exc()
-         try:
-             log_violation("UPLOAD", request.client.host, uid, f"Unexpected error: {type(e).__name__}: {str(e)}\n{tb}")
-         except Exception:
-             pass
-         return {"detail": f"Server error: {type(e).__name__}: {str(e)}"}
+         # Use the database session context manager to handle the session
+         with get_db() as db:
+             try:
+                 # First, verify the user exists and is confirmed
+                 user = db.query(User).filter(
+                     (User.username == uid) | (User.email == uid)
+                 ).first()
+                 if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
+                     user = user[0]
+                 if not user:
+                     log_violation("UPLOAD", request.client.host, uid, f"User {uid} not found")
+                     raise HTTPException(status_code=404, detail="User not found")
+                 log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}")
+                 # Check if user is confirmed
+                 if not hasattr(user, 'confirmed') or not user.confirmed:
+                     raise HTTPException(status_code=403, detail="Account not confirmed")
+                 # Use user.email as the proper UID for quota and directory operations
+                 user_email = user.email
+                 quota = db.get(UserQuota, user_email) or UserQuota(uid=user_email, storage_bytes=0)
+                 if quota.storage_bytes >= 100 * 1024 * 1024:
+                     raise HTTPException(status_code=400, detail="Quota exceeded")
+                 # Create user directory using email (proper UID) - not the uid parameter which could be username
+                 user_dir = DATA_ROOT / user_email
+                 user_dir.mkdir(parents=True, exist_ok=True)
+                 # Generate a unique filename for the processed file first
+                 import uuid
+                 unique_name = f"{uuid.uuid4()}.opus"
+                 raw_ext = file.filename.split(".")[-1].lower()
+                 raw_path = user_dir / ("raw." + raw_ext)
+                 processed_path = user_dir / unique_name
+                 # Clean up any existing raw files first (except the one we're about to create)
+                 for old_file in user_dir.glob('raw.*'):
+                     try:
+                         if old_file != raw_path:  # Don't delete the file we're about to create
+                             old_file.unlink(missing_ok=True)
+                             log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}")
+                     except Exception as e:
+                         log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}")
+                 # Save the uploaded file temporarily
+                 log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}")
+                 try:
+                     with open(raw_path, "wb") as f:
+                         content = file.file.read()
+                         if not content:
+                             raise ValueError("Uploaded file is empty")
+                         f.write(content)
+                     log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}")
+                     # EARLY DB RECORD CREATION: after upload completes, before processing
+                     early_log = UploadLog(
+                         uid=user_email,
+                         ip=request.client.host,
+                         filename=file.filename,  # original filename from user
+                         processed_filename=None,  # not yet processed
+                         size_bytes=None  # not yet known
+                     )
+                     db.add(early_log)
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE FLUSH] Before db.flush() after early_log add")
+                     db.flush()
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE FLUSH] After db.flush() after early_log add")
+                     db.commit()
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE COMMIT] After db.commit() after early_log add")
+                     early_log_id = early_log.id
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[DEBUG] Early UploadLog created: id={early_log_id}, filename={file.filename}, UploadLog.filename={early_log.filename}")
+                 except Exception as e:
+                     log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}")
+                     raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}")
+                 # Ollama music/singing check is disabled for this release
+                 log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled")
+                 try:
+                     convert_to_opus(str(raw_path), str(processed_path))
+                 except Exception as e:
+                     raw_path.unlink(missing_ok=True)
+                     raise HTTPException(status_code=500, detail=str(e))
+                 original_size = raw_path.stat().st_size
+                 raw_path.unlink(missing_ok=True)  # cleanup
+                 # First, verify the file was created and has content
+                 if not processed_path.exists() or processed_path.stat().st_size == 0:
+                     raise HTTPException(status_code=500, detail="Failed to process audio file")
+                 # Get the final file size
+                 size = processed_path.stat().st_size
+                 # Concatenate all .opus files in random order to stream.opus for public playback
+                 # This is now done after the file is in its final location with log ID
+                 from concat_opus import concat_opus_files
+                 def update_stream_opus():
+                     try:
+                         concat_opus_files(user_dir, user_dir / "stream.opus")
+                     except Exception as e:
+                         # fallback: just use the latest processed file if concat fails
+                         import shutil
+                         stream_path = user_dir / "stream.opus"
+                         shutil.copy2(processed_path, stream_path)
+                         log_violation("STREAM_UPDATE", request.client.host, uid,
+                             f"[fallback] Updated stream.opus with {processed_path}")
+                 # Start a transaction
+                 try:
+                     # Update the early DB record with processed filename and size
+                     log = db.get(UploadLog, early_log_id)
+                     log.processed_filename = unique_name
+                     log.size_bytes = size
+                     db.add(log)
+                     db.flush()  # Ensure update is committed
+                     # Assert that log.filename is still the original filename, never overwritten
+                     if log.filename is None or (log.filename.endswith('.opus') and log.filename == log.processed_filename):
+                         log_violation("UPLOAD_ERROR", request.client.host, uid,
+                             f"[ASSERTION FAILED] UploadLog.filename was overwritten! id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}")
+                         raise RuntimeError(f"UploadLog.filename was overwritten! id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}")
+                     else:
+                         log_violation("UPLOAD_DEBUG", request.client.host, uid,
+                             f"[ASSERTION OK] After update: id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}")
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[COMMIT] Committing UploadLog for id={log.id}")
+                     db.commit()
+                     log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[COMMIT OK] UploadLog committed for id={log.id}")
+                     # Rename the processed file to include the log ID for better tracking
+                     processed_with_id = user_dir / f"{log.id}_{unique_name}"
+                     if processed_path.exists():
+                         # First check if there's already a file with the same UUID but different prefix
+                         for existing_file in user_dir.glob(f"*_{unique_name}"):
+                             if existing_file != processed_path:
+                                 log_violation("CLEANUP", request.client.host, uid,
+                                     f"[UPLOAD] Removing duplicate file: {existing_file}")
+                                 existing_file.unlink(missing_ok=True)
+                         # Now do the rename
+                         if processed_path != processed_with_id:
+                             if processed_with_id.exists():
+                                 processed_with_id.unlink(missing_ok=True)
+                             processed_path.rename(processed_with_id)
+                             processed_path = processed_with_id
+                     # Only clean up raw.* files, not previously uploaded opus files
+                     for old_temp_file in user_dir.glob('raw.*'):
+                         try:
+                             old_temp_file.unlink(missing_ok=True)
+                             log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}")
+                         except Exception as e:
+                             log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}")
+                     # Get or create quota
+                     quota = db.query(UserQuota).filter(UserQuota.uid == user_email).first()
+                     if not quota:
+                         quota = UserQuota(uid=user_email, storage_bytes=0)
+                         db.add(quota)
+                     # Update quota with the new file size
+                     quota.storage_bytes = sum(
+                         f.stat().st_size
+                         for f in user_dir.glob('*.opus')
+                         if f.name != 'stream.opus' and f != processed_path
+                     ) + size
+                     # Update public streams
+                     update_public_streams(user_email, quota.storage_bytes, db)
+                     # The context manager will handle commit/rollback
+                     # Now that the transaction is committed and files are in their final location,
+                     # update the stream.opus file to include all files
+                     update_stream_opus()
+                     return {
+                         "filename": file.filename,
+                         "original_size": round(original_size / 1024, 1),
+                         "quota": {
+                             "used_mb": round(quota.storage_bytes / (1024 * 1024), 2)
+                         }
+                     }
+                 except HTTPException as e:
+                     # Re-raise HTTP exceptions as they are already properly formatted
+                     db.rollback()
+                     raise e
+                 except Exception as e:
+                     # Log the error and return a 500 response
+                     db.rollback()
+                     import traceback
+                     tb = traceback.format_exc()
+                     # Try to log the error
+                     try:
+                         log_violation("UPLOAD_ERROR", request.client.host, uid, f"Error processing upload: {str(e)}\n{tb}")
+                     except Exception:
+                         pass  # If logging fails, continue with the error response
+                     # Clean up the processed file if it exists
+                     if 'processed_path' in locals() and processed_path.exists():
+                         processed_path.unlink(missing_ok=True)
+                     raise HTTPException(status_code=500, detail=f"Error processing upload: {str(e)}")
+             except HTTPException as e:
+                 # Re-raise HTTP exceptions as they are already properly formatted
+                 db.rollback()
+                 raise e
+             except Exception as e:
+                 # Log the error and return a 500 response
+                 db.rollback()
+                 import traceback
+                 tb = traceback.format_exc()
+                 # Try to log the error
+                 try:
+                     log_violation("UPLOAD_ERROR", request.client.host, uid, f"Error processing upload: {str(e)}\n{tb}")
+                 except Exception:
+                     pass  # If logging fails, continue with the error response
+                 # Clean up the processed file if it exists
+                 if 'processed_path' in locals() and processed_path.exists():
+                     processed_path.unlink(missing_ok=True)
+                 raise HTTPException(status_code=500, detail=f"Error processing upload: {str(e)}")
+     except HTTPException as e:
+         # Re-raise HTTP exceptions as they are already properly formatted
+         raise e
+     except Exception as e:
+         # Catch any other exceptions that might occur outside the main processing block
+         import traceback
+         tb = traceback.format_exc()
+         # Log and return a JSON error
+         try:
+             log_violation("UPLOAD_ERROR", request.client.host, uid, f"Unhandled error in upload handler: {str(e)}\n{tb}")
+         except:
+             pass  # If logging fails, continue with the error response
+         raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
  def update_public_streams(uid: str, storage_bytes: int, db: Session):
      """Update the public streams list in the database with the latest user upload info"""
      try:
-         # Get the user's info
-         user = db.query(User).filter(User.username == uid).first()
+         # Get the user's info - uid is now email-based
+         user = db.query(User).filter(User.email == uid).first()
          if not user:
              print(f"[WARNING] User {uid} not found when updating public streams")
              return
@ -221,7 +298,6 @@ def update_public_streams(uid: str, storage_bytes: int, db: Session):
          # Update the public stream info
          public_stream.username = user.username
-         public_stream.display_name = user.display_name or user.username
          public_stream.storage_bytes = storage_bytes
          public_stream.last_updated = datetime.utcnow()
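Seen from the client, the contract of this endpoint as exercised by upload.js is: POST multipart form data carrying uid and file, read detail from the JSON body on error, and quota.used_mb on success. A minimal consumer sketch, assuming a logged-in uid is stored in localStorage (the route is rate-limited to 5/minute):

// Sketch of a client for POST /upload, matching the handler's return shape.
async function uploadFile(file) {
  const form = new FormData();
  form.append('uid', localStorage.getItem('uid'));
  form.append('file', file);
  const res = await fetch('/upload', { method: 'POST', body: form });
  const body = await res.json();
  if (!res.ok) throw new Error(body.detail || 'Upload failed');
  // Success shape: { filename, original_size (KB), quota: { used_mb } }
  console.log(`${body.filename}: quota now ${body.quota.used_mb} MB`);
  return body;
}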