From 72f79b1059dc2d4ddc0f3cd92848ee701fdd4b3b Mon Sep 17 00:00:00 2001 From: oib Date: Thu, 7 Aug 2025 19:39:22 +0200 Subject: [PATCH] Update authentication system, database models, and UI components --- account_router.py | 150 +++--- analyze_db_legacy.py | 355 +++++++++++++ auth.py | 94 ++-- auth_router.py | 115 +++-- cleanup_devuser_old_format.sql | 31 ++ cleanup_final_orphaned.sql | 19 + cleanup_legacy_db.sql | 169 ++++++ cleanup_oibchello_old_format.sql | 31 ++ cleanup_old_format_users.sql | 28 + cleanup_orphaned_uploadlog.sql | 17 + cleanup_remaining_orphaned.sql | 6 + concat_opus.py | 78 +++ convert_to_opus.py | 39 ++ database.py | 23 +- dictastream_backup_20250806_134951.sql | 307 +++++++++++ docs/auth-consolidation.md | 131 +++++ execute_db_cleanup.py | 221 ++++++++ fix_db_constraints.py | 174 +++++++ fix_dbsession_fk.sql | 13 + list_streams.py | 156 ++++++ log.py | 3 +- magic.py | 137 +++-- main.py | 681 +++++++++++++++++++----- migrate_dbsession_fk.sql | 13 + migrate_uid_to_email.py | 168 ++++++ models.py | 33 +- register.py | 126 ++--- simple_db_cleanup.py | 107 ++++ static/audio-player.js | 202 +++++++- static/auth-manager.js | 688 +++++++++++++++++++++++++ static/auth.js | 275 +--------- static/cleanup-auth.js | 38 ++ static/css/components/file-upload.css | 61 ++- static/dashboard.js | 280 +++++----- static/file-display.js | 220 ++++++++ static/global-audio-manager.js | 8 +- static/index.html | 13 +- static/magic-login.js | 115 ++--- static/personal-player.js | 163 ++---- static/remove-confirmed-uid.js | 70 +++ static/shared-audio-player.js | 162 ++++++ static/sound.js | 33 +- static/streams-ui.js | 213 ++------ static/style.css | 18 +- static/toast.js | 2 +- static/uid-validator.js | 169 ++++++ static/upload.js | 417 ++++++--------- upload.py | 398 ++++++++------ 48 files changed, 5328 insertions(+), 1642 deletions(-) create mode 100644 analyze_db_legacy.py create mode 100644 cleanup_devuser_old_format.sql create mode 100644 cleanup_final_orphaned.sql create mode 100644 cleanup_legacy_db.sql create mode 100644 cleanup_oibchello_old_format.sql create mode 100644 cleanup_old_format_users.sql create mode 100644 cleanup_orphaned_uploadlog.sql create mode 100644 cleanup_remaining_orphaned.sql create mode 100644 concat_opus.py create mode 100644 convert_to_opus.py create mode 100644 dictastream_backup_20250806_134951.sql create mode 100644 docs/auth-consolidation.md create mode 100644 execute_db_cleanup.py create mode 100644 fix_db_constraints.py create mode 100644 fix_dbsession_fk.sql create mode 100644 list_streams.py create mode 100644 migrate_dbsession_fk.sql create mode 100644 migrate_uid_to_email.py create mode 100644 simple_db_cleanup.py create mode 100644 static/auth-manager.js create mode 100644 static/cleanup-auth.js create mode 100644 static/file-display.js create mode 100644 static/remove-confirmed-uid.js create mode 100644 static/shared-audio-player.js create mode 100644 static/uid-validator.js diff --git a/account_router.py b/account_router.py index 39951ec..11926da 100644 --- a/account_router.py +++ b/account_router.py @@ -11,116 +11,126 @@ from typing import Dict, Any router = APIRouter(prefix="/api", tags=["account"]) @router.post("/delete-account") -async def delete_account(data: Dict[str, Any], request: Request, db: Session = Depends(get_db)): +async def delete_account(data: Dict[str, Any], request: Request): try: # Get UID from request data uid = data.get("uid") if not uid: - print(f"[DELETE_ACCOUNT] Error: Missing UID in request data") + # Debug messages 
disabled raise HTTPException(status_code=400, detail="Missing UID") ip = request.client.host - print(f"[DELETE_ACCOUNT] Processing delete request for UID: {uid} from IP: {ip}") + # Debug messages disabled # Verify user exists and IP matches - # Handle both email-based and username-based UIDs for backward compatibility - user = None - - # First try to find by email (new UID format) - if '@' in uid: - user = db.exec(select(User).where(User.email == uid)).first() - print(f"[DELETE_ACCOUNT] Looking up user by email: {uid}") - - # If not found by email, try by username (legacy UID format) - if not user: - user = db.exec(select(User).where(User.username == uid)).first() - print(f"[DELETE_ACCOUNT] Looking up user by username: {uid}") + # Use the database session context manager + with get_db() as db: + # Handle both email-based and username-based UIDs for backward compatibility + user = None - if not user: - print(f"[DELETE_ACCOUNT] Error: User {uid} not found (tried both email and username lookup)") - raise HTTPException(status_code=404, detail="User not found") + # First try to find by email (new UID format) + if '@' in uid: + user = db.query(User).filter(User.email == uid).first() + # Debug messages disabled - # Use the actual email as the UID for database operations - actual_uid = user.email - print(f"[DELETE_ACCOUNT] Found user: {user.username} ({user.email}), using email as UID: {actual_uid}") + # If not found by email, try by username (legacy UID format) + if not user: + user = db.query(User).filter(User.username == uid).first() + # Debug messages disabled + + if not user: + # Debug messages disabled + raise HTTPException(status_code=404, detail="User not found") - if user.ip != ip: - print(f"[DELETE_ACCOUNT] Error: IP mismatch. User IP: {user.ip}, Request IP: {ip}") + # Extract user attributes while the object is still bound to the session + actual_uid = user.email + user_ip = user.ip + username = user.username + + # Debug messages disabled + + if user_ip != ip: + # Debug messages disabled raise HTTPException(status_code=403, detail="Unauthorized: IP address does not match") - # Start transaction - try: - # Delete user's upload logs (use actual_uid which is always the email) - uploads = db.exec(select(UploadLog).where(UploadLog.uid == actual_uid)).all() - for upload in uploads: - db.delete(upload) - print(f"[DELETE_ACCOUNT] Deleted {len(uploads)} upload logs for user {actual_uid}") + # Use the database session context manager for all database operations + with get_db() as db: + try: + # Delete user's upload logs (use actual_uid which is always the email) + uploads = db.query(UploadLog).filter(UploadLog.uid == actual_uid).all() + for upload in uploads: + db.delete(upload) + # Debug messages disabled - # Delete user's public streams - streams = db.exec(select(PublicStream).where(PublicStream.uid == actual_uid)).all() - for stream in streams: - db.delete(stream) - print(f"[DELETE_ACCOUNT] Deleted {len(streams)} public streams for user {actual_uid}") + # Delete user's public streams + streams = db.query(PublicStream).filter(PublicStream.uid == actual_uid).all() + for stream in streams: + db.delete(stream) + # Debug messages disabled - # Delete user's quota - quota = db.get(UserQuota, actual_uid) - if quota: - db.delete(quota) - print(f"[DELETE_ACCOUNT] Deleted quota for user {actual_uid}") + # Delete user's quota + quota = db.get(UserQuota, actual_uid) + if quota: + db.delete(quota) + # Debug messages disabled - # Delete user's active sessions (check both email and username as user_id) - 
sessions_by_email = db.exec(select(DBSession).where(DBSession.user_id == actual_uid)).all() - sessions_by_username = db.exec(select(DBSession).where(DBSession.user_id == user.username)).all() - - all_sessions = list(sessions_by_email) + list(sessions_by_username) - # Remove duplicates using token (primary key) instead of id - unique_sessions = {session.token: session for session in all_sessions}.values() - - for session in unique_sessions: - db.delete(session) - print(f"[DELETE_ACCOUNT] Deleted {len(unique_sessions)} active sessions for user {actual_uid} (checked both email and username)") + # Delete user's active sessions (check both email and username as uid) + sessions_by_email = db.query(DBSession).filter(DBSession.uid == actual_uid).all() + sessions_by_username = db.query(DBSession).filter(DBSession.uid == username).all() + + all_sessions = list(sessions_by_email) + list(sessions_by_username) + # Remove duplicates using token (primary key) + unique_sessions = {session.token: session for session in all_sessions}.values() + + for session in unique_sessions: + db.delete(session) + # Debug messages disabled - # Delete user account - user_obj = db.get(User, actual_uid) # Use actual_uid which is the email - if user_obj: - db.delete(user_obj) - print(f"[DELETE_ACCOUNT] Deleted user account {actual_uid}") + # Delete user account + user_obj = db.get(User, actual_uid) # Use actual_uid which is the email + if user_obj: + db.delete(user_obj) + # Debug messages disabled - db.commit() - print(f"[DELETE_ACCOUNT] Database changes committed for user {actual_uid}") + db.commit() + # Debug messages disabled - except Exception as e: - db.rollback() - print(f"[DELETE_ACCOUNT] Database error during account deletion: {str(e)}") - raise HTTPException(status_code=500, detail="Database error during account deletion") + except Exception as e: + db.rollback() + # Debug messages disabled + # Debug messages disabled + raise HTTPException(status_code=500, detail="Database error during account deletion") # Delete user's files try: - user_dir = os.path.join('data', user.username) + # Use the email (actual_uid) for the directory name, which matches how files are stored + user_dir = os.path.join('data', actual_uid) real_user_dir = os.path.realpath(user_dir) # Security check to prevent directory traversal if not real_user_dir.startswith(os.path.realpath('data')): - print(f"[DELETE_ACCOUNT] Security alert: Invalid user directory path: {user_dir}") + # Debug messages disabled raise HTTPException(status_code=400, detail="Invalid user directory") if os.path.exists(real_user_dir): import shutil shutil.rmtree(real_user_dir, ignore_errors=True) - print(f"[DELETE_ACCOUNT] Deleted user directory: {real_user_dir}") + # Debug messages disabled else: - print(f"[DELETE_ACCOUNT] User directory not found: {real_user_dir}") + # Debug messages disabled + pass except Exception as e: - print(f"[DELETE_ACCOUNT] Error deleting user files: {str(e)}") + # Debug messages disabled # Continue even if file deletion fails, as the account is already deleted from the DB + pass - print(f"[DELETE_ACCOUNT] Successfully deleted account for user {actual_uid} (original UID: {uid})") + # Debug messages disabled return {"status": "success", "message": "Account and all associated data have been deleted"} except HTTPException as he: - print(f"[DELETE_ACCOUNT] HTTP Error {he.status_code}: {he.detail}") + # Debug messages disabled raise except Exception as e: - print(f"[DELETE_ACCOUNT] Unexpected error: {str(e)}") + # Debug messages disabled raise 
HTTPException(status_code=500, detail="An unexpected error occurred")
diff --git a/analyze_db_legacy.py b/analyze_db_legacy.py
new file mode 100644
index 0000000..fadee60
--- /dev/null
+++ b/analyze_db_legacy.py
@@ -0,0 +1,355 @@
+#!/usr/bin/env python3
+"""
+Database Legacy Data Analysis Script
+Analyzes the database for legacy data that doesn't match the current authentication implementation
+"""
+
+import sys
+from datetime import datetime, timedelta
+from sqlmodel import Session, select
+from database import engine
+from models import User, UserQuota, UploadLog, DBSession, PublicStream
+import re
+
+def validate_email_format(email):
+    """Validate email format with a basic regex (a pragmatic check, not full RFC 5322)"""
+    pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
+    return re.match(pattern, email) is not None
+
+def analyze_user_table():
+    """Analyze User table for legacy data issues"""
+    print("\n=== ANALYZING USER TABLE ===")
+    issues = []
+
+    with Session(engine) as session:
+        users = session.exec(select(User)).all()
+        print(f"Total users: {len(users)}")
+
+        for user in users:
+            user_issues = []
+
+            # Check if email (primary key) is valid email format
+            if not validate_email_format(user.email):
+                user_issues.append(f"Invalid email format: {user.email}")
+
+            # Check if username is also email format (current requirement)
+            if not validate_email_format(user.username):
+                user_issues.append(f"Username not in email format: {user.username}")
+
+            # Check if email and username match (should be same after migration)
+            if user.email != user.username:
+                user_issues.append(f"Email/username mismatch: email={user.email}, username={user.username}")
+
+            # Check for missing or empty display_name
+            if not user.display_name or user.display_name.strip() == "":
+                user_issues.append("Empty display_name")
+
+            # Check for very old tokens (potential security issue)
+            if user.token_created < datetime.utcnow() - timedelta(days=30):
+                user_issues.append(f"Very old token (created: {user.token_created})")
+
+            # Check for unconfirmed users
+            if not user.confirmed:
+                user_issues.append("Unconfirmed user")
+
+            if user_issues:
+                issues.append({
+                    'email': user.email,
+                    'username': user.username,
+                    'issues': user_issues
+                })
+
+    print(f"Users with issues: {len(issues)}")
+    for issue in issues:
+        print(f"  User {issue['email']}:")
+        for problem in issue['issues']:
+            print(f"    - {problem}")
+
+    return issues
+
+def analyze_session_table():
+    """Analyze DBSession table for legacy data issues"""
+    print("\n=== ANALYZING SESSION TABLE ===")
+    issues = []
+
+    with Session(engine) as session:
+        sessions = session.exec(select(DBSession)).all()
+        print(f"Total sessions: {len(sessions)}")
+
+        active_sessions = [s for s in sessions if s.is_active]
+        expired_sessions = [s for s in sessions if s.expires_at < datetime.utcnow()]
+        old_sessions = [s for s in sessions if s.created_at < datetime.utcnow() - timedelta(days=7)]
+
+        print(f"Active sessions: {len(active_sessions)}")
+        print(f"Expired sessions: {len(expired_sessions)}")
+        print(f"Sessions older than 7 days: {len(old_sessions)}")
+
+        for db_session in sessions:
+            session_issues = []
+
+            # Check if user_id is in email format (current requirement)
+            if not validate_email_format(db_session.user_id):
+                session_issues.append(f"user_id not in email format: {db_session.user_id}")
+
+            # Check for expired but still active sessions
+            if db_session.is_active and db_session.expires_at < datetime.utcnow():
+                session_issues.append(f"Expired but still marked active (expires: {db_session.expires_at})")
+
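+            # (These are the rows that cleanup_legacy_db.sql repairs with
+            #  "UPDATE dbsession SET is_active = false WHERE expires_at < NOW()")
+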
+ # Check for very old sessions that should be cleaned up + if db_session.created_at < datetime.utcnow() - timedelta(days=30): + session_issues.append(f"Very old session (created: {db_session.created_at})") + + # Check for sessions with 1-hour expiry (old system) + session_duration = db_session.expires_at - db_session.created_at + if session_duration < timedelta(hours=2): # Less than 2 hours indicates old 1-hour sessions + session_issues.append(f"Short session duration: {session_duration} (should be 24h)") + + if session_issues: + issues.append({ + 'token': db_session.token[:10] + '...', + 'user_id': db_session.user_id, + 'created_at': db_session.created_at, + 'expires_at': db_session.expires_at, + 'issues': session_issues + }) + + print(f"Sessions with issues: {len(issues)}") + for issue in issues: + print(f" Session {issue['token']} (user: {issue['user_id']}):") + for problem in issue['issues']: + print(f" - {problem}") + + return issues + +def analyze_quota_table(): + """Analyze UserQuota table for legacy data issues""" + print("\n=== ANALYZING USER QUOTA TABLE ===") + issues = [] + + with Session(engine) as session: + quotas = session.exec(select(UserQuota)).all() + print(f"Total quota records: {len(quotas)}") + + for quota in quotas: + quota_issues = [] + + # Check if uid is in email format (current requirement) + if not validate_email_format(quota.uid): + quota_issues.append(f"UID not in email format: {quota.uid}") + + # Check for negative storage + if quota.storage_bytes < 0: + quota_issues.append(f"Negative storage: {quota.storage_bytes}") + + # Check for excessive storage (over 100MB limit) + if quota.storage_bytes > 100 * 1024 * 1024: + quota_issues.append(f"Storage over 100MB limit: {quota.storage_bytes / (1024*1024):.1f}MB") + + if quota_issues: + issues.append({ + 'uid': quota.uid, + 'storage_bytes': quota.storage_bytes, + 'issues': quota_issues + }) + + print(f"Quota records with issues: {len(issues)}") + for issue in issues: + print(f" Quota {issue['uid']} ({issue['storage_bytes']} bytes):") + for problem in issue['issues']: + print(f" - {problem}") + + return issues + +def analyze_upload_log_table(): + """Analyze UploadLog table for legacy data issues""" + print("\n=== ANALYZING UPLOAD LOG TABLE ===") + issues = [] + + with Session(engine) as session: + uploads = session.exec(select(UploadLog)).all() + print(f"Total upload records: {len(uploads)}") + + for upload in uploads: + upload_issues = [] + + # Check if uid is in email format (current requirement) + if not validate_email_format(upload.uid): + upload_issues.append(f"UID not in email format: {upload.uid}") + + # Check for missing processed_filename + if not upload.processed_filename: + upload_issues.append(f"Missing processed_filename") + + # Check for negative file size + if upload.size_bytes < 0: + upload_issues.append(f"Negative file size: {upload.size_bytes}") + + # Check for very old uploads + if upload.created_at < datetime.utcnow() - timedelta(days=365): + upload_issues.append(f"Very old upload (created: {upload.created_at})") + + if upload_issues: + issues.append({ + 'id': upload.id, + 'uid': upload.uid, + 'filename': upload.filename, + 'created_at': upload.created_at, + 'issues': upload_issues + }) + + print(f"Upload records with issues: {len(issues)}") + for issue in issues: + print(f" Upload {issue['id']} (user: {issue['uid']}, file: {issue['filename']}):") + for problem in issue['issues']: + print(f" - {problem}") + + return issues + +def analyze_public_stream_table(): + """Analyze PublicStream table for 
legacy data issues""" + print("\n=== ANALYZING PUBLIC STREAM TABLE ===") + issues = [] + + with Session(engine) as session: + streams = session.exec(select(PublicStream)).all() + print(f"Total public stream records: {len(streams)}") + + for stream in streams: + stream_issues = [] + + # Check if uid is in email format (current requirement) + if not validate_email_format(stream.uid): + stream_issues.append(f"UID not in email format: {stream.uid}") + + # Check if username is also email format (should match uid) + if stream.username and not validate_email_format(stream.username): + stream_issues.append(f"Username not in email format: {stream.username}") + + # Check if uid and username match (should be same after migration) + if stream.username and stream.uid != stream.username: + stream_issues.append(f"UID/username mismatch: uid={stream.uid}, username={stream.username}") + + # Check for negative storage + if stream.storage_bytes < 0: + stream_issues.append(f"Negative storage: {stream.storage_bytes}") + + # Check for missing display_name + if not stream.display_name or stream.display_name.strip() == "": + stream_issues.append(f"Empty display_name") + + if stream_issues: + issues.append({ + 'uid': stream.uid, + 'username': stream.username, + 'display_name': stream.display_name, + 'issues': stream_issues + }) + + print(f"Public stream records with issues: {len(issues)}") + for issue in issues: + print(f" Stream {issue['uid']} (username: {issue['username']}):") + for problem in issue['issues']: + print(f" - {problem}") + + return issues + +def check_referential_integrity(): + """Check for referential integrity issues between tables""" + print("\n=== CHECKING REFERENTIAL INTEGRITY ===") + issues = [] + + with Session(engine) as session: + # Get all unique UIDs from each table + users = session.exec(select(User.email)).all() + user_usernames = session.exec(select(User.username)).all() + quotas = session.exec(select(UserQuota.uid)).all() + uploads = session.exec(select(UploadLog.uid)).all() + streams = session.exec(select(PublicStream.uid)).all() + sessions = session.exec(select(DBSession.user_id)).all() + + user_emails = set(users) + user_usernames_set = set(user_usernames) + quota_uids = set(quotas) + upload_uids = set(uploads) + stream_uids = set(streams) + session_uids = set(sessions) + + print(f"Unique user emails: {len(user_emails)}") + print(f"Unique user usernames: {len(user_usernames_set)}") + print(f"Unique quota UIDs: {len(quota_uids)}") + print(f"Unique upload UIDs: {len(upload_uids)}") + print(f"Unique stream UIDs: {len(stream_uids)}") + print(f"Unique session user_ids: {len(session_uids)}") + + # Check for orphaned records + orphaned_quotas = quota_uids - user_emails + orphaned_uploads = upload_uids - user_emails + orphaned_streams = stream_uids - user_emails + orphaned_sessions = session_uids - user_usernames_set # Sessions use username as user_id + + if orphaned_quotas: + issues.append(f"Orphaned quota records (no matching user): {orphaned_quotas}") + + if orphaned_uploads: + issues.append(f"Orphaned upload records (no matching user): {orphaned_uploads}") + + if orphaned_streams: + issues.append(f"Orphaned stream records (no matching user): {orphaned_streams}") + + if orphaned_sessions: + issues.append(f"Orphaned session records (no matching user): {orphaned_sessions}") + + # Check for users without quota records + users_without_quota = user_emails - quota_uids + if users_without_quota: + issues.append(f"Users without quota records: {users_without_quota}") + + # Check for users 
without stream records + users_without_streams = user_emails - stream_uids + if users_without_streams: + issues.append(f"Users without stream records: {users_without_streams}") + + print(f"Referential integrity issues: {len(issues)}") + for issue in issues: + print(f" - {issue}") + + return issues + +def main(): + """Run complete database legacy analysis""" + print("=== DATABASE LEGACY DATA ANALYSIS ===") + print(f"Analysis started at: {datetime.utcnow()}") + + all_issues = {} + + try: + all_issues['users'] = analyze_user_table() + all_issues['sessions'] = analyze_session_table() + all_issues['quotas'] = analyze_quota_table() + all_issues['uploads'] = analyze_upload_log_table() + all_issues['streams'] = analyze_public_stream_table() + all_issues['integrity'] = check_referential_integrity() + + # Summary + print("\n=== SUMMARY ===") + total_issues = sum(len(issues) if isinstance(issues, list) else 1 for issues in all_issues.values()) + print(f"Total issues found: {total_issues}") + + for table, issues in all_issues.items(): + if issues: + count = len(issues) if isinstance(issues, list) else 1 + print(f" {table}: {count} issues") + + if total_issues == 0: + print("✅ No legacy data issues found! Database is clean.") + else: + print("⚠️ Legacy data issues found. Consider running cleanup scripts.") + + except Exception as e: + print(f"❌ Error during analysis: {e}") + return 1 + + return 0 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/auth.py b/auth.py index e74286c..781648e 100644 --- a/auth.py +++ b/auth.py @@ -1,7 +1,7 @@ """Authentication middleware and utilities for dicta2stream""" from fastapi import Request, HTTPException, Depends, status from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials -from sqlmodel import Session +from sqlmodel import Session, select from typing import Optional from models import User, Session as DBSession, verify_session @@ -11,40 +11,39 @@ security = HTTPBearer() def get_current_user( request: Request, - db: Session = Depends(get_db), credentials: HTTPAuthorizationCredentials = Depends(security) ) -> User: """Dependency to get the current authenticated user""" token = credentials.credentials - db_session = verify_session(db, token) - if not db_session: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid or expired session", - headers={"WWW-Authenticate": "Bearer"}, - ) - - # Get the user from the session - user = db.exec( - select(User).where(User.username == db_session.user_id) - ).first() - - if not user: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="User not found", - headers={"WWW-Authenticate": "Bearer"}, - ) - - # Attach the session to the request state for later use - request.state.session = db_session - return user + # Use the database session context manager + with get_db() as db: + db_session = verify_session(db, token) + + if not db_session: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired session", + headers={"WWW-Authenticate": "Bearer"}, + ) + + # Get the user from the session using query interface + user = db.query(User).filter(User.email == db_session.uid).first() + + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not found", + headers={"WWW-Authenticate": "Bearer"}, + ) + + # Attach the session to the request state for later use + request.state.session = db_session + return user def get_optional_user( request: Request, - db: Session = Depends(get_db), 
    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security, use_cache=False)
 ) -> Optional[User]:
     """Dependency that returns the current user if authenticated, None otherwise"""
@@ -52,22 +51,45 @@ def get_optional_user(
         return None
 
     try:
-        return get_current_user(request, db, credentials)
+        # get_current_user now handles its own database session
+        return get_current_user(request, credentials)
     except HTTPException:
         return None
 
-def create_session(db: Session, user: User, request: Request) -> DBSession:
-    """Create a new session for the user"""
-    user_agent = request.headers.get("user-agent")
+def create_session(user: User, request: Request) -> DBSession:
+    """Create a new session for the user (valid for 24 hours)"""
+    import secrets
+    from datetime import datetime, timedelta
+
+    user_agent = request.headers.get("user-agent", "")
     ip_address = request.client.host if request.client else "0.0.0.0"
 
-    session = DBSession.create_for_user(
-        user_id=user.username,
+    # Create session token and set 24-hour expiry
+    session_token = secrets.token_urlsafe(32)
+    expires_at = datetime.utcnow() + timedelta(hours=24)
+
+    # Create the session object (DBSession rows are keyed by uid, the user's email)
+    session = DBSession(
+        token=session_token,
+        uid=user.email,
         ip_address=ip_address,
-        user_agent=user_agent
+        user_agent=user_agent,
+        expires_at=expires_at,
+        is_active=True
     )
-    db.add(session)
-    db.commit()
-    return session
+    # Use the database session context manager
+    with get_db() as db:
+        try:
+            db.add(session)
+            db.commit()
+            db.refresh(session)  # Ensure we have the latest data
+            return session
+        except Exception as e:
+            db.rollback()
+            # Debug messages disabled
+            raise HTTPException(
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail="Failed to create session"
+            )
diff --git a/auth_router.py b/auth_router.py
index da43959..cdeabb0 100644
--- a/auth_router.py
+++ b/auth_router.py
@@ -15,7 +15,6 @@ security = HTTPBearer()
 async def logout(
     request: Request,
     response: Response,
-    db: Session = Depends(get_db),
     credentials: HTTPAuthorizationCredentials = Depends(security)
 ):
     """Log out by invalidating the current session"""
@@ -26,25 +25,28 @@ async def logout(
         if not token:
             return {"message": "No session to invalidate"}
 
-        try:
-            # Find and invalidate the session
-            session = db.exec(
-                select(DBSession)
-                .where(DBSession.token == token)
-                .where(DBSession.is_active == True)  # noqa: E712
-            ).first()
-
-            if session:
-                try:
-                    session.is_active = False
-                    db.add(session)
-                    db.commit()
-                except Exception:
-                    db.rollback()
-
-        except Exception:
-            # Continue with logout even if session lookup fails
-            pass
+        # Use the database session context manager
+        with get_db() as db:
+            try:
+                # Find and invalidate the session using query interface
+                session = db.query(DBSession).filter(
+                    DBSession.token == token,
+                    DBSession.is_active == True  # noqa: E712
+                ).first()
+
+                if session:
+                    try:
+                        session.is_active = False
+                        db.add(session)
+                        db.commit()
+                    except Exception as e:
+                        db.rollback()
+                        # Debug messages disabled
+                        # Continue with logout even if session update fails
+            except Exception as e:
+                # Debug messages disabled
+                # Continue with logout even if session lookup fails
+                pass
 
         # Clear the session cookie
         response.delete_cookie(
@@ -56,7 +58,7 @@ async def logout(
         )
 
         # Clear any other auth-related cookies
-        for cookie_name in ["uid", "authToken", "isAuthenticated", "token"]:
+        for cookie_name in ["uid", "authToken", "username", "token"]:
             response.delete_cookie(
                 key=cookie_name,
                 path="/",
@@ -71,15 +73,15 @@ async def logout(
     except HTTPException:
# Re-raise HTTP exceptions raise - except Exception: + except Exception as e: + # Debug messages disabled # Don't expose internal errors to the client return {"message": "Logout processed"} @router.get("/me") async def get_current_user_info( - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db) + current_user: User = Depends(get_current_user) ): """Get current user information""" return { @@ -92,15 +94,16 @@ async def get_current_user_info( @router.get("/sessions") async def list_sessions( - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db) + current_user: User = Depends(get_current_user) ): """List all active sessions for the current user""" - sessions = DBSession.get_active_sessions(db, current_user.username) - return [ - { - "id": s.id, - "ip_address": s.ip_address, + # Use the database session context manager + with get_db() as db: + sessions = DBSession.get_active_sessions(db, current_user.username) + return [ + { + "id": s.id, + "ip_address": s.ip_address, "user_agent": s.user_agent, "created_at": s.created_at.isoformat(), "last_used_at": s.last_used_at.isoformat(), @@ -113,26 +116,34 @@ async def list_sessions( @router.post("/sessions/{session_id}/revoke") async def revoke_session( session_id: int, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db) + current_user: User = Depends(get_current_user) ): """Revoke a specific session""" - session = db.get(DBSession, session_id) - - if not session or session.user_id != current_user.username: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Session not found" - ) - - if not session.is_active: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="Session is already inactive" - ) - - session.is_active = False - db.add(session) - db.commit() - - return {"message": "Session revoked"} + # Use the database session context manager + with get_db() as db: + session = db.get(DBSession, session_id) + + if not session or session.uid != current_user.email: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Session not found" + ) + + if not session.is_active: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Session is already inactive" + ) + + try: + session.is_active = False + db.add(session) + db.commit() + return {"message": "Session revoked successfully"} + except Exception as e: + db.rollback() + # Debug messages disabled + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to revoke session" + ) diff --git a/cleanup_devuser_old_format.sql b/cleanup_devuser_old_format.sql new file mode 100644 index 0000000..fe52eb3 --- /dev/null +++ b/cleanup_devuser_old_format.sql @@ -0,0 +1,31 @@ +-- Cleanup script for old format user 'devuser' +-- This user has username-based UID instead of email-based UID + +-- Show what will be deleted before deletion +SELECT 'publicstream entries to delete:' as info; +SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid = 'devuser'; + +SELECT 'uploadlog entries to delete:' as info; +SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid = 'devuser' GROUP BY uid; + +SELECT 'userquota entries to delete:' as info; +SELECT uid FROM userquota WHERE uid = 'devuser'; + +-- Delete from all related tables +-- Start with dependent tables first +DELETE FROM uploadlog WHERE uid = 'devuser'; +DELETE FROM userquota WHERE uid = 'devuser'; +DELETE FROM publicstream WHERE uid = 'devuser'; + +-- 
Verify cleanup +SELECT 'Remaining entries for devuser in publicstream:' as info; +SELECT COUNT(*) as count FROM publicstream WHERE uid = 'devuser'; + +SELECT 'Remaining entries for devuser in uploadlog:' as info; +SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'devuser'; + +SELECT 'Remaining entries for devuser in userquota:' as info; +SELECT COUNT(*) as count FROM userquota WHERE uid = 'devuser'; + +SELECT 'Total remaining old format entries in publicstream:' as info; +SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username; diff --git a/cleanup_final_orphaned.sql b/cleanup_final_orphaned.sql new file mode 100644 index 0000000..7f9e490 --- /dev/null +++ b/cleanup_final_orphaned.sql @@ -0,0 +1,19 @@ +-- Final cleanup of orphaned entries that prevent proper account deletion +-- These entries have username-based UIDs that should have been deleted + +-- Show what will be deleted +SELECT 'Orphaned publicstream entries to delete:' as info; +SELECT uid, username FROM publicstream WHERE uid = 'oibchello'; + +SELECT 'Orphaned userquota entries to delete:' as info; +SELECT uid, storage_bytes FROM userquota WHERE uid = 'oibchello'; + +-- Delete the orphaned entries +DELETE FROM publicstream WHERE uid = 'oibchello'; +DELETE FROM userquota WHERE uid = 'oibchello'; + +-- Verify cleanup +SELECT 'Remaining entries for oibchello:' as info; +SELECT 'publicstream' as table_name, COUNT(*) as count FROM publicstream WHERE uid = 'oibchello' +UNION ALL +SELECT 'userquota' as table_name, COUNT(*) as count FROM userquota WHERE uid = 'oibchello'; diff --git a/cleanup_legacy_db.sql b/cleanup_legacy_db.sql new file mode 100644 index 0000000..814990f --- /dev/null +++ b/cleanup_legacy_db.sql @@ -0,0 +1,169 @@ +-- Database Legacy Data Cleanup Script +-- Fixes issues identified in the database analysis +-- Execute these queries step by step to fix legacy data + +-- ============================================================================= +-- STEP 1: Fix User Table - Update username to match email format +-- ============================================================================= +-- Issue: User has username 'oibchello' but email 'oib@chello.at' +-- Fix: Update username to match email (current authentication requirement) + +UPDATE "user" +SET username = email, + display_name = CASE + WHEN display_name = '' OR display_name IS NULL + THEN split_part(email, '@', 1) -- Use email prefix as display name + ELSE display_name + END +WHERE email = 'oib@chello.at'; + +-- Verify the fix +SELECT email, username, display_name, confirmed FROM "user" WHERE email = 'oib@chello.at'; + +-- ============================================================================= +-- STEP 2: Clean Up Expired Sessions +-- ============================================================================= +-- Issue: 11 expired sessions still marked as active (security risk) +-- Fix: Mark expired sessions as inactive + +UPDATE dbsession +SET is_active = false +WHERE expires_at < NOW() AND is_active = true; + +-- Verify expired sessions are now inactive +SELECT COUNT(*) as expired_active_sessions +FROM dbsession +WHERE expires_at < NOW() AND is_active = true; + +-- Optional: Delete very old expired sessions (older than 7 days) +DELETE FROM dbsession +WHERE expires_at < NOW() - INTERVAL '7 days'; + +-- ============================================================================= +-- STEP 3: Update Session user_id to Email Format +-- ============================================================================= +-- Issue: 
All sessions use old username format instead of email +-- Fix: Update session user_id to use email format + +UPDATE dbsession +SET user_id = 'oib@chello.at' +WHERE user_id = 'oibchello'; + +-- Verify session user_id updates +SELECT DISTINCT user_id FROM dbsession; + +-- ============================================================================= +-- STEP 4: Fix PublicStream Username Fields +-- ============================================================================= +-- Issue: PublicStream has username/UID mismatches +-- Fix: Update username to match UID (email format) + +-- Fix the existing user record +UPDATE publicstream +SET username = uid, + display_name = CASE + WHEN display_name = 'oibchello' + THEN split_part(uid, '@', 1) -- Use email prefix as display name + ELSE display_name + END +WHERE uid = 'oib@chello.at'; + +-- Verify the fix +SELECT uid, username, display_name FROM publicstream WHERE uid = 'oib@chello.at'; + +-- ============================================================================= +-- STEP 5: Remove Orphaned Records for Deleted User +-- ============================================================================= +-- Issue: Records exist for 'oib@bubuit.net' but no user exists +-- Fix: Remove orphaned records + +-- Remove orphaned quota record +DELETE FROM userquota WHERE uid = 'oib@bubuit.net'; + +-- Remove orphaned stream record +DELETE FROM publicstream WHERE uid = 'oib@bubuit.net'; + +-- Verify orphaned records are removed +SELECT 'userquota' as table_name, COUNT(*) as count FROM userquota WHERE uid = 'oib@bubuit.net' +UNION ALL +SELECT 'publicstream' as table_name, COUNT(*) as count FROM publicstream WHERE uid = 'oib@bubuit.net'; + +-- ============================================================================= +-- VERIFICATION QUERIES +-- ============================================================================= +-- Run these to verify all issues are fixed + +-- 1. Check user table consistency +SELECT + email, + username, + display_name, + CASE WHEN email = username THEN '✓' ELSE '✗' END as email_username_match, + CASE WHEN display_name != '' THEN '✓' ELSE '✗' END as has_display_name +FROM "user"; + +-- 2. Check session table health +SELECT + COUNT(*) as total_sessions, + COUNT(CASE WHEN is_active THEN 1 END) as active_sessions, + COUNT(CASE WHEN expires_at < NOW() AND is_active THEN 1 END) as expired_but_active, + COUNT(CASE WHEN expires_at - created_at > INTERVAL '20 hours' THEN 1 END) as long_duration_sessions +FROM dbsession; + +-- 3. Check PublicStream consistency +SELECT + uid, + username, + display_name, + CASE WHEN uid = username THEN '✓' ELSE '✗' END as uid_username_match +FROM publicstream; + +-- 4. Check referential integrity +SELECT + 'Users' as entity, + COUNT(*) as count +FROM "user" +UNION ALL +SELECT + 'UserQuota records', + COUNT(*) +FROM userquota +UNION ALL +SELECT + 'PublicStream records', + COUNT(*) +FROM publicstream +UNION ALL +SELECT + 'Active Sessions', + COUNT(*) +FROM dbsession WHERE is_active = true; + +-- 5. 
Final validation - should return no rows if all issues are fixed
+-- (UNION ALL members carry their own LIMIT, which PostgreSQL only allows inside parentheses)
+SELECT 'ISSUE: User email/username mismatch' as issue
+FROM "user"
+WHERE email != username
+UNION ALL
+(SELECT 'ISSUE: Expired active sessions'
+ FROM dbsession
+ WHERE expires_at < NOW() AND is_active = true
+ LIMIT 1)
+UNION ALL
+(SELECT 'ISSUE: PublicStream UID/username mismatch'
+ FROM publicstream
+ WHERE uid != username
+ LIMIT 1)
+UNION ALL
+(SELECT 'ISSUE: Orphaned quota records'
+ FROM userquota q
+ LEFT JOIN "user" u ON q.uid = u.email
+ WHERE u.email IS NULL
+ LIMIT 1)
+UNION ALL
+(SELECT 'ISSUE: Orphaned stream records'
+ FROM publicstream p
+ LEFT JOIN "user" u ON p.uid = u.email
+ WHERE u.email IS NULL
+ LIMIT 1);
+
+-- If the final query returns no rows, all legacy issues are fixed! ✅
diff --git a/cleanup_oibchello_old_format.sql b/cleanup_oibchello_old_format.sql
new file mode 100644
index 0000000..e2e3f6e
--- /dev/null
+++ b/cleanup_oibchello_old_format.sql
@@ -0,0 +1,31 @@
+-- Cleanup script for old format user 'oibchello'
+-- This user has a username-based UID instead of an email-based UID
+
+-- Show what will be deleted before deletion
+SELECT 'publicstream entries to delete:' as info;
+SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid = 'oibchello';
+
+SELECT 'uploadlog entries to delete:' as info;
+SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid = 'oibchello' GROUP BY uid;
+
+SELECT 'userquota entries to delete:' as info;
+SELECT uid FROM userquota WHERE uid = 'oibchello';
+
+-- Delete from all related tables
+-- Start with dependent tables first
+DELETE FROM uploadlog WHERE uid = 'oibchello';
+DELETE FROM userquota WHERE uid = 'oibchello';
+DELETE FROM publicstream WHERE uid = 'oibchello';
+
+-- Verify cleanup
+SELECT 'Remaining entries for oibchello in publicstream:' as info;
+SELECT COUNT(*) as count FROM publicstream WHERE uid = 'oibchello';
+
+SELECT 'Remaining entries for oibchello in uploadlog:' as info;
+SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'oibchello';
+
+SELECT 'Remaining entries for oibchello in userquota:' as info;
+SELECT COUNT(*) as count FROM userquota WHERE uid = 'oibchello';
+
+SELECT 'Total remaining old format entries in publicstream:' as info;
+SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username;
diff --git a/cleanup_old_format_users.sql b/cleanup_old_format_users.sql
new file mode 100644
index 0000000..955c62d
--- /dev/null
+++ b/cleanup_old_format_users.sql
@@ -0,0 +1,28 @@
+-- Cleanup script for old format user entries
+-- Removes users with username-based UIDs instead of email-based UIDs
+
+-- Show what will be deleted before deletion
+SELECT 'publicstream entries to delete:' as info;
+SELECT uid, username, storage_bytes, created_at FROM publicstream WHERE uid IN ('devuser', 'oibchello');
+
+SELECT 'uploadlog entries to delete:' as info;
+SELECT COUNT(*) as count, uid FROM uploadlog WHERE uid IN ('devuser', 'oibchello') GROUP BY uid;
+
+SELECT 'userquota entries to delete:' as info;
+SELECT uid, storage_bytes FROM userquota WHERE uid IN ('devuser', 'oibchello');
+
+-- Delete from all related tables
+-- Start with dependent tables first
+DELETE FROM uploadlog WHERE uid IN ('devuser', 'oibchello');
+DELETE FROM userquota WHERE uid IN ('devuser', 'oibchello');
+DELETE FROM publicstream WHERE uid IN ('devuser', 'oibchello');
+
+-- Verify cleanup
+SELECT 'Remaining old format entries in publicstream:' as info;
+SELECT COUNT(*) as count FROM publicstream WHERE uid NOT LIKE '%@%' OR uid = username;
+
+SELECT 'Remaining
old format entries in uploadlog:' as info; +SELECT COUNT(*) as count FROM uploadlog WHERE uid NOT LIKE '%@%'; + +SELECT 'Remaining old format entries in userquota:' as info; +SELECT COUNT(*) as count FROM userquota WHERE uid NOT LIKE '%@%'; diff --git a/cleanup_orphaned_uploadlog.sql b/cleanup_orphaned_uploadlog.sql new file mode 100644 index 0000000..3c8795f --- /dev/null +++ b/cleanup_orphaned_uploadlog.sql @@ -0,0 +1,17 @@ +-- Cleanup script for orphaned uploadlog entries +-- These entries have username-based UIDs that should have been deleted with the user + +-- Show what will be deleted +SELECT 'Orphaned uploadlog entries to delete:' as info; +SELECT uid, filename, processed_filename, created_at FROM uploadlog WHERE uid = 'oibchello'; + +-- Delete the orphaned entries +DELETE FROM uploadlog WHERE uid = 'oibchello'; + +-- Verify cleanup +SELECT 'Remaining uploadlog entries for oibchello:' as info; +SELECT COUNT(*) as count FROM uploadlog WHERE uid = 'oibchello'; + +-- Show all remaining uploadlog entries +SELECT 'All remaining uploadlog entries:' as info; +SELECT uid, filename, created_at FROM uploadlog ORDER BY created_at DESC; diff --git a/cleanup_remaining_orphaned.sql b/cleanup_remaining_orphaned.sql new file mode 100644 index 0000000..6883390 --- /dev/null +++ b/cleanup_remaining_orphaned.sql @@ -0,0 +1,6 @@ +-- Cleanup remaining orphaned uploadlog entries for devuser +DELETE FROM uploadlog WHERE uid = 'devuser'; + +-- Verify cleanup +SELECT 'All remaining uploadlog entries after cleanup:' as info; +SELECT uid, filename, created_at FROM uploadlog ORDER BY created_at DESC; diff --git a/concat_opus.py b/concat_opus.py new file mode 100644 index 0000000..3a109f8 --- /dev/null +++ b/concat_opus.py @@ -0,0 +1,78 @@ +# concat_opus.py — Concatenate all opus files in a user directory in random order into a single stream.opus +import os +import random +import subprocess +from pathlib import Path + +def concat_opus_files(user_dir: Path, output_file: Path): + """ + Concatenate all .opus files in user_dir (except stream.opus) in random order into output_file. + Overwrites output_file if exists. Creates it if missing. 
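+    Duplicate files (detected by MD5 content hash) are removed rather than
+    concatenated twice. Concatenation uses ffmpeg's concat demuxer with stream
+    copy, which assumes every input shares the same Opus encoding parameters.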
+ """ + # Clean up any existing filelist.txt to prevent issues + filelist_path = user_dir / 'filelist.txt' + if filelist_path.exists(): + try: + filelist_path.unlink() + except Exception as e: + print(f"Warning: Could not clean up old filelist.txt: {e}") + + # Get all opus files except stream.opus and remove any duplicates + import hashlib + file_hashes = set() + files = [] + + for f in user_dir.glob('*.opus'): + if f.name == 'stream.opus': + continue + + try: + # Calculate file hash for duplicate detection + hasher = hashlib.md5() + with open(f, 'rb') as file: + buf = file.read(65536) # Read in 64kb chunks + while len(buf) > 0: + hasher.update(buf) + buf = file.read(65536) + file_hash = hasher.hexdigest() + + # Skip if we've seen this exact file before + if file_hash in file_hashes: + print(f"Removing duplicate file: {f.name}") + f.unlink() + continue + + file_hashes.add(file_hash) + files.append(f) + + except Exception as e: + print(f"Error processing {f}: {e}") + + if not files: + # If no files, create an empty stream.opus + output_file.write_bytes(b'') + return output_file + + random.shuffle(files) + + # Create a filelist for ffmpeg concat + filelist_path = user_dir / 'filelist.txt' + with open(filelist_path, 'w') as f: + for opusfile in files: + f.write(f"file '{opusfile.resolve()}'\n") + + # ffmpeg concat demuxer (no re-encoding) + cmd = [ + 'ffmpeg', '-y', '-f', 'concat', '-safe', '0', '-i', str(filelist_path), + '-c', 'copy', str(output_file) + ] + try: + subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"FFmpeg concat failed: {e}") + finally: + if filelist_path.exists(): + filelist_path.unlink() + if not output_file.exists(): + raise RuntimeError("Concatenation did not produce output.") + return output_file diff --git a/convert_to_opus.py b/convert_to_opus.py new file mode 100644 index 0000000..a62fb00 --- /dev/null +++ b/convert_to_opus.py @@ -0,0 +1,39 @@ +# convert_to_opus.py — Default voice pipeline: bandpass + compressor + limiter + gate + +import subprocess +import os + +def convert_to_opus(input_path, output_path): + if not os.path.exists(input_path): + raise FileNotFoundError(f"Input file not found: {input_path}") + + filters = [ + "highpass=f=400", # low-cut below 400 Hz + "lowpass=f=12000", # high-cut above 12 kHz + "acompressor=threshold=-18dB", + "alimiter=limit=-1dB", + "agate=threshold=0.02" + ] + + cmd = [ + "ffmpeg", "-y", + "-i", input_path, + "-af", ",".join(filters), + "-ac", "1", + "-ar", "24000", + "-c:a", "libopus", + "-b:a", "40k", + "-vbr", "on", + "-application", "voip", + output_path + ] + + try: + subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"FFmpeg conversion failed: {e}") + + if not os.path.exists(output_path): + raise RuntimeError("Conversion did not produce output.") + + return output_path diff --git a/database.py b/database.py index 504c7ac..a4f2067 100644 --- a/database.py +++ b/database.py @@ -1,14 +1,33 @@ # database.py — SQLModel engine/session for PostgreSQL from sqlmodel import create_engine, Session, SQLModel +from contextlib import contextmanager import os +# Debug messages disabled + POSTGRES_URL = os.getenv("DATABASE_URL", "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream") -engine = create_engine(POSTGRES_URL, echo=False) +engine = create_engine(POSTGRES_URL, echo=False) # Disable echo for production # SQLAlchemy Base 
class for models Base = SQLModel +@contextmanager def get_db(): - with Session(engine) as session: + """Session management context manager that ensures proper commit/rollback.""" + session = Session(engine) + try: + # Debug messages disabled yield session + session.commit() + # Debug messages disabled + except Exception as e: + # Debug messages disabled + session.rollback() + raise + finally: + # Debug messages disabled + session.close() + +# For backward compatibility +get_db_deprecated = get_db diff --git a/dictastream_backup_20250806_134951.sql b/dictastream_backup_20250806_134951.sql new file mode 100644 index 0000000..3e8175b --- /dev/null +++ b/dictastream_backup_20250806_134951.sql @@ -0,0 +1,307 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 15.13 (Debian 15.13-0+deb12u1) +-- Dumped by pg_dump version 15.13 (Debian 15.13-0+deb12u1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: alembic_version; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public.alembic_version ( + version_num character varying(32) NOT NULL +); + + +ALTER TABLE public.alembic_version OWNER TO d2s; + +-- +-- Name: dbsession; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public.dbsession ( + token character varying NOT NULL, + uid character varying NOT NULL, + ip_address character varying NOT NULL, + user_agent character varying NOT NULL, + created_at timestamp without time zone NOT NULL, + expires_at timestamp without time zone NOT NULL, + is_active boolean NOT NULL, + last_activity timestamp without time zone NOT NULL +); + + +ALTER TABLE public.dbsession OWNER TO d2s; + +-- +-- Name: publicstream; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public.publicstream ( + uid character varying NOT NULL, + username character varying, + storage_bytes integer NOT NULL, + mtime integer NOT NULL, + last_updated timestamp without time zone, + created_at timestamp without time zone NOT NULL, + updated_at timestamp without time zone NOT NULL +); + + +ALTER TABLE public.publicstream OWNER TO d2s; + +-- +-- Name: uploadlog; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public.uploadlog ( + id integer NOT NULL, + uid character varying NOT NULL, + ip character varying NOT NULL, + filename character varying, + processed_filename character varying, + size_bytes integer NOT NULL, + created_at timestamp without time zone NOT NULL +); + + +ALTER TABLE public.uploadlog OWNER TO d2s; + +-- +-- Name: uploadlog_id_seq; Type: SEQUENCE; Schema: public; Owner: d2s +-- + +CREATE SEQUENCE public.uploadlog_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.uploadlog_id_seq OWNER TO d2s; + +-- +-- Name: uploadlog_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: d2s +-- + +ALTER SEQUENCE public.uploadlog_id_seq OWNED BY public.uploadlog.id; + + +-- +-- Name: user; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public."user" ( + token_created timestamp without time zone NOT NULL, + email character varying NOT NULL, + username character varying NOT NULL, + token character varying NOT NULL, + 
confirmed boolean NOT NULL, + ip character varying NOT NULL +); + + +ALTER TABLE public."user" OWNER TO d2s; + +-- +-- Name: userquota; Type: TABLE; Schema: public; Owner: d2s +-- + +CREATE TABLE public.userquota ( + uid character varying NOT NULL, + storage_bytes integer NOT NULL +); + + +ALTER TABLE public.userquota OWNER TO d2s; + +-- +-- Name: uploadlog id; Type: DEFAULT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.uploadlog ALTER COLUMN id SET DEFAULT nextval('public.uploadlog_id_seq'::regclass); + + +-- +-- Data for Name: alembic_version; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public.alembic_version (version_num) FROM stdin; +\. + + +-- +-- Data for Name: dbsession; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public.dbsession (token, uid, ip_address, user_agent, created_at, expires_at, is_active, last_activity) FROM stdin; +6Y3PfCj-Mk3qLRttXCul8GTFZU9XWZtoHjk9I4EqnTE oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:32:21.725005 2025-08-07 10:32:21.724909 t 2025-08-06 10:32:21.725012 +uGnwnfsAUzbNJZoqYsbT__tVxqfl4NtOD04UKYp8FEY oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:35:43.931018 2025-08-07 10:35:43.930918 t 2025-08-06 10:35:43.931023 +OmKl-RrM8D4624xmNQigD3tdG4aXq8CzUq7Ch0qEhP4 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:36:02.758938 2025-08-07 10:36:02.758873 t 2025-08-06 10:36:02.758941 +gGpgdAbmpwY3a-zY1Ri92l7hUEjg-GyIt1o2kIDwBE8 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:45:59.701084 2025-08-07 10:45:59.70098 t 2025-08-06 10:45:59.701091 +GT9OKNxnhThcFXKvMBBVop7kczUH-4fE4bkCcRd17xE oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:14.181147 2025-08-07 10:46:14.181055 t 2025-08-06 10:46:14.181152 +Ok0mwpRLa5Fuimt9eN0l-xUaaCmpipokTkOILSxJNuA oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:27.910441 2025-08-07 10:46:27.91036 t 2025-08-06 10:46:27.910444 +DCTd4zCq_Lp_GxdwI14hFwZiDjfvNVvQrUVznllTdIA oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:35.928008 2025-08-07 10:46:35.927945 t 2025-08-06 10:46:35.928011 +dtv0uti4QUudgMTnS1NRzZ9nD9vhLO1stM5bdXL4I1o oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:46:36.104031 2025-08-07 10:46:36.103944 t 2025-08-06 10:46:36.104034 +NHZQSW6C2H-5Wq6Un6NqcAmnfSt1PqJeYJnwFKSjAss oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:51:33.897379 2025-08-07 10:51:33.897295 t 2025-08-06 10:51:33.897385 +yYZeeLyXmwpyr8Uu1szIyyoIpLc7qiWfQwB57f4kqNI oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:53:43.711315 2025-08-07 10:53:43.711223 t 2025-08-06 10:53:43.71132 +KhH9FO4D15l3-SUUkFHjR5Oj1N6Ld-NLmkzaM1QMhtU oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 10:56:22.050456 2025-08-07 10:56:22.050377 t 2025-08-06 10:56:22.050461 +zPQqqHEY4l7ZhLrBPBnvQdsQhQj1_j0n9H6CCnIAME8 oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:29:49.412786 2025-08-07 11:29:49.412706 t 2025-08-06 
11:29:49.412792 +oxYZ9qTaezYliV6UtsI62RpPClj7rIAVXK_1FB3gYMQ oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:34:42.099366 2025-08-07 11:34:42.099276 t 2025-08-06 11:34:42.099371 +Ml6aHvae2EPXs9SWZX1BI_mNKgasjIVRMWnUSwKwixQ oib@chello.at 127.0.0.1 Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0 2025-08-06 11:38:06.002942 2025-08-07 11:38:06.002845 t 2025-08-06 11:38:06.002949 +\. + + +-- +-- Data for Name: publicstream; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public.publicstream (uid, username, storage_bytes, mtime, last_updated, created_at, updated_at) FROM stdin; +oib@chello.at oibchello 16151127 1754453233 2025-08-06 06:22:53.97839 2025-08-06 06:07:13.525122 2025-08-06 06:07:13.525126 +\. + + +-- +-- Data for Name: uploadlog; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public.uploadlog (id, uid, ip, filename, processed_filename, size_bytes, created_at) FROM stdin; +111 oib@chello.at 127.0.0.1 Taös - Bobstep [ Dubstep ] [1YGV5cNJrt0].opus 210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 688750 2025-08-06 06:22:53.970258 +112 oib@chello.at backfilled 107_5e6c3567-7457-48f4-83fc-f3073f065718.opus 107_5e6c3567-7457-48f4-83fc-f3073f065718.opus 671050 2025-08-06 08:14:43.312825 +99 oib@chello.at 127.0.0.1 Pendulum - Set Me On Fire (Rasta Dubstep Rastep Raggastep) [ndShSlWMaeA].opus b0afe675-de49-43eb-ab77-86e592934342.opus 1051596 2025-08-06 06:07:13.504649 +100 oib@chello.at 127.0.0.1 Roots Reggae (1976) [Unreleased Album] Judah Khamani - Twelve Gates of Rebirth [94NDoPCjRL0].opus 6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 4751764 2025-08-06 06:08:00.96213 +101 oib@chello.at backfilled 98_15ba146a-8285-4233-9d44-e77e5fc19cd6.opus 98_15ba146a-8285-4233-9d44-e77e5fc19cd6.opus 805775 2025-08-06 08:05:27.805988 +102 oib@chello.at backfilled 97_74e975bf-22f8-4b98-8111-dbcd195a62a2.opus 97_74e975bf-22f8-4b98-8111-dbcd195a62a2.opus 775404 2025-08-06 07:57:50.570271 +103 oib@chello.at backfilled 99_b0afe675-de49-43eb-ab77-86e592934342.opus 99_b0afe675-de49-43eb-ab77-86e592934342.opus 1051596 2025-08-06 08:07:13.493002 +104 oib@chello.at backfilled 100_6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 100_6e0e4d7c-31a6-4d3b-ad26-1ccb8aeaaf55.opus 4751764 2025-08-06 08:08:00.944561 +105 oib@chello.at backfilled stream.opus stream.opus 7384026 2025-08-06 08:08:01.540555 +106 oib@chello.at 127.0.0.1 Roots Reggae (1973) [Unreleased Album] Judah Khamani - Scrolls of the Fire Lion🔥 [wZvlYr5Baa8].opus 516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 4760432 2025-08-06 06:14:17.072377 +107 oib@chello.at 127.0.0.1 Reggae Shark Dubstep remix [101PfefUH5A].opus 5e6c3567-7457-48f4-83fc-f3073f065718.opus 671050 2025-08-06 06:14:43.326351 +108 oib@chello.at 127.0.0.1 SiriuX - RastaFari (Dubstep REMIX) [VVAWgX0IgxY].opus 25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 939266 2025-08-06 06:17:55.519608 +109 oib@chello.at 127.0.0.1 I'm Death, Straight Up | DEATH WHISTLE (Wubbaduck x Auphinity DUBSTEP REMIX) [BK6_6RB2h64].opus 9c9b6356-d5b7-427f-9179-942593cd97e6.opus 805775 2025-08-06 06:19:41.29278 +110 oib@chello.at 127.0.0.1 N.A.S.A. Way Down (feat. 
RZA, Barbie Hatch, & John Frusciante).mp3 72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 901315 2025-08-06 06:22:01.727741 +113 oib@chello.at backfilled 110_72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 110_72c4ce3e-c991-4fb4-b5ab-b2f83b6f616d.opus 901315 2025-08-06 08:22:01.71671 +114 oib@chello.at backfilled 108_25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 108_25aa73c3-2a9c-4659-835d-8280a0381dc4.opus 939266 2025-08-06 08:17:55.511047 +115 oib@chello.at backfilled 106_516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 106_516c2ea1-6bf3-4461-91c6-e7c47e913743.opus 4760432 2025-08-06 08:14:17.057068 +116 oib@chello.at backfilled 109_9c9b6356-d5b7-427f-9179-942593cd97e6.opus 109_9c9b6356-d5b7-427f-9179-942593cd97e6.opus 805775 2025-08-06 08:19:41.282058 +117 oib@chello.at backfilled 111_210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 111_210388e1-2a9b-4b7c-a72f-d4059111ee80.opus 688750 2025-08-06 08:22:53.960209 +\. + + +-- +-- Data for Name: user; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public."user" (token_created, email, username, token, confirmed, ip) FROM stdin; +2025-08-06 11:37:50.164201 oib@chello.at oibchello 69aef338-4f18-44b2-96bb-403245901d06 t 127.0.0.1 +\. + + +-- +-- Data for Name: userquota; Type: TABLE DATA; Schema: public; Owner: d2s +-- + +COPY public.userquota (uid, storage_bytes) FROM stdin; +oib@chello.at 16151127 +\. + + +-- +-- Name: uploadlog_id_seq; Type: SEQUENCE SET; Schema: public; Owner: d2s +-- + +SELECT pg_catalog.setval('public.uploadlog_id_seq', 117, true); + + +-- +-- Name: alembic_version alembic_version_pkc; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.alembic_version + ADD CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num); + + +-- +-- Name: dbsession dbsession_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.dbsession + ADD CONSTRAINT dbsession_pkey PRIMARY KEY (token); + + +-- +-- Name: publicstream publicstream_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.publicstream + ADD CONSTRAINT publicstream_pkey PRIMARY KEY (uid); + + +-- +-- Name: uploadlog uploadlog_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.uploadlog + ADD CONSTRAINT uploadlog_pkey PRIMARY KEY (id); + + +-- +-- Name: user user_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public."user" + ADD CONSTRAINT user_pkey PRIMARY KEY (email); + + +-- +-- Name: userquota userquota_pkey; Type: CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.userquota + ADD CONSTRAINT userquota_pkey PRIMARY KEY (uid); + + +-- +-- Name: ix_publicstream_username; Type: INDEX; Schema: public; Owner: d2s +-- + +CREATE INDEX ix_publicstream_username ON public.publicstream USING btree (username); + + +-- +-- Name: ix_user_username; Type: INDEX; Schema: public; Owner: d2s +-- + +CREATE UNIQUE INDEX ix_user_username ON public."user" USING btree (username); + + +-- +-- Name: dbsession dbsession_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: d2s +-- + +ALTER TABLE ONLY public.dbsession + ADD CONSTRAINT dbsession_user_id_fkey FOREIGN KEY (uid) REFERENCES public."user"(email); + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/docs/auth-consolidation.md b/docs/auth-consolidation.md new file mode 100644 index 0000000..9f3a005 --- /dev/null +++ b/docs/auth-consolidation.md @@ -0,0 +1,131 @@ +# Authentication Logic Consolidation + +## Overview + +The authentication logic has been consolidated from multiple scattered files into 
a single, centralized `AuthManager` class. This improves maintainability, reduces code duplication, and provides a consistent authentication interface. + +## Files Changed + +### 1. New Centralized Module +- **`static/auth-manager.js`** - New centralized authentication manager class + +### 2. Refactored Files +- **`static/auth.js`** - Simplified to use AuthManager +- **`static/magic-login.js`** - Simplified to use AuthManager +- **`static/cleanup-auth.js`** - Simplified to use AuthManager + +## AuthManager Features + +### Core Functionality +- **Centralized State Management** - Single source of truth for authentication state +- **Cookie & localStorage Management** - Consistent handling of auth data storage +- **Magic Link Processing** - Handles both URL-based and token-based magic login +- **Authentication Polling** - Periodic state checks with caching and debouncing +- **User Session Management** - Login, logout, and account deletion + +### Key Methods +- `initialize()` - Initialize the auth manager and handle magic login +- `setAuthState(email, username, token)` - Set authentication state +- `clearAuthState()` - Clear all authentication data +- `isAuthenticated()` - Check current authentication status +- `getCurrentUser()` - Get current user data +- `logout()` - Perform logout and redirect +- `deleteAccount()` - Handle account deletion +- `cleanupAuthState(email)` - Clean up inconsistent auth state + +### Authentication Flow +1. **Magic Login Detection** - Checks URL parameters for login tokens/success +2. **User Info Retrieval** - Fetches email from `/api/me` endpoint +3. **State Setting** - Sets email as primary UID, username for display +4. **UI Updates** - Updates body classes and initializes user session +5. **Navigation** - Redirects to user profile page + +## Data Storage Strategy + +### localStorage Keys +- `uid` - Primary identifier (email-based) +- `user_email` - Explicit email storage +- `username` - Display name (separate from UID) +- `authToken` - Authentication token +- `isAuthenticated` - Boolean authentication state +- `uid_time` - Session timestamp + +### Cookie Strategy +- `uid` - Email-based UID with `SameSite=Lax` +- `authToken` - Auth token with `SameSite=Lax; Secure` +- `isAuthenticated` - Boolean flag with `SameSite=Lax` + +## Removed Redundancy + +### Eliminated Duplicate Code +- **User info fetching** - Centralized in `fetchUserInfo()` +- **Auth state setting** - Centralized in `setAuthState()` +- **Cookie management** - Centralized in `setAuthState()` and `clearAuthState()` +- **Magic login processing** - Centralized in `processMagicLogin()` and `processTokenLogin()` + +### Removed Fields +- `confirmed_uid` - Was duplicate of `uid`, now eliminated + +## Backward Compatibility + +### Global Functions (Legacy Support) +- `window.getCurrentUser()` - Get current user data +- `window.isAuthenticated()` - Check authentication status +- `window.logout()` - Perform logout +- `window.cleanupAuthState(email)` - Clean up auth state + +### Existing Function Exports +- `initMagicLogin()` - Maintained in magic-login.js for compatibility +- `cleanupAuthState()` - Maintained in cleanup-auth.js for compatibility + +## Benefits Achieved + +### 1. **Maintainability** +- Single source of authentication logic +- Consistent error handling and logging +- Easier to debug and modify + +### 2. **Performance** +- Reduced code duplication +- Optimized caching and debouncing +- Fewer redundant API calls + +### 3. 
**Reliability** +- Consistent state management +- Proper cleanup on logout +- Robust error handling + +### 4. **Security** +- Consistent cookie security attributes +- Proper state clearing on logout +- Centralized validation + +## Migration Notes + +### For Developers +- Import `authManager` from `./auth-manager.js` for new code +- Use `authManager.isAuthenticated()` instead of manual checks +- Use `authManager.getCurrentUser()` for user data +- Legacy global functions still work for existing code + +### Testing +- Test magic link login (both URL and token-based) +- Test authentication state persistence +- Test logout and account deletion +- Test authentication polling and state changes + +## Future Improvements + +### Potential Enhancements +1. **Token Refresh** - Automatic token renewal +2. **Session Timeout** - Configurable session expiration +3. **Multi-tab Sync** - Better cross-tab authentication sync +4. **Audit Logging** - Enhanced authentication event logging +5. **Rate Limiting** - Protection against auth abuse + +### Configuration Options +Consider adding configuration for: +- Polling intervals +- Cache TTL values +- Debug logging levels +- Cookie security settings diff --git a/execute_db_cleanup.py b/execute_db_cleanup.py new file mode 100644 index 0000000..9ecb8ca --- /dev/null +++ b/execute_db_cleanup.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python3 +""" +Execute Database Legacy Data Cleanup +Fixes issues identified in the database analysis using direct SQL execution +""" + +import sys +from sqlmodel import Session, text +from database import engine + +def execute_step(session, step_name, query, description): + """Execute a cleanup step and report results""" + print(f"\n=== {step_name} ===") + print(f"Description: {description}") + print(f"Query: {query}") + + try: + result = session.exec(text(query)) + if query.strip().upper().startswith('SELECT'): + rows = result.fetchall() + print(f"Result: {len(rows)} rows") + for row in rows: + print(f" {row}") + else: + session.commit() + print(f"✅ Success: {result.rowcount} rows affected") + return True + except Exception as e: + print(f"❌ Error: {e}") + session.rollback() + return False + +def main(): + """Execute database cleanup step by step""" + print("=== DATABASE LEGACY DATA CLEANUP ===") + + with Session(engine) as session: + success_count = 0 + total_steps = 0 + + # Step 1: Fix User Table - Update username to match email format + total_steps += 1 + if execute_step( + session, + "STEP 1: Fix User Table", + """UPDATE "user" + SET username = email, + display_name = CASE + WHEN display_name = '' OR display_name IS NULL + THEN split_part(email, '@', 1) + ELSE display_name + END + WHERE email = 'oib@chello.at'""", + "Update username to match email format and set display_name" + ): + success_count += 1 + + # Verify Step 1 + execute_step( + session, + "VERIFY STEP 1", + """SELECT email, username, display_name, confirmed + FROM "user" WHERE email = 'oib@chello.at'""", + "Verify user table fix" + ) + + # Step 2: Clean Up Expired Sessions + total_steps += 1 + if execute_step( + session, + "STEP 2: Mark Expired Sessions Inactive", + """UPDATE dbsession + SET is_active = false + WHERE expires_at < NOW() AND is_active = true""", + "Mark expired sessions as inactive for security" + ): + success_count += 1 + + # Verify Step 2 + execute_step( + session, + "VERIFY STEP 2", + """SELECT COUNT(*) as expired_active_sessions + FROM dbsession + WHERE expires_at < NOW() AND is_active = true""", + "Check for remaining expired active sessions" + ) + + # Step 
3: Update Session user_id to Email Format
+        total_steps += 1
+        if execute_step(
+            session,
+            "STEP 3: Update Session user_id",
+            """UPDATE dbsession
+               SET user_id = 'oib@chello.at'
+               WHERE user_id = 'oibchello'""",
+            "Update session user_id to use email format"
+        ):
+            success_count += 1
+
+        # Verify Step 3
+        execute_step(
+            session,
+            "VERIFY STEP 3",
+            """SELECT DISTINCT user_id FROM dbsession""",
+            "Check session user_id values"
+        )
+
+        # Step 4: Fix PublicStream Username Fields
+        total_steps += 1
+        if execute_step(
+            session,
+            "STEP 4: Fix PublicStream",
+            """UPDATE publicstream
+               SET username = uid,
+                   display_name = CASE
+                       WHEN display_name = 'oibchello'
+                       THEN split_part(uid, '@', 1)
+                       ELSE display_name
+                   END
+               WHERE uid = 'oib@chello.at'""",
+            "Update PublicStream username to match UID"
+        ):
+            success_count += 1
+
+        # Verify Step 4
+        execute_step(
+            session,
+            "VERIFY STEP 4",
+            """SELECT uid, username, display_name
+               FROM publicstream WHERE uid = 'oib@chello.at'""",
+            "Verify PublicStream fix"
+        )
+
+        # Step 5: Remove Orphaned Records
+        total_steps += 1
+        orphan_success = True
+
+        # Remove orphaned quota record
+        if not execute_step(
+            session,
+            "STEP 5a: Remove Orphaned Quota",
+            """DELETE FROM userquota WHERE uid = 'oib@bubuit.net'""",
+            "Remove orphaned quota record for deleted user"
+        ):
+            orphan_success = False
+
+        # Remove orphaned stream record
+        if not execute_step(
+            session,
+            "STEP 5b: Remove Orphaned Stream",
+            """DELETE FROM publicstream WHERE uid = 'oib@bubuit.net'""",
+            "Remove orphaned stream record for deleted user"
+        ):
+            orphan_success = False
+
+        if orphan_success:
+            success_count += 1
+
+        # Verify Step 5
+        execute_step(
+            session,
+            "VERIFY STEP 5",
+            """SELECT 'userquota' as table_name, COUNT(*) as count
+               FROM userquota WHERE uid = 'oib@bubuit.net'
+               UNION ALL
+               SELECT 'publicstream' as table_name, COUNT(*) as count
+               FROM publicstream WHERE uid = 'oib@bubuit.net'""",
+            "Verify orphaned records are removed"
+        )
+
+        # Final Verification
+        print(f"\n=== FINAL VERIFICATION ===")
+
+        # Check for remaining issues
+        # Note: each branch is parenthesized so its LIMIT 1 binds to that
+        # SELECT only; a bare mid-UNION LIMIT is a syntax error in PostgreSQL.
+        execute_step(
+            session,
+            "FINAL CHECK",
+            """(SELECT 'ISSUE: User email/username mismatch' as issue
+                FROM "user"
+                WHERE email != username
+                LIMIT 1)
+               UNION ALL
+               (SELECT 'ISSUE: Expired active sessions'
+                FROM dbsession
+                WHERE expires_at < NOW() AND is_active = true
+                LIMIT 1)
+               UNION ALL
+               (SELECT 'ISSUE: PublicStream UID/username mismatch'
+                FROM publicstream
+                WHERE uid != username
+                LIMIT 1)
+               UNION ALL
+               (SELECT 'ISSUE: Orphaned quota records'
+                FROM userquota q
+                LEFT JOIN "user" u ON q.uid = u.email
+                WHERE u.email IS NULL
+                LIMIT 1)
+               UNION ALL
+               (SELECT 'ISSUE: Orphaned stream records'
+                FROM publicstream p
+                LEFT JOIN "user" u ON p.uid = u.email
+                WHERE u.email IS NULL
+                LIMIT 1)""",
+            "Check for any remaining legacy issues"
+        )
+
+        # Summary
+        print(f"\n=== CLEANUP SUMMARY ===")
+        print(f"Total steps: {total_steps}")
+        print(f"Successful steps: {success_count}")
+        print(f"Failed steps: {total_steps - success_count}")
+
+        if success_count == total_steps:
+            print("✅ All legacy database issues have been fixed!")
+        else:
+            print("⚠️ Some issues remain. 
Check the output above for details.") + + return 0 if success_count == total_steps else 1 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/fix_db_constraints.py b/fix_db_constraints.py new file mode 100644 index 0000000..8575681 --- /dev/null +++ b/fix_db_constraints.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 +""" +Fix Database Constraints and Legacy Data +Handles foreign key constraints properly during cleanup +""" + +import sys +from sqlmodel import Session, text +from database import engine + +def execute_query(session, query, description): + """Execute a query and report results""" + print(f"\n{description}") + print(f"Query: {query}") + + try: + result = session.exec(text(query)) + if query.strip().upper().startswith('SELECT'): + rows = result.fetchall() + print(f"Result: {len(rows)} rows") + for row in rows: + print(f" {row}") + else: + session.commit() + print(f"✅ Success: {result.rowcount} rows affected") + return True + except Exception as e: + print(f"❌ Error: {e}") + session.rollback() + return False + +def main(): + """Fix database constraints and legacy data""" + print("=== FIXING DATABASE CONSTRAINTS AND LEGACY DATA ===") + + with Session(engine) as session: + + # Step 1: First, let's temporarily drop the foreign key constraint + print("\n=== STEP 1: Handle Foreign Key Constraint ===") + + # Check current constraint + execute_query( + session, + """SELECT conname, conrelid::regclass, confrelid::regclass + FROM pg_constraint + WHERE conname = 'dbsession_user_id_fkey'""", + "Check existing foreign key constraint" + ) + + # Drop the constraint temporarily + execute_query( + session, + """ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey""", + "Drop foreign key constraint temporarily" + ) + + # Step 2: Update user table + print("\n=== STEP 2: Update User Table ===") + execute_query( + session, + """UPDATE "user" + SET username = email, + display_name = CASE + WHEN display_name = '' OR display_name IS NULL + THEN split_part(email, '@', 1) + ELSE display_name + END + WHERE email = 'oib@chello.at'""", + "Update user username to match email" + ) + + # Verify user update + execute_query( + session, + """SELECT email, username, display_name FROM "user" WHERE email = 'oib@chello.at'""", + "Verify user table update" + ) + + # Step 3: Update session user_id references + print("\n=== STEP 3: Update Session References ===") + execute_query( + session, + """UPDATE dbsession + SET user_id = 'oib@chello.at' + WHERE user_id = 'oibchello'""", + "Update session user_id to email format" + ) + + # Verify session updates + execute_query( + session, + """SELECT DISTINCT user_id FROM dbsession""", + "Verify session user_id updates" + ) + + # Step 4: Recreate the foreign key constraint + print("\n=== STEP 4: Recreate Foreign Key Constraint ===") + execute_query( + session, + """ALTER TABLE dbsession + ADD CONSTRAINT dbsession_user_id_fkey + FOREIGN KEY (user_id) REFERENCES "user"(username)""", + "Recreate foreign key constraint" + ) + + # Step 5: Final verification - check for remaining issues + print("\n=== STEP 5: Final Verification ===") + + # Check user email/username match + execute_query( + session, + """SELECT email, username, + CASE WHEN email = username THEN '✓ Match' ELSE '✗ Mismatch' END as status + FROM "user""", + "Check user email/username consistency" + ) + + # Check expired sessions + execute_query( + session, + """SELECT COUNT(*) as expired_active_sessions + FROM dbsession + WHERE expires_at < NOW() AND is_active = true""", + "Check for expired 
active sessions" + ) + + # Check PublicStream consistency + execute_query( + session, + """SELECT uid, username, + CASE WHEN uid = username THEN '✓ Match' ELSE '✗ Mismatch' END as status + FROM publicstream""", + "Check PublicStream UID/username consistency" + ) + + # Check for orphaned records + execute_query( + session, + """SELECT 'userquota' as table_name, COUNT(*) as orphaned_records + FROM userquota q + LEFT JOIN "user" u ON q.uid = u.email + WHERE u.email IS NULL + UNION ALL + SELECT 'publicstream' as table_name, COUNT(*) as orphaned_records + FROM publicstream p + LEFT JOIN "user" u ON p.uid = u.email + WHERE u.email IS NULL""", + "Check for orphaned records" + ) + + # Summary of current state + print("\n=== DATABASE STATE SUMMARY ===") + execute_query( + session, + """SELECT + COUNT(DISTINCT u.email) as total_users, + COUNT(DISTINCT q.uid) as quota_records, + COUNT(DISTINCT p.uid) as stream_records, + COUNT(CASE WHEN s.is_active THEN 1 END) as active_sessions, + COUNT(CASE WHEN s.expires_at < NOW() AND s.is_active THEN 1 END) as expired_active_sessions + FROM "user" u + FULL OUTER JOIN userquota q ON u.email = q.uid + FULL OUTER JOIN publicstream p ON u.email = p.uid + FULL OUTER JOIN dbsession s ON u.username = s.user_id""", + "Database state summary" + ) + + print("\n✅ Database cleanup completed!") + print("All legacy data issues should now be resolved.") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/fix_dbsession_fk.sql b/fix_dbsession_fk.sql new file mode 100644 index 0000000..1f6be37 --- /dev/null +++ b/fix_dbsession_fk.sql @@ -0,0 +1,13 @@ +-- Migration script to update DBSession foreign key to reference user.email +-- Run this when no active sessions exist to avoid deadlocks + +BEGIN; + +-- Step 1: Drop the existing foreign key constraint if it exists +ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey; + +-- Step 2: Add the new foreign key constraint referencing user.email +ALTER TABLE dbsession ADD CONSTRAINT dbsession_uid_fkey + FOREIGN KEY (uid) REFERENCES "user"(email); + +COMMIT; diff --git a/list_streams.py b/list_streams.py new file mode 100644 index 0000000..a8c0335 --- /dev/null +++ b/list_streams.py @@ -0,0 +1,156 @@ +# list_streams.py — FastAPI route to list all public streams (users with stream.opus) + +from fastapi import APIRouter, Request, Depends +from fastapi.responses import StreamingResponse, Response +from sqlalchemy.orm import Session +from sqlalchemy import select +from models import PublicStream +from database import get_db +from pathlib import Path +import asyncio +import os +import json + +router = APIRouter() +DATA_ROOT = Path("./data") + +@router.get("/streams-sse") +async def streams_sse(request: Request): + # Add CORS headers for SSE + origin = request.headers.get('origin', '') + allowed_origins = ["https://dicta2stream.net", "http://localhost:8000", "http://127.0.0.1:8000"] + + # Use the request origin if it's in the allowed list, otherwise use the first allowed origin + cors_origin = origin if origin in allowed_origins else allowed_origins[0] + + headers = { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache, no-transform", + "Connection": "keep-alive", + "Access-Control-Allow-Origin": cors_origin, + "Access-Control-Allow-Credentials": "true", + "Access-Control-Expose-Headers": "Content-Type", + "X-Accel-Buffering": "no" # Disable buffering for nginx + } + + # Handle preflight requests + if request.method == "OPTIONS": + headers.update({ + "Access-Control-Allow-Methods": "GET, 
OPTIONS",
+            "Access-Control-Allow-Headers": request.headers.get("access-control-request-headers", "*"),
+            "Access-Control-Max-Age": "86400"  # 24 hours
+        })
+        return Response(status_code=204, headers=headers)
+
+    async def event_wrapper():
+        # Use the database session context manager
+        with get_db() as db:
+            try:
+                async for event in list_streams_sse(db):
+                    yield event
+            except Exception as e:
+                # Only log errors if DEBUG is enabled
+                # Debug messages disabled
+                yield f"data: {json.dumps({'error': True, 'message': 'An error occurred'})}\n\n"
+
+    return StreamingResponse(
+        event_wrapper(),
+        media_type="text/event-stream",
+        headers=headers
+    )
+
+async def list_streams_sse(db):
+    """Stream public streams from the database as Server-Sent Events"""
+    try:
+        # Send initial ping
+        yield ":ping\n\n"
+
+        # Query all public streams from the database with required fields
+        # Also get all valid users to filter out orphaned streams
+        from models import User
+
+        # Use the query interface instead of execute
+        all_streams = db.query(PublicStream).order_by(PublicStream.mtime.desc()).all()
+
+        # Get all valid user UIDs (email and username)
+        all_users = db.query(User).all()
+
+        valid_uids = set()
+        for user in all_users:
+            valid_uids.add(user.email)
+            valid_uids.add(user.username)
+
+        # Filter out orphaned streams (streams without corresponding user accounts)
+        streams = []
+        orphaned_count = 0
+        for stream in all_streams:
+            if stream.uid in valid_uids:
+                streams.append(stream)
+            else:
+                orphaned_count += 1
+                print(f"[STREAMS] Filtering out orphaned stream: {stream.uid} (username: {stream.username})")
+
+        if orphaned_count > 0:
+            print(f"[STREAMS] Filtered out {orphaned_count} orphaned streams from public display")
+
+        if not streams:
+            print("No public streams found in the database")
+            yield f"data: {json.dumps({'end': True})}\n\n"
+            return
+
+        # Debug messages disabled
+
+        # Send each stream as an SSE event
+        for stream in streams:
+            try:
+                # Ensure we have all required fields with fallbacks
+                stream_data = {
+                    'uid': stream.uid or '',
+                    'size': stream.storage_bytes or 0,
+                    'mtime': int(stream.mtime) if stream.mtime is not None else 0,
+                    'username': stream.username or '',
+                    'created_at': stream.created_at.isoformat() if stream.created_at else None,
+                    'updated_at': stream.updated_at.isoformat() if stream.updated_at else None
+                }
+                # Debug messages disabled
+                yield f"data: {json.dumps(stream_data)}\n\n"
+                # Small delay to prevent overwhelming the client
+                await asyncio.sleep(0.1)
+            except Exception as e:
+                print(f"Error processing stream {stream.uid}: {str(e)}")
+                # Debug messages disabled
+                continue
+
+        # Send end of stream marker
+        # Debug messages disabled
+        yield f"data: {json.dumps({'end': True})}\n\n"
+
+    except Exception as e:
+        print(f"Error in list_streams_sse: {str(e)}")
+        # Debug messages disabled
+        yield f"data: {json.dumps({'error': True, 'message': str(e)})}\n\n"
+
+@router.get("/streams")
+def list_streams():
+    """List all public streams from the database"""
+    # Use the database session context manager
+    with get_db() as db:
+        try:
+            # Use the query interface instead of execute
+            streams = db.query(PublicStream).order_by(PublicStream.mtime.desc()).all()
+
+            return {
+                "streams": [
+                    {
+                        'uid': stream.uid,
+                        'size': stream.storage_bytes,  # PublicStream stores the size as storage_bytes
+                        'mtime': stream.mtime,
+                        'created_at': stream.created_at.isoformat() if stream.created_at else None,
+                        'updated_at': stream.updated_at.isoformat() if stream.updated_at else None
+                    }
+                    for stream in streams
+                ]
+            }
+        except Exception as e:
+            # Debug messages 
disabled + return {"streams": []} diff --git a/log.py b/log.py index 4ffa88b..3c21863 100644 --- a/log.py +++ b/log.py @@ -15,5 +15,6 @@ def log_violation(event: str, ip: str, uid: str, reason: str): f.write(log_entry) # If DEBUG mode, also print to stdout if os.getenv("DEBUG", "0") in ("1", "true", "True"): # Set DEBUG=1 in .env to enable - print(f"[DEBUG] {log_entry.strip()}") + # Debug messages disabled + pass diff --git a/magic.py b/magic.py index 99138c3..53d06a8 100644 --- a/magic.py +++ b/magic.py @@ -12,58 +12,107 @@ import json router = APIRouter() @router.post("/magic-login") -async def magic_login(request: Request, response: Response, db: Session = Depends(get_db), token: str = Form(...)): - print(f"[magic-login] Received token: {token}") - user = db.exec(select(User).where(User.token == token)).first() - print(f"[magic-login] User lookup: {'found' if user else 'not found'}") - - if not user: - print("[magic-login] Invalid or expired token") - return RedirectResponse(url="/?error=Invalid%20or%20expired%20token", status_code=302) - - if datetime.utcnow() - user.token_created > timedelta(minutes=30): - print(f"[magic-login] Token expired for user: {user.username}") - return RedirectResponse(url="/?error=Token%20expired", status_code=302) - - # Mark user as confirmed if not already - if not user.confirmed: - user.confirmed = True - user.ip = request.client.host - db.add(user) - print(f"[magic-login] User {user.username} confirmed.") - - # Create a new session for the user (valid for 1 hour) - session_token = secrets.token_urlsafe(32) - expires_at = datetime.utcnow() + timedelta(hours=1) +async def magic_login(request: Request, response: Response, token: str = Form(...)): + # Debug messages disabled - # Create new session - session = DBSession( - token=session_token, - user_id=user.username, - ip_address=request.client.host or "", - user_agent=request.headers.get("user-agent", ""), - expires_at=expires_at, - is_active=True + # Use the database session context manager + with get_db() as db: + try: + # Look up user by token + user = db.query(User).filter(User.token == token).first() + # Debug messages disabled + + if not user: + # Debug messages disabled + raise HTTPException(status_code=401, detail="Invalid or expired token") + + if datetime.utcnow() - user.token_created > timedelta(minutes=30): + # Debug messages disabled + raise HTTPException(status_code=401, detail="Token expired") + + # Mark user as confirmed if not already + if not user.confirmed: + user.confirmed = True + user.ip = request.client.host + db.add(user) + # Debug messages disabled + + # Create a new session for the user (valid for 24 hours) + session_token = secrets.token_urlsafe(32) + expires_at = datetime.utcnow() + timedelta(hours=24) + + # Create new session + session = DBSession( + token=session_token, + uid=user.email or user.username, # Use email as UID + ip_address=request.client.host or "", + user_agent=request.headers.get("user-agent", ""), + expires_at=expires_at, + is_active=True + ) + db.add(session) + db.commit() + + # Store user data for use after the session is committed + user_email = user.email or user.username + username = user.username + + except Exception as e: + db.rollback() + # Debug messages disabled + # Debug messages disabled + raise HTTPException(status_code=500, detail="Database error during login") + + # Determine if we're running in development (localhost) or production + is_localhost = request.url.hostname == "localhost" + + # Prepare response data + response_data = { + "success": 
True, + "message": "Login successful", + "user": { + "email": user_email, + "username": username + }, + "token": session_token # Include the token in the JSON response + } + + # Create the response + response = JSONResponse( + content=response_data, + status_code=200 ) - db.add(session) - db.commit() - # Set cookie with the session token (valid for 1 hour) + # Set cookies response.set_cookie( key="sessionid", value=session_token, httponly=True, - secure=not request.url.hostname == "localhost", - samesite="lax", - max_age=3600, # 1 hour + secure=not is_localhost, + samesite="lax" if is_localhost else "none", + max_age=86400, # 24 hours path="/" ) - print(f"[magic-login] Session created for user: {user.username}") - - # Redirect to success page - return RedirectResponse( - url=f"/?login=success&confirmed_uid={user.username}", - status_code=302, - headers=dict(response.headers) + response.set_cookie( + key="uid", + value=user_email, + samesite="lax" if is_localhost else "none", + secure=not is_localhost, + max_age=86400, # 24 hours + path="/" ) + + response.set_cookie( + key="authToken", + value=session_token, + samesite="lax" if is_localhost else "none", + secure=not is_localhost, + max_age=86400, # 24 hours + path="/" + ) + + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + return response diff --git a/main.py b/main.py index 014e7cc..30cd107 100644 --- a/main.py +++ b/main.py @@ -90,20 +90,30 @@ def get_current_user(request: Request, db: Session = Depends(get_db)): from range_response import range_response @app.get("/audio/{uid}/{filename}") -def get_audio(uid: str, filename: str, request: Request, db: Session = Depends(get_db)): +def get_audio(uid: str, filename: str, request: Request): # Allow public access ONLY to stream.opus - # Map email-based UID to username for file system access - # If UID contains @, it's an email - look up the corresponding username - if '@' in uid: - from models import User - user = db.exec(select(User).where(User.email == uid)).first() - if not user: - raise HTTPException(status_code=404, detail="User not found") - filesystem_uid = user.username - else: - # Legacy support for username-based UIDs - filesystem_uid = uid + # Use the database session context manager + with get_db() as db: + try: + # Use email-based UID directly for file system access + # If UID contains @, it's an email - use it directly + if '@' in uid: + from models import User + user = db.query(User).filter(User.email == uid).first() + if not user: + raise HTTPException(status_code=404, detail="User not found") + filesystem_uid = uid # Use email directly for directory + else: + # Legacy support for username-based UIDs - convert to email + from models import User + user = db.query(User).filter(User.username == uid).first() + if not user: + raise HTTPException(status_code=404, detail="User not found") + filesystem_uid = user.email # Convert username to email for directory + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") user_dir = os.path.join("data", filesystem_uid) file_path = os.path.join(user_dir, filename) @@ -127,7 +137,8 @@ def get_audio(uid: str, filename: str, request: Request, db: Session = Depends(g return FileResponse(real_file_path, media_type="audio/ogg") if debug_mode: - print("[DEBUG] FastAPI running in debug mode.") + # Debug messages disabled + pass # Global error handler to always return JSON from slowapi.errors import RateLimitExceeded @@ -179,7 +190,7 @@ from register import 
router as register_router from magic import router as magic_router from upload import router as upload_router from streams import router as streams_router -from list_user_files import router as list_user_files_router + from auth_router import router as auth_router app.include_router(streams_router) @@ -188,14 +199,100 @@ from list_streams import router as list_streams_router from account_router import router as account_router # Include all routers -app.include_router(auth_router) +app.include_router(auth_router, prefix="/api") app.include_router(account_router) app.include_router(register_router) app.include_router(magic_router) app.include_router(upload_router) -app.include_router(list_user_files_router) + app.include_router(list_streams_router) +@app.get("/user-files/{uid}") +async def list_user_files(uid: str): + from pathlib import Path + + # Get the user's directory and check for files first + user_dir = Path("data") / uid + if not user_dir.exists() or not user_dir.is_dir(): + return {"files": []} + + # Get all files that actually exist on disk + existing_files = {f.name for f in user_dir.iterdir() if f.is_file()} + + # Use the database session context manager for all database operations + with get_db() as db: + # Verify the user exists + user_check = db.query(User).filter((User.username == uid) | (User.email == uid)).first() + if not user_check: + raise HTTPException(status_code=404, detail="User not found") + + # Query the UploadLog table for this user + all_upload_logs = db.query(UploadLog).filter( + UploadLog.uid == uid + ).order_by(UploadLog.created_at.desc()).all() + + # Track processed files to avoid duplicates + processed_files = set() + files_metadata = [] + + for log in all_upload_logs: + # Skip if no processed filename + if not log.processed_filename: + continue + + # Skip if we've already processed this file + if log.processed_filename in processed_files: + continue + + # Skip stream.opus from uploads list (it's a special file) + if log.processed_filename == 'stream.opus': + continue + + # Skip if file doesn't exist on disk + # Files are stored with the pattern: {upload_id}_{processed_filename} + expected_filename = f"{log.id}_{log.processed_filename}" + if expected_filename not in existing_files: + # Only delete records older than 5 minutes to avoid race conditions + from datetime import datetime, timedelta + cutoff_time = datetime.utcnow() - timedelta(minutes=5) + if log.created_at < cutoff_time: + print(f"[CLEANUP] Removing orphaned DB record (older than 5min): {expected_filename}") + db.delete(log) + continue + + # Add to processed files to avoid duplicates + processed_files.add(log.processed_filename) + + # Always use the original filename if present + display_name = log.filename if log.filename else log.processed_filename + + # Only include files that exist on disk + # Files are stored with the pattern: {upload_id}_{processed_filename} + stored_filename = f"{log.id}_{log.processed_filename}" + file_path = user_dir / stored_filename + if file_path.exists() and file_path.is_file(): + try: + # Get the actual file size in case it changed + actual_size = file_path.stat().st_size + files_metadata.append({ + "original_name": display_name, + "stored_name": log.processed_filename, + "size": actual_size + }) + except OSError: + # If we can't access the file, skip it + continue + + # Commit any database changes (deletions of non-existent files) + try: + db.commit() + except Exception as e: + print(f"[ERROR] Failed to commit database changes: {e}") + db.rollback() + + return 
{"files": files_metadata} + + # Serve static files app.mount("/static", StaticFiles(directory="static"), name="static") @@ -258,9 +355,9 @@ def serve_me(): @app.get("/admin/stats") def admin_stats(request: Request, db: Session = Depends(get_db)): from sqlmodel import select - users = db.exec(select(User)).all() + users = db.query(User).all() users_count = len(users) - total_quota = db.exec(select(UserQuota)).all() + total_quota = db.query(UserQuota).all() total_quota_sum = sum(q.storage_bytes for q in total_quota) violations_log = 0 try: @@ -292,10 +389,224 @@ def debug(request: Request): MAX_QUOTA_BYTES = 100 * 1024 * 1024 -# Delete account endpoint has been moved to account_router.py +# Delete account endpoint - fallback implementation since account_router.py has loading issues +@app.post("/api/delete-account") +async def delete_account_fallback(request: Request, db: Session = Depends(get_db)): + try: + # Get request data + data = await request.json() + uid = data.get("uid") + if not uid: + raise HTTPException(status_code=400, detail="Missing UID") + + ip = request.client.host + # Debug messages disabled + + # Find user by email or username + user = None + if '@' in uid: + user = db.exec(select(User).where(User.email == uid)).first() + if not user: + user = db.exec(select(User).where(User.username == uid)).first() + + # If still not found, check if this UID exists in upload logs and try to find the associated user + if not user: + # Look for upload logs with this UID to find the real user + upload_log = db.exec(select(UploadLog).where(UploadLog.uid == uid)).first() + if upload_log: + # Try to find a user that might be associated with this UID + # Check if there's a user with the same IP or similar identifier + all_users = db.exec(select(User)).all() + for potential_user in all_users: + # Use the first confirmed user as fallback (for orphaned UIDs) + if potential_user.confirmed: + user = potential_user + # Debug messages disabled + break + + if not user: + # Debug messages disabled + raise HTTPException(status_code=404, detail="User not found") + + if user.ip != ip: + raise HTTPException(status_code=403, detail="Unauthorized: IP address does not match") + + # Delete user data from database using the original UID + # The original UID is what's stored in the database records + + # Delete upload logs for all possible UIDs (original UID, email, username) + upload_logs_to_delete = [] + + # Check for upload logs with original UID + upload_logs_original = db.query(UploadLog).filter(UploadLog.uid == uid).all() + if upload_logs_original: + # Debug messages disabled + upload_logs_to_delete.extend(upload_logs_original) + + # Check for upload logs with user email + upload_logs_email = db.query(UploadLog).filter(UploadLog.uid == user.email).all() + if upload_logs_email: + # Debug messages disabled + upload_logs_to_delete.extend(upload_logs_email) + + # Check for upload logs with username + upload_logs_username = db.query(UploadLog).filter(UploadLog.uid == user.username).all() + if upload_logs_username: + # Debug messages disabled + upload_logs_to_delete.extend(upload_logs_username) + + # Delete all found upload log records + for log in upload_logs_to_delete: + try: + db.delete(log) + except Exception as e: + # Debug messages disabled + pass + + # Debug messages disabled + + # Delete user quota for both the original UID and user email (to cover all cases) + quota_original = db.get(UserQuota, uid) + if quota_original: + # Debug messages disabled + db.delete(quota_original) + + quota_email = 
db.get(UserQuota, user.email) + if quota_email: + # Debug messages disabled + db.delete(quota_email) + + # Delete user sessions + sessions = db.query(DBSession).filter(DBSession.user_id == user.username).all() + # Debug messages disabled + for session in sessions: + db.delete(session) + + # Delete public stream entries for all possible UIDs + # Use select() instead of get() to find all matching records + public_streams_to_delete = [] + + # Check for public stream with original UID + public_stream_original = db.query(PublicStream).filter(PublicStream.uid == uid).first() + if public_stream_original: + # Debug messages disabled + public_streams_to_delete.append(public_stream_original) + + # Check for public stream with user email + public_stream_email = db.query(PublicStream).filter(PublicStream.uid == user.email).first() + if public_stream_email: + # Debug messages disabled + public_streams_to_delete.append(public_stream_email) + + # Check for public stream with username + public_stream_username = db.query(PublicStream).filter(PublicStream.uid == user.username).first() + if public_stream_username: + # Debug messages disabled + public_streams_to_delete.append(public_stream_username) + + # Delete all found public stream records + for ps in public_streams_to_delete: + try: + # Debug messages disabled + db.delete(ps) + except Exception as e: + # Debug messages disabled + pass + + # Debug messages disabled + + # Delete user directory BEFORE deleting user record - check all possible locations + import shutil + + # Try to delete directory with UID (email) - current standard + uid_dir = os.path.join('data', uid) + if os.path.exists(uid_dir): + # Debug messages disabled + shutil.rmtree(uid_dir, ignore_errors=True) + + # Also try to delete directory with email (in case of different UID formats) + email_dir = os.path.join('data', user.email) + if os.path.exists(email_dir) and email_dir != uid_dir: + # Debug messages disabled + shutil.rmtree(email_dir, ignore_errors=True) + + # Also try to delete directory with username (legacy format) + username_dir = os.path.join('data', user.username) + if os.path.exists(username_dir) and username_dir != uid_dir and username_dir != email_dir: + # Debug messages disabled + shutil.rmtree(username_dir, ignore_errors=True) + + # Delete user account AFTER directory cleanup + db.delete(user) + db.commit() + + # Debug messages disabled + return {"status": "success", "message": "Account deleted successfully"} + + except HTTPException: + raise + except Exception as e: + # Debug messages disabled + db.rollback() + raise HTTPException(status_code=500, detail=f"Failed to delete account: {str(e)}") + +# Cleanup endpoint for orphaned public streams +@app.post("/api/cleanup-streams") +async def cleanup_orphaned_streams(request: Request, db: Session = Depends(get_db)): + try: + # Get request data + data = await request.json() + admin_secret = data.get("admin_secret") + + # Verify admin access + if admin_secret != ADMIN_SECRET: + raise HTTPException(status_code=403, detail="Unauthorized") + + # Find orphaned public streams (streams without corresponding user accounts) + all_streams = db.query(PublicStream).all() + all_users = db.query(User).all() + + # Create sets of valid UIDs from user accounts + valid_uids = set() + for user in all_users: + valid_uids.add(user.email) + valid_uids.add(user.username) + + orphaned_streams = [] + for stream in all_streams: + if stream.uid not in valid_uids: + orphaned_streams.append(stream) + + # Delete orphaned streams + deleted_count = 0 + for 
stream in orphaned_streams: + try: + print(f"[CLEANUP] Deleting orphaned stream: {stream.uid} (username: {stream.username})") + db.delete(stream) + deleted_count += 1 + except Exception as e: + print(f"[CLEANUP] Error deleting stream {stream.uid}: {e}") + + db.commit() + print(f"[CLEANUP] Deleted {deleted_count} orphaned public streams") + + return { + "status": "success", + "message": f"Deleted {deleted_count} orphaned public streams", + "deleted_streams": [s.uid for s in orphaned_streams] + } + + except HTTPException: + raise + except Exception as e: + print(f"[CLEANUP] Error: {str(e)}") + db.rollback() + raise HTTPException(status_code=500, detail=f"Cleanup failed: {str(e)}") + +# Original delete account endpoint has been moved to account_router.py @app.delete("/uploads/{uid}/{filename}") -async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)): +async def delete_file(uid: str, filename: str, request: Request): """ Delete a file for a specific user. @@ -319,26 +630,84 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D if user.ip != ip: raise HTTPException(status_code=403, detail="Device/IP mismatch. Please log in again.") - # Set up user directory and validate paths - user_dir = os.path.join('data', user.username) + # Set up user directory using email (matching upload logic) + user_dir = os.path.join('data', user.email) os.makedirs(user_dir, exist_ok=True) # Decode URL-encoded filename from urllib.parse import unquote filename = unquote(filename) + # Debug: Print the user info and filename being used + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + if os.path.exists(user_dir): + # Debug messages disabled + pass + # Construct and validate target path target_path = os.path.join(user_dir, filename) real_target_path = os.path.realpath(target_path) real_user_dir = os.path.realpath(user_dir) + # Debug: Print the constructed paths + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + # Security check: Ensure the target path is inside the user's directory if not real_target_path.startswith(real_user_dir + os.sep): + # Debug messages disabled raise HTTPException(status_code=403, detail="Invalid file path") # Check if file exists if not os.path.isfile(real_target_path): - raise HTTPException(status_code=404, detail=f"File not found: {filename}") + # Debug: List files in the directory to help diagnose the issue + try: + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + + if os.path.exists(real_user_dir): + files_in_dir = os.listdir(real_user_dir) + # Debug messages disabled + + # Print detailed file info + for f in files_in_dir: + full_path = os.path.join(real_user_dir, f) + try: + # Debug messages disabled + pass + except Exception as e: + # Debug messages disabled + pass + + # Debug messages disabled + # Debug messages disabled + # Debug messages disabled + + # Try to find a matching file (case-insensitive, partial match) + matching_files = [f for f in files_in_dir if filename.lower() in f.lower()] + if matching_files: + # Debug messages disabled + # Use the first matching file + real_target_path = os.path.join(real_user_dir, matching_files[0]) + # Debug messages disabled + # Debug messages disabled + else: + # Debug messages disabled + raise HTTPException(status_code=404, detail=f"File not found: {filename}") + else: + # Debug messages disabled + raise HTTPException(status_code=404, 
detail=f"User directory not found") + + except HTTPException: + raise + except Exception as e: + # Debug messages disabled + raise HTTPException(status_code=404, detail=f"File not found: {filename}") # Delete both the target file and its UUID-only variant deleted_files = [] @@ -377,20 +746,23 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D # Clean up the database record for this file try: - # Find and delete the upload log entry - log_entry = db.exec( - select(UploadLog) - .where(UploadLog.uid == uid) - .where(UploadLog.processed_filename == filename) - ).first() - - if log_entry: - db.delete(log_entry) - db.commit() - log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}") + with get_db() as db: + try: + # Find and delete the upload log entry + log_entry = db.query(UploadLog).filter( + UploadLog.uid == uid, + UploadLog.processed_filename == filename + ).first() + + if log_entry: + db.delete(log_entry) + db.commit() + log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}") + except Exception as e: + db.rollback() + raise e except Exception as e: log_violation("DB_CLEANUP_ERROR", ip, uid, f"Failed to clean up DB record: {str(e)}") - db.rollback() # Regenerate stream.opus after file deletion try: @@ -405,14 +777,17 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = D # Update user quota in a separate try-except to not fail the entire operation try: - # Use verify_and_fix_quota to ensure consistency between disk and DB - total_size = verify_and_fix_quota(db, user.username, user_dir) - log_violation("QUOTA_UPDATE", ip, uid, - f"Updated quota: {total_size} bytes") - + with get_db() as db: + try: + # Use verify_and_fix_quota to ensure consistency between disk and DB + total_size = verify_and_fix_quota(db, user.username, user_dir) + log_violation("QUOTA_UPDATE", ip, uid, + f"Updated quota: {total_size} bytes") + except Exception as e: + db.rollback() + raise e except Exception as e: log_violation("QUOTA_ERROR", ip, uid, f"Quota update failed: {str(e)}") - db.rollback() return {"status": "deleted"} @@ -444,11 +819,13 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int: if os.path.isfile(stream_opus_path): try: total_size = os.path.getsize(stream_opus_path) - print(f"[QUOTA] Stream.opus size for {uid}: {total_size} bytes") + # Debug messages disabled except (OSError, FileNotFoundError) as e: - print(f"[QUOTA] Error getting size for stream.opus: {e}") + # Debug messages disabled + pass else: - print(f"[QUOTA] stream.opus not found in {user_dir}") + # Debug messages disabled + pass # Update quota in database q = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0) @@ -456,123 +833,143 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int: db.add(q) # Clean up any database records for files that don't exist - uploads = db.exec(select(UploadLog).where(UploadLog.uid == uid)).all() + # BUT only for records older than 5 minutes to avoid race conditions with recent uploads + from datetime import datetime, timedelta + cutoff_time = datetime.utcnow() - timedelta(minutes=5) + + uploads = db.query(UploadLog).filter( + UploadLog.uid == uid, + UploadLog.created_at < cutoff_time # Only check older records + ).all() + for upload in uploads: if upload.processed_filename: # Only check if processed_filename exists stored_filename = f"{upload.id}_{upload.processed_filename}" file_path = os.path.join(user_dir, stored_filename) if not os.path.isfile(file_path): - 
print(f"[QUOTA] Removing orphaned DB record: {stored_filename}") + # Debug messages disabled db.delete(upload) try: db.commit() - print(f"[QUOTA] Updated quota for {uid}: {total_size} bytes") + # Debug messages disabled except Exception as e: - print(f"[QUOTA] Error committing quota update: {e}") + # Debug messages disabled db.rollback() raise return total_size @app.get("/me/{uid}") -def get_me(uid: str, request: Request, response: Response, db: Session = Depends(get_db)): +def get_me(uid: str, request: Request, response: Response): # Add headers to prevent caching response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" response.headers["Pragma"] = "no-cache" response.headers["Expires"] = "0" - print(f"[DEBUG] GET /me/{uid} - Client IP: {request.client.host}") - try: - # Get user info - user = get_user_by_uid(uid) - if not user: - print(f"[ERROR] User with UID {uid} not found") - raise HTTPException(status_code=404, detail="User not found") - - # Only enforce IP check in production - if not debug_mode: - if user.ip != request.client.host: - print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}") - # In production, we might want to be more strict - # But for now, we'll just log a warning in development - if not debug_mode: - raise HTTPException(status_code=403, detail="IP address mismatch") - - # Get user directory - user_dir = os.path.join('data', uid) - os.makedirs(user_dir, exist_ok=True) - - # Get all upload logs for this user - upload_logs = db.exec( - select(UploadLog) - .where(UploadLog.uid == uid) - .order_by(UploadLog.created_at.desc()) - ).all() - print(f"[DEBUG] Found {len(upload_logs)} upload logs for UID {uid}") - - # Build file list from database records, checking if files exist on disk - files = [] - seen_files = set() # Track seen files to avoid duplicates - - print(f"[DEBUG] Processing {len(upload_logs)} upload logs for UID {uid}") - - for i, log in enumerate(upload_logs): - if not log.filename or not log.processed_filename: - print(f"[DEBUG] Skipping log entry {i}: missing filename or processed_filename") - continue - - # The actual filename on disk has the log ID prepended - stored_filename = f"{log.id}_{log.processed_filename}" - file_path = os.path.join(user_dir, stored_filename) + + # Debug messages disabled + + # Use the database session context manager for all database operations + with get_db() as db: + try: + # Get user info + user = db.query(User).filter((User.username == uid) | (User.email == uid)).first() + if not user: + print(f"[ERROR] User with UID {uid} not found") + raise HTTPException(status_code=404, detail="User not found") - # Skip if we've already seen this file - if stored_filename in seen_files: - print(f"[DEBUG] Skipping duplicate file: {stored_filename}") - continue - - seen_files.add(stored_filename) - - # Only include the file if it exists on disk and is not stream.opus - if os.path.isfile(file_path) and stored_filename != 'stream.opus': - try: - # Get the actual file size in case it changed - file_size = os.path.getsize(file_path) - file_info = { - "name": stored_filename, - "original_name": log.filename, - "size": file_size - } - files.append(file_info) - print(f"[DEBUG] Added file {len(files)}: {log.filename} (stored as {stored_filename}, {file_size} bytes)") - except OSError as e: - print(f"[WARNING] Could not access file {stored_filename}: {e}") - else: - print(f"[DEBUG] File not found on disk or is stream.opus: {stored_filename}") - - # Log all files being returned - print("[DEBUG] All files being 
returned:") - for i, file_info in enumerate(files, 1): - print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)") - - # Verify and fix quota based on actual files on disk - total_size = verify_and_fix_quota(db, uid, user_dir) - quota_mb = round(total_size / (1024 * 1024), 2) - print(f"[DEBUG] Verified quota for UID {uid}: {quota_mb} MB") + # Only enforce IP check in production + if not debug_mode: + if user.ip != request.client.host: + print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}") + # In production, we might want to be more strict + if not debug_mode: + raise HTTPException(status_code=403, detail="IP address mismatch") - response_data = { - "files": files, - "quota": quota_mb - } - print(f"[DEBUG] Returning {len(files)} files and quota info") - return response_data - - except HTTPException: - # Re-raise HTTP exceptions as they are - raise - except Exception as e: - # Log the full traceback for debugging - import traceback - error_trace = traceback.format_exc() - print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}") - # Return a 500 error with a generic message - raise HTTPException(status_code=500, detail="Internal server error") + # Get user directory + user_dir = os.path.join('data', uid) + os.makedirs(user_dir, exist_ok=True) + + # Get all upload logs for this user using the query interface + upload_logs = db.query(UploadLog).filter( + UploadLog.uid == uid + ).order_by(UploadLog.created_at.desc()).all() + + # Debug messages disabled + + # Build file list from database records, checking if files exist on disk + files = [] + seen_files = set() # Track seen files to avoid duplicates + + # Debug messages disabled + + for i, log in enumerate(upload_logs): + if not log.filename or not log.processed_filename: + # Debug messages disabled + continue + + # The actual filename on disk has the log ID prepended + stored_filename = f"{log.id}_{log.processed_filename}" + file_path = os.path.join(user_dir, stored_filename) + + # Skip if we've already seen this file + if stored_filename in seen_files: + # Debug messages disabled + continue + + seen_files.add(stored_filename) + + # Only include the file if it exists on disk and is not stream.opus + if os.path.isfile(file_path) and stored_filename != 'stream.opus': + try: + # Get the actual file size in case it changed + file_size = os.path.getsize(file_path) + file_info = { + "name": stored_filename, + "original_name": log.filename, + "size": file_size + } + files.append(file_info) + # Debug messages disabled + except OSError as e: + print(f"[WARNING] Could not access file {stored_filename}: {e}") + else: + # Debug messages disabled + pass + + # Log all files being returned + # Debug messages disabled + # for i, file_info in enumerate(files, 1): + # print(f" {i}. 
{file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)") + + # Verify and fix quota based on actual files on disk + total_size = verify_and_fix_quota(db, uid, user_dir) + quota_mb = round(total_size / (1024 * 1024), 2) + max_quota_mb = round(MAX_QUOTA_BYTES / (1024 * 1024), 2) + # Debug messages disabled + + response_data = { + "files": files, + "quota": { + "used": quota_mb, + "max": max_quota_mb, + "used_bytes": total_size, + "max_bytes": MAX_QUOTA_BYTES, + "percentage": round((total_size / MAX_QUOTA_BYTES) * 100, 2) if MAX_QUOTA_BYTES > 0 else 0 + } + } + # Debug messages disabled + return response_data + + except HTTPException: + # Re-raise HTTP exceptions as they are + raise + except Exception as e: + # Log the full traceback for debugging + import traceback + error_trace = traceback.format_exc() + print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}") + # Rollback any database changes in case of error + db.rollback() + # Return a 500 error with a generic message + raise HTTPException(status_code=500, detail="Internal server error") diff --git a/migrate_dbsession_fk.sql b/migrate_dbsession_fk.sql new file mode 100644 index 0000000..a3910d9 --- /dev/null +++ b/migrate_dbsession_fk.sql @@ -0,0 +1,13 @@ +-- Migration script to update DBSession foreign key to reference user.email +-- Run this when no active sessions exist to avoid deadlocks + +BEGIN; + +-- Step 1: Drop the existing foreign key constraint +ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey; + +-- Step 2: Add the new foreign key constraint referencing user.email +ALTER TABLE dbsession ADD CONSTRAINT dbsession_user_id_fkey + FOREIGN KEY (user_id) REFERENCES "user"(email); + +COMMIT; diff --git a/migrate_uid_to_email.py b/migrate_uid_to_email.py new file mode 100644 index 0000000..3574dbc --- /dev/null +++ b/migrate_uid_to_email.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +""" +UID Migration Script - Complete migration from username-based to email-based UIDs + +This script completes the UID migration by updating remaining username-based UIDs +in the database to use proper email format. 
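+
+For each mapping entry the script issues one parameterized UPDATE per affected
+table. A minimal sketch of the logic implemented in migrate_uids() below, using
+the psycopg2 cursor opened in main():
+
+    for old_uid, new_uid in {'devuser': 'oib@bubuit.net'}.items():
+        for table in ('publicstream', 'userquota', 'uploadlog'):
+            cursor.execute(
+                f"UPDATE {table} SET uid = %s WHERE uid = %s",
+                (new_uid, old_uid),
+            )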
+ +Based on previous migration history: +- devuser -> oib@bubuit.net (as per migration memory) +- oibchello -> oib@chello.at (already completed) +""" + +import psycopg2 +import sys +from datetime import datetime + +# Database connection string +DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream" + +def log_message(message): + """Log message with timestamp""" + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + print(f"[{timestamp}] {message}") + +def check_current_state(cursor): + """Check current state of UID migration""" + log_message("Checking current UID state...") + + # Check publicstream table + cursor.execute("SELECT uid, username FROM publicstream WHERE uid NOT LIKE '%@%'") + non_email_uids = cursor.fetchall() + + if non_email_uids: + log_message(f"Found {len(non_email_uids)} non-email UIDs in publicstream:") + for uid, username in non_email_uids: + log_message(f" - UID: {uid}, Username: {username}") + else: + log_message("All UIDs in publicstream are already in email format") + + # Check userquota table + cursor.execute("SELECT uid FROM userquota WHERE uid NOT LIKE '%@%'") + quota_non_email_uids = cursor.fetchall() + + if quota_non_email_uids: + log_message(f"Found {len(quota_non_email_uids)} non-email UIDs in userquota:") + for (uid,) in quota_non_email_uids: + log_message(f" - UID: {uid}") + else: + log_message("All UIDs in userquota are already in email format") + + return non_email_uids, quota_non_email_uids + +def migrate_uids(cursor): + """Migrate remaining username-based UIDs to email format""" + log_message("Starting UID migration...") + + # Migration mapping based on previous migration history + uid_mapping = { + 'devuser': 'oib@bubuit.net' + } + + migration_count = 0 + + for old_uid, new_uid in uid_mapping.items(): + log_message(f"Migrating UID: {old_uid} -> {new_uid}") + + # Update publicstream table + cursor.execute( + "UPDATE publicstream SET uid = %s WHERE uid = %s", + (new_uid, old_uid) + ) + publicstream_updated = cursor.rowcount + + # Update userquota table + cursor.execute( + "UPDATE userquota SET uid = %s WHERE uid = %s", + (new_uid, old_uid) + ) + userquota_updated = cursor.rowcount + + # Update uploadlog table (if any records exist) + cursor.execute( + "UPDATE uploadlog SET uid = %s WHERE uid = %s", + (new_uid, old_uid) + ) + uploadlog_updated = cursor.rowcount + + log_message(f" - Updated {publicstream_updated} records in publicstream") + log_message(f" - Updated {userquota_updated} records in userquota") + log_message(f" - Updated {uploadlog_updated} records in uploadlog") + + migration_count += publicstream_updated + userquota_updated + uploadlog_updated + + return migration_count + +def verify_migration(cursor): + """Verify migration was successful""" + log_message("Verifying migration...") + + # Check for any remaining non-email UIDs + cursor.execute(""" + SELECT 'publicstream' as table_name, uid FROM publicstream WHERE uid NOT LIKE '%@%' + UNION ALL + SELECT 'userquota' as table_name, uid FROM userquota WHERE uid NOT LIKE '%@%' + UNION ALL + SELECT 'uploadlog' as table_name, uid FROM uploadlog WHERE uid NOT LIKE '%@%' + """) + + remaining_non_email = cursor.fetchall() + + if remaining_non_email: + log_message("WARNING: Found remaining non-email UIDs:") + for table_name, uid in remaining_non_email: + log_message(f" - {table_name}: {uid}") + return False + else: + log_message("SUCCESS: All UIDs are now in email format") + return True + +def main(): + """Main migration function""" + log_message("Starting UID migration 
script") + + try: + # Connect to database + log_message("Connecting to database...") + conn = psycopg2.connect(DATABASE_URL) + cursor = conn.cursor() + + # Check current state + non_email_uids, quota_non_email_uids = check_current_state(cursor) + + if not non_email_uids and not quota_non_email_uids: + log_message("No migration needed - all UIDs are already in email format") + return + + # Perform migration + migration_count = migrate_uids(cursor) + + # Commit changes + conn.commit() + log_message(f"Migration committed - {migration_count} records updated") + + # Verify migration + if verify_migration(cursor): + log_message("UID migration completed successfully!") + else: + log_message("UID migration completed with warnings - manual review needed") + + except psycopg2.Error as e: + log_message(f"Database error: {e}") + if conn: + conn.rollback() + sys.exit(1) + except Exception as e: + log_message(f"Unexpected error: {e}") + if conn: + conn.rollback() + sys.exit(1) + finally: + if cursor: + cursor.close() + if conn: + conn.close() + log_message("Database connection closed") + +if __name__ == "__main__": + main() diff --git a/models.py b/models.py index 3c32dc8..fd1396b 100644 --- a/models.py +++ b/models.py @@ -9,7 +9,6 @@ class User(SQLModel, table=True): token_created: datetime = Field(default_factory=datetime.utcnow) email: str = Field(primary_key=True) username: str = Field(unique=True, index=True) - display_name: str = Field(default="", nullable=True) token: str confirmed: bool = False ip: str = Field(default="") @@ -32,7 +31,7 @@ class UploadLog(SQLModel, table=True): class DBSession(SQLModel, table=True): token: str = Field(primary_key=True) - user_id: str = Field(foreign_key="user.username") + uid: str = Field(foreign_key="user.email") # This references User.email (primary key) ip_address: str user_agent: str created_at: datetime = Field(default_factory=datetime.utcnow) @@ -45,7 +44,6 @@ class PublicStream(SQLModel, table=True): """Stores public stream metadata for all users""" uid: str = Field(primary_key=True) username: Optional[str] = Field(default=None, index=True) - display_name: Optional[str] = Field(default=None) storage_bytes: int = 0 mtime: int = Field(default_factory=lambda: int(datetime.utcnow().timestamp())) last_updated: Optional[datetime] = Field(default_factory=datetime.utcnow) @@ -55,26 +53,26 @@ class PublicStream(SQLModel, table=True): def get_user_by_uid(uid: str) -> Optional[User]: """ - Retrieve a user by their UID (username). + Retrieve a user by their UID (email). - Note: In this application, the User model uses email as primary key, - but we're using username as UID for API routes. This function looks up - users by username. + Note: In this application, UIDs are consistently email-based. + The User model uses email as primary key, and all user references + throughout the system use email format. 
Args: - uid: The username to look up + uid: The email to look up Returns: User object if found, None otherwise """ with Session(engine) as session: - # First try to find by username (which is what we're using as UID) - statement = select(User).where(User.username == uid) + # Primary lookup by email (which is what we're using as UID) + statement = select(User).where(User.email == uid) user = session.exec(statement).first() - # If not found by username, try by email (for backward compatibility) - if not user and '@' in uid: - statement = select(User).where(User.email == uid) + # Fallback: try by username for legacy compatibility + if not user and '@' not in uid: + statement = select(User).where(User.username == uid) user = session.exec(statement).first() return user @@ -85,11 +83,10 @@ def verify_session(db: Session, token: str) -> DBSession: from datetime import datetime # Find the session - session = db.exec( - select(DBSession) - .where(DBSession.token == token) - .where(DBSession.is_active == True) # noqa: E712 - .where(DBSession.expires_at > datetime.utcnow()) + session = db.query(DBSession).filter( + DBSession.token == token, + DBSession.is_active == True, # noqa: E712 + DBSession.expires_at > datetime.utcnow() ).first() if not session: diff --git a/register.py b/register.py index 7f2caf5..333b52a 100644 --- a/register.py +++ b/register.py @@ -16,27 +16,27 @@ MAGIC_FROM = "noreply@dicta2stream.net" MAGIC_DOMAIN = "https://dicta2stream.net" DATA_ROOT = Path("./data") -def initialize_user_directory(username: str): +def initialize_user_directory(uid: str): """Initialize user directory with a silent stream.opus file""" try: - user_dir = DATA_ROOT / username + user_dir = DATA_ROOT / uid default_stream_path = DATA_ROOT / "stream.opus" - print(f"[DEBUG] Initializing user directory: {user_dir.absolute()}") + # Debug messages disabled # Create the directory if it doesn't exist user_dir.mkdir(parents=True, exist_ok=True) - print(f"[DEBUG] Directory created or already exists: {user_dir.exists()}") + # Debug messages disabled # Create stream.opus by copying the default stream.opus file user_stream_path = user_dir / "stream.opus" - print(f"[DEBUG] Creating stream.opus at: {user_stream_path.absolute()}") + # Debug messages disabled if not user_stream_path.exists(): if default_stream_path.exists(): import shutil shutil.copy2(default_stream_path, user_stream_path) - print(f"[DEBUG] Copied default stream.opus to {user_stream_path}") + # Debug messages disabled else: print(f"[ERROR] Default stream.opus not found at {default_stream_path}") # Fallback: create an empty file to prevent errors @@ -45,62 +45,69 @@ def initialize_user_directory(username: str): return True except Exception as e: - print(f"Error initializing user directory for {username}: {str(e)}") + print(f"Error initializing user directory for {uid}: {str(e)}") return False @router.post("/register") -def register(request: Request, email: str = Form(...), user: str = Form(...), db: Session = Depends(get_db)): +def register(request: Request, email: str = Form(...), user: str = Form(...)): from sqlalchemy.exc import IntegrityError from datetime import datetime - # Check if user exists by email - existing_user_by_email = db.get(User, email) - - # Check if user exists by username - stmt = select(User).where(User.username == user) - existing_user_by_username = db.exec(stmt).first() - - token = str(uuid.uuid4()) - - # Case 1: Email and username match in db - it's a login - if existing_user_by_email and existing_user_by_username and 
existing_user_by_email.email == existing_user_by_username.email: - # Update token for existing user (login) - existing_user_by_email.token = token - existing_user_by_email.token_created = datetime.utcnow() - existing_user_by_email.confirmed = False - existing_user_by_email.ip = request.client.host - db.add(existing_user_by_email) + # Use the database session context manager + with get_db() as db: try: - db.commit() - except Exception as e: - db.rollback() - raise HTTPException(status_code=500, detail=f"Database error: {e}") - - action = "login" - - # Case 2: Email matches but username does not - only one account per email - elif existing_user_by_email and (not existing_user_by_username or existing_user_by_email.email != existing_user_by_username.email): - raise HTTPException(status_code=409, detail="📧 This email is already registered with a different username.\nOnly one account per email is allowed.") - - # Case 3: Email does not match but username is in db - username already taken - elif not existing_user_by_email and existing_user_by_username: - raise HTTPException(status_code=409, detail="👤 This username is already taken.\nPlease choose a different username.") - - # Case 4: Neither email nor username exist - create new user - elif not existing_user_by_email and not existing_user_by_username: - # Register new user - new_user = User(email=email, username=user, token=token, confirmed=False, ip=request.client.host) - new_quota = UserQuota(uid=email) # Use email as UID for quota tracking - - db.add(new_user) - db.add(new_quota) - - try: - # First commit the user to the database - db.commit() + # Check if user exists by email + existing_user_by_email = db.get(User, email) + + # Check if user exists by username + existing_user_by_username = db.query(User).filter(User.username == user).first() + + token = str(uuid.uuid4()) + action = None + + # Case 1: Email and username match in db - it's a login + if existing_user_by_email and existing_user_by_username and existing_user_by_email.email == existing_user_by_username.email: + # Update token for existing user (login) + existing_user_by_email.token = token + existing_user_by_email.token_created = datetime.utcnow() + existing_user_by_email.confirmed = False + existing_user_by_email.ip = request.client.host + db.add(existing_user_by_email) + db.commit() + action = "login" + + # Case 2: Email matches but username does not - only one account per email + elif existing_user_by_email and (not existing_user_by_username or existing_user_by_email.email != existing_user_by_username.email): + raise HTTPException(status_code=409, detail="📧 This email is already registered with a different username.\nOnly one account per email is allowed.") + + # Case 3: Email does not match but username is in db - username already taken + elif not existing_user_by_email and existing_user_by_username: + raise HTTPException(status_code=409, detail="👤 This username is already taken.\nPlease choose a different username.") + + # Case 4: Neither email nor username exist - create new user + elif not existing_user_by_email and not existing_user_by_username: + # Register new user + new_user = User(email=email, username=user, token=token, confirmed=False, ip=request.client.host) + new_quota = UserQuota(uid=email) # Use email as UID for quota tracking + + db.add(new_user) + db.add(new_quota) + db.commit() + action = "register" + + # Initialize user directory after successful registration + if not initialize_user_directory(email): + print(f"[WARNING] Failed to initialize user directory for 
{email}") + + # If we get here, we've either logged in or registered successfully + if action not in ["login", "register"]: + raise HTTPException(status_code=400, detail="Invalid registration request") + + # Store the email for use after the session is committed + user_email = email # Only after successful commit, initialize the user directory - initialize_user_directory(user) + initialize_user_directory(email) except Exception as e: db.rollback() if isinstance(e, IntegrityError): @@ -118,15 +125,10 @@ def register(request: Request, email: str = Form(...), user: str = Form(...), db else: raise HTTPException(status_code=500, detail=f"Database error: {e}") - action = "registration" - - else: - # This should not happen, but handle it gracefully - raise HTTPException(status_code=500, detail="Unexpected error during registration.") - # Send magic link with appropriate message based on action - msg = EmailMessage() - msg["From"] = MAGIC_FROM - msg["To"] = email + # Send magic link with appropriate message based on action + msg = EmailMessage() + msg["From"] = MAGIC_FROM + msg["To"] = email if action == "login": msg["Subject"] = "Your magic login link" diff --git a/simple_db_cleanup.py b/simple_db_cleanup.py new file mode 100644 index 0000000..199f454 --- /dev/null +++ b/simple_db_cleanup.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +""" +Simple Database Cleanup Script +Uses the provided connection string to fix legacy data issues +""" + +import psycopg2 +import sys + +# Database connection string provided by user +DATABASE_URL = "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream" + +def execute_query(conn, query, description): + """Execute a query and report results""" + print(f"\n{description}") + print(f"Query: {query}") + print("[DEBUG] Starting query execution...") + + try: + print("[DEBUG] Creating cursor...") + with conn.cursor() as cur: + print("[DEBUG] Executing query...") + cur.execute(query) + print("[DEBUG] Query executed successfully") + + if query.strip().upper().startswith('SELECT'): + print("[DEBUG] Fetching results...") + rows = cur.fetchall() + print(f"Result: {len(rows)} rows") + for row in rows: + print(f" {row}") + else: + print("[DEBUG] Committing transaction...") + conn.commit() + print(f"✅ Success: {cur.rowcount} rows affected") + print("[DEBUG] Query completed successfully") + return True + except Exception as e: + print(f"❌ Error: {e}") + print(f"[DEBUG] Error type: {type(e).__name__}") + print("[DEBUG] Rolling back transaction...") + conn.rollback() + return False + +def main(): + """Execute database cleanup step by step""" + print("=== DATABASE LEGACY DATA CLEANUP ===") + print(f"Attempting to connect to: {DATABASE_URL}") + + try: + print("[DEBUG] Creating database connection...") + conn = psycopg2.connect(DATABASE_URL) + print("✅ Connected to database successfully") + print(f"[DEBUG] Connection status: {conn.status}") + print(f"[DEBUG] Database info: {conn.get_dsn_parameters()}") + + # Step 1: Check current state + print("\n=== STEP 1: Check Current State ===") + execute_query(conn, 'SELECT email, username, display_name FROM "user"', "Check user table") + execute_query(conn, 'SELECT COUNT(*) as expired_active FROM dbsession WHERE expires_at < NOW() AND is_active = true', "Check expired sessions") + + # Step 2: Mark expired sessions as inactive (this was successful before) + print("\n=== STEP 2: Fix Expired Sessions ===") + execute_query(conn, 'UPDATE dbsession SET is_active = false WHERE expires_at < NOW() AND is_active = true', "Mark expired sessions 
inactive") + + # Step 3: Handle foreign key constraint by dropping it temporarily + print("\n=== STEP 3: Handle Foreign Key Constraint ===") + execute_query(conn, 'ALTER TABLE dbsession DROP CONSTRAINT IF EXISTS dbsession_user_id_fkey', "Drop foreign key constraint") + + # Step 4: Update user table + print("\n=== STEP 4: Update User Table ===") + execute_query(conn, """UPDATE "user" + SET username = email, + display_name = CASE + WHEN display_name = '' OR display_name IS NULL + THEN split_part(email, '@', 1) + ELSE display_name + END + WHERE email = 'oib@chello.at'""", "Update user username to email") + + # Step 5: Update session references + print("\n=== STEP 5: Update Session References ===") + execute_query(conn, "UPDATE dbsession SET user_id = 'oib@chello.at' WHERE user_id = 'oibchello'", "Update session user_id") + + # Step 6: Recreate foreign key constraint + print("\n=== STEP 6: Recreate Foreign Key ===") + execute_query(conn, 'ALTER TABLE dbsession ADD CONSTRAINT dbsession_user_id_fkey FOREIGN KEY (user_id) REFERENCES "user"(username)', "Recreate foreign key") + + # Step 7: Final verification + print("\n=== STEP 7: Final Verification ===") + execute_query(conn, 'SELECT email, username, display_name FROM "user"', "Verify user table") + execute_query(conn, 'SELECT DISTINCT user_id FROM dbsession', "Verify session user_id") + execute_query(conn, 'SELECT uid, username FROM publicstream', "Check publicstream") + + print("\n✅ Database cleanup completed successfully!") + + except Exception as e: + print(f"❌ Database connection error: {e}") + return 1 + finally: + if 'conn' in locals(): + conn.close() + + return 0 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/static/audio-player.js b/static/audio-player.js index 779e88d..701bde7 100644 --- a/static/audio-player.js +++ b/static/audio-player.js @@ -16,6 +16,14 @@ export class AudioPlayer { this.lastPlayTime = 0; this.isLoading = false; this.loadTimeout = null; // For tracking loading timeouts + this.retryCount = 0; + this.maxRetries = 3; + this.retryDelay = 3000; // 3 seconds + this.buffering = false; + this.bufferRetryTimeout = null; + this.lastLoadTime = 0; + this.minLoadInterval = 2000; // 2 seconds between loads + this.pendingLoad = false; // Create a single audio element that we'll reuse this.audioElement = new Audio(); @@ -26,6 +34,14 @@ export class AudioPlayer { this.loadAndPlay = this.loadAndPlay.bind(this); this.stop = this.stop.bind(this); this.cleanup = this.cleanup.bind(this); + this.handlePlayError = this.handlePlayError.bind(this); + this.handleStalled = this.handleStalled.bind(this); + this.handleWaiting = this.handleWaiting.bind(this); + this.handlePlaying = this.handlePlaying.bind(this); + this.handleEnded = this.handleEnded.bind(this); + + // Set up event listeners + this.setupEventListeners(); // Register with global audio manager to handle stop requests from other players globalAudioManager.addListener('personal', () => { @@ -63,14 +79,41 @@ export class AudioPlayer { } async loadAndPlay(uid, button) { + const now = Date.now(); + + // Prevent rapid successive load attempts + if (this.pendingLoad || (now - this.lastLoadTime < this.minLoadInterval)) { + console.log('[AudioPlayer] Skipping duplicate load request'); + return; + } + // Validate UID exists and is in correct format if (!uid) { this.handleError(button, 'No UID provided for audio playback'); return; } - - if (!this.isValidUuid(uid)) { - this.handleError(button, `Invalid UID format: ${uid}. 
Expected UUID v4 format.`); + + // For logging purposes + const requestId = Math.random().toString(36).substr(2, 8); + console.log(`[AudioPlayer] Load request ${requestId} for UID: ${uid}`); + + this.pendingLoad = true; + this.lastLoadTime = now; + + // If we're in the middle of loading, check if it's for the same UID + if (this.isLoading) { + // If same UID, ignore duplicate request + if (this.currentUid === uid) { + console.log(`[AudioPlayer] Already loading this UID, ignoring duplicate request: ${uid}`); + this.pendingLoad = false; + return; + } + // If different UID, queue the new request + console.log(`[AudioPlayer] Already loading, queuing request for UID: ${uid}`); + setTimeout(() => { + this.pendingLoad = false; + this.loadAndPlay(uid, button); + }, 500); return; } @@ -170,8 +213,10 @@ export class AudioPlayer { // Set the source URL with proper encoding and cache-busting timestamp // Using the format: /audio/{uid}/stream.opus?t={timestamp} - const timestamp = new Date().getTime(); + // Only update timestamp if we're loading a different UID or after a retry + const timestamp = this.retryCount > 0 ? new Date().getTime() : this.lastLoadTime; this.audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus?t=${timestamp}`; + console.log(`[AudioPlayer] Loading audio from URL: ${this.audioUrl} (attempt ${this.retryCount + 1}/${this.maxRetries})`); console.log('Loading audio from URL:', this.audioUrl); this.audioElement.src = this.audioUrl; @@ -312,10 +357,150 @@ export class AudioPlayer { } } + /** + * Set up event listeners for the audio element + */ + setupEventListeners() { + if (!this.audioElement) return; + + // Remove any existing listeners to prevent duplicates + this.audioElement.removeEventListener('error', this.handlePlayError); + this.audioElement.removeEventListener('stalled', this.handleStalled); + this.audioElement.removeEventListener('waiting', this.handleWaiting); + this.audioElement.removeEventListener('playing', this.handlePlaying); + this.audioElement.removeEventListener('ended', this.handleEnded); + + // Add new listeners + this.audioElement.addEventListener('error', this.handlePlayError); + this.audioElement.addEventListener('stalled', this.handleStalled); + this.audioElement.addEventListener('waiting', this.handleWaiting); + this.audioElement.addEventListener('playing', this.handlePlaying); + this.audioElement.addEventListener('ended', this.handleEnded); + } + + /** + * Handle play errors + */ + handlePlayError(event) { + console.error('[AudioPlayer] Playback error:', { + event: event.type, + error: this.audioElement.error, + currentTime: this.audioElement.currentTime, + readyState: this.audioElement.readyState, + networkState: this.audioElement.networkState, + src: this.audioElement.src + }); + + this.isPlaying = false; + this.buffering = false; + this.pendingLoad = false; + + if (this.currentButton) { + this.updateButtonState(this.currentButton, 'error'); + } + + // Auto-retry logic + if (this.retryCount < this.maxRetries) { + this.retryCount++; + console.log(`Retrying playback (attempt ${this.retryCount}/${this.maxRetries})...`); + + setTimeout(() => { + if (this.currentUid && this.currentButton) { + this.loadAndPlay(this.currentUid, this.currentButton); + } + }, this.retryDelay); + } else { + console.error('Max retry attempts reached'); + this.retryCount = 0; // Reset for next time + } + } + + /** + * Handle stalled audio (buffering issues) + */ + handleStalled() { + console.log('[AudioPlayer] Playback stalled, attempting to recover...'); + this.buffering = true; 
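+    // Recovery sketch: give playback ~5s to resume on its own (handlePlaying
+    // clears this.buffering); only if it is still stalled do we reload via
+    // loadAndPlay(), bounded by maxRetries just like the error path above.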
+ + if (this.bufferRetryTimeout) { + clearTimeout(this.bufferRetryTimeout); + } + + this.bufferRetryTimeout = setTimeout(() => { + if (this.buffering) { + console.log('[AudioPlayer] Buffer recovery timeout, attempting to reload...'); + if (this.currentUid && this.currentButton) { + // Only retry if we're still supposed to be playing + if (this.isPlaying) { + this.retryCount++; + if (this.retryCount <= this.maxRetries) { + console.log(`[AudioPlayer] Retry ${this.retryCount}/${this.maxRetries} for UID: ${this.currentUid}`); + this.loadAndPlay(this.currentUid, this.currentButton); + } else { + console.error('[AudioPlayer] Max retry attempts reached'); + this.retryCount = 0; + this.updateButtonState(this.currentButton, 'error'); + } + } + } + } + }, 5000); // 5 second buffer recovery timeout + } + + /** + * Handle waiting event (buffering) + */ + handleWaiting() { + console.log('Audio waiting for data...'); + this.buffering = true; + if (this.currentButton) { + this.updateButtonState(this.currentButton, 'loading'); + } + } + + /** + * Handle playing event (playback started/resumed) + */ + handlePlaying() { + console.log('Audio playback started/resumed'); + this.buffering = false; + this.retryCount = 0; // Reset retry counter on successful playback + if (this.bufferRetryTimeout) { + clearTimeout(this.bufferRetryTimeout); + this.bufferRetryTimeout = null; + } + if (this.currentButton) { + this.updateButtonState(this.currentButton, 'playing'); + } + } + + /** + * Handle ended event (playback completed) + */ + handleEnded() { + console.log('Audio playback ended'); + this.isPlaying = false; + this.buffering = false; + if (this.currentButton) { + this.updateButtonState(this.currentButton, 'paused'); + } + } + /** * Clean up resources */ cleanup() { + // Clear any pending timeouts + if (this.loadTimeout) { + clearTimeout(this.loadTimeout); + this.loadTimeout = null; + } + + if (this.bufferRetryTimeout) { + clearTimeout(this.bufferRetryTimeout); + this.bufferRetryTimeout = null; + } + // Update button state if we have a reference to the current button if (this.currentButton) { this.updateButtonState(this.currentButton, 'paused'); @@ -324,6 +509,13 @@ export class AudioPlayer { // Pause the audio and store the current time if (this.audioElement) { try { + // Remove event listeners to prevent memory leaks + this.audioElement.removeEventListener('error', this.handlePlayError); + this.audioElement.removeEventListener('stalled', this.handleStalled); + this.audioElement.removeEventListener('waiting', this.handleWaiting); + this.audioElement.removeEventListener('playing', this.handlePlaying); + this.audioElement.removeEventListener('ended', this.handleEnded); + try { this.audioElement.pause(); this.lastPlayTime = this.audioElement.currentTime; @@ -357,6 +549,8 @@ export class AudioPlayer { this.currentButton = null; this.audioUrl = ''; this.isPlaying = false; + this.buffering = false; + this.retryCount = 0; // Notify global audio manager that personal player has stopped globalAudioManager.stopPlayback('personal'); diff --git a/static/auth-manager.js b/static/auth-manager.js new file mode 100644 index 0000000..57c8e90 --- /dev/null +++ b/static/auth-manager.js @@ -0,0 +1,688 @@ +/** + * Centralized Authentication Manager + * + * This module consolidates all authentication logic from auth.js, magic-login.js, + * and cleanup-auth.js into a single, maintainable module. 
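+ *
+ * Usage (sketch; the instance is exposed both as the default export and as
+ * window.authManager):
+ *
+ *   import authManager from './auth-manager.js';
+ *   await authManager.initialize();
+ *   if (authManager.isAuthenticated()) {
+ *     const { username, email } = authManager.getCurrentUser();
+ *   }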
+ */ + +import { showToast } from './toast.js'; + +class AuthManager { + constructor() { + this.DEBUG_AUTH_STATE = false; + this.AUTH_CHECK_DEBOUNCE = 1000; // 1 second + this.AUTH_CHECK_INTERVAL = 30000; // 30 seconds + this.CACHE_TTL = 5000; // 5 seconds + + // Authentication state cache + this.authStateCache = { + timestamp: 0, + value: null, + ttl: this.CACHE_TTL + }; + + // Track auth check calls + this.lastAuthCheckTime = 0; + this.authCheckCounter = 0; + this.wasAuthenticated = null; + + // Bind all methods that will be used as event handlers + this.checkAuthState = this.checkAuthState.bind(this); + this.handleMagicLoginRedirect = this.handleMagicLoginRedirect.bind(this); + this.logout = this.logout.bind(this); + this.deleteAccount = this.deleteAccount.bind(this); + this.handleStorageEvent = this.handleStorageEvent.bind(this); + this.handleVisibilityChange = this.handleVisibilityChange.bind(this); + + // Initialize + this.initialize = this.initialize.bind(this); + } + + /** + * Validate UID format - must be a valid email address + */ + validateUidFormat(uid) { + if (!uid || typeof uid !== 'string') { + // Debug messages disabled + return false; + } + + // Email regex pattern - RFC 5322 compliant basic validation + const emailRegex = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; + + const isValid = emailRegex.test(uid); + + if (!isValid) { + // Debug messages disabled + } else { + // Debug messages disabled + } + + return isValid; + } + + /** + * Sanitize and validate UID - ensures consistent format + */ + sanitizeUid(uid) { + if (!uid || typeof uid !== 'string') { + // Debug messages disabled + return null; + } + + // Trim whitespace and convert to lowercase + const sanitized = uid.trim().toLowerCase(); + + // Validate the sanitized UID + if (!this.validateUidFormat(sanitized)) { + // Debug messages disabled + return null; + } + + // Debug messages disabled + return sanitized; + } + + /** + * Check if current stored UID is valid and fix if needed + */ + validateStoredUid() { + const storedUid = localStorage.getItem('uid'); + + if (!storedUid) { + // Debug messages disabled + return null; + } + + const sanitizedUid = this.sanitizeUid(storedUid); + + if (!sanitizedUid) { + // Debug messages disabled + this.clearAuthState(); + return null; + } + + // Update stored UID if sanitization changed it + if (sanitizedUid !== storedUid) { + // Debug messages disabled + localStorage.setItem('uid', sanitizedUid); + + // Update cookies as well + document.cookie = `uid=${sanitizedUid}; path=/; SameSite=Lax; Secure`; + } + + return sanitizedUid; + } + + /** + * Get cookie value by name + */ + getCookieValue(name) { + const value = `; ${document.cookie}`; + const parts = value.split(`; ${name}=`); + if (parts.length === 2) { + return parts.pop().split(';').shift(); + } + return null; + } + + /** + * Initialize the authentication manager + */ + async initialize() { + // Debug messages disabled + + // Validate stored UID format and fix if needed + const validUid = this.validateStoredUid(); + if (validUid) { + // Debug messages disabled + } else { + // Debug messages disabled + } + + // Handle magic link login if present + await this.handleMagicLoginRedirect(); + + // Setup authentication state polling + this.setupAuthStatePolling(); + + // Setup event listeners + document.addEventListener('visibilitychange', this.handleVisibilityChange); + this.setupEventListeners(); + + // Debug messages disabled + } + + /** + * 
Fetch user information from the server + */ + async fetchUserInfo() { + try { + // Get the auth token from cookies + const authToken = this.getCookieValue('authToken') || localStorage.getItem('authToken'); + // Debug messages disabled + + const headers = { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }; + + // Add Authorization header if we have a token + if (authToken) { + headers['Authorization'] = `Bearer ${authToken}`; + // Debug messages disabled + } else { + // Debug messages disabled + } + + // Debug messages disabled + const response = await fetch('/api/me', { + method: 'GET', + credentials: 'include', + headers: headers + }); + + // Debug messages disabled + + if (response.ok) { + const contentType = response.headers.get('content-type'); + // Debug messages disabled + + if (contentType && contentType.includes('application/json')) { + const userInfo = await response.json(); + // Debug messages disabled + return userInfo; + } else { + const text = await response.text(); + // Debug messages disabled + } + } else { + const errorText = await response.text(); + // Debug messages disabled + } + return null; + } catch (error) { + // Debug messages disabled + return null; + } + } + + /** + * Set authentication state in localStorage and cookies + */ + setAuthState(userEmail, username, authToken = null) { + // Debug messages disabled + + // Validate and sanitize the UID (email) + const sanitizedUid = this.sanitizeUid(userEmail); + if (!sanitizedUid) { + // Debug messages disabled + throw new Error(`Invalid UID format: ${userEmail}. UID must be a valid email address.`); + } + + // Validate username (basic check) + if (!username || typeof username !== 'string' || username.trim().length === 0) { + // Debug messages disabled + throw new Error(`Invalid username: ${username}. Username cannot be empty.`); + } + + const sanitizedUsername = username.trim(); + + // Generate auth token if not provided + if (!authToken) { + authToken = 'token-' + Math.random().toString(36).substring(2, 15); + } + + // Debug messages disabled + + // Set localStorage for client-side access (not sent to server) + localStorage.setItem('uid', sanitizedUid); // Primary UID is email + localStorage.setItem('username', sanitizedUsername); // Username for display + localStorage.setItem('uid_time', Date.now().toString()); + + // Set cookies for server authentication (sent with requests) + document.cookie = `uid=${encodeURIComponent(sanitizedUid)}; path=/; SameSite=Lax`; + document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; + // Note: isAuthenticated is determined by presence of valid authToken, no need to duplicate + + // Clear cache to force refresh + this.authStateCache.timestamp = 0; + } + + /** + * Clear authentication state + */ + clearAuthState() { + // Debug messages disabled + + // Clear localStorage (client-side data only) + const authKeys = ['uid', 'username', 'uid_time']; + authKeys.forEach(key => localStorage.removeItem(key)); + + // Clear cookies + document.cookie.split(';').forEach(cookie => { + const eqPos = cookie.indexOf('='); + const name = eqPos > -1 ? 
cookie.substr(0, eqPos).trim() : cookie.trim(); + document.cookie = `${name}=;expires=Thu, 01 Jan 1970 00:00:00 GMT;path=/; SameSite=Lax`; + }); + + // Clear cache + this.authStateCache.timestamp = 0; + } + + /** + * Check if user is currently authenticated + */ + isAuthenticated() { + const now = Date.now(); + + // Use cached value if still valid + if (this.authStateCache.timestamp > 0 && + (now - this.authStateCache.timestamp) < this.authStateCache.ttl) { + return this.authStateCache.value; + } + + // Check authentication state - simplified approach + const hasUid = !!(document.cookie.includes('uid=') || localStorage.getItem('uid')); + const hasAuthToken = !!document.cookie.includes('authToken='); + + const isAuth = hasUid && hasAuthToken; + + // Update cache + this.authStateCache.timestamp = now; + this.authStateCache.value = isAuth; + + return isAuth; + } + + /** + * Get current user data + */ + getCurrentUser() { + if (!this.isAuthenticated()) { + return null; + } + + return { + uid: localStorage.getItem('uid'), + email: localStorage.getItem('uid'), // uid is the email + username: localStorage.getItem('username'), + authToken: this.getCookieValue('authToken') // authToken is in cookies + }; + } + + /** + * Handle magic link login redirect + */ + async handleMagicLoginRedirect() { + const params = new URLSearchParams(window.location.search); + + // Handle secure token-based magic login only + const token = params.get('token'); + if (token) { + // Debug messages disabled + + // Clean up URL immediately + const url = new URL(window.location.href); + url.searchParams.delete('token'); + window.history.replaceState({}, document.title, url.pathname + url.search); + + await this.processTokenLogin(token); + return true; + } + + return false; + } + + + + /** + * Process token-based login + */ + async processTokenLogin(token) { + try { + // Debug messages disabled + + const formData = new FormData(); + formData.append('token', token); + + // Debug messages disabled + const response = await fetch('/magic-login', { + method: 'POST', + body: formData, + }); + + // Debug messages disabled + + // Handle successful token login response + const contentType = response.headers.get('content-type'); + // Debug messages disabled + + if (contentType && contentType.includes('application/json')) { + const data = await response.json(); + // Debug messages disabled + + if (data && data.success && data.user) { + // Debug messages disabled + + // Use the user data and token from the response + const { email, username } = data.user; + const authToken = data.token; // Get token from JSON response + + // Debug messages disabled + + // Set auth state with the token from the response + this.setAuthState(email, username, authToken); + this.updateUIState(true); + await this.initializeUserSession(username, email); + showToast('✅ Login successful!'); + this.navigateToProfile(); + return; + } else { + // Debug messages disabled + throw new Error('Invalid user data received from server'); + } + } else { + const text = await response.text(); + // Debug messages disabled + throw new Error(`Unexpected response format: ${text || 'No details available'}`); + } + } catch (error) { + // Debug messages disabled + showToast(`Login failed: ${error.message}`, 'error'); + } + } + + /** + * Initialize user session after login + */ + async initializeUserSession(username, userEmail) { + // Initialize dashboard + if (window.initDashboard) { + await window.initDashboard(username); + } else { + // Debug messages disabled + } + + // Fetch and 
display file list + if (window.fetchAndDisplayFiles) { + // Debug messages disabled + await window.fetchAndDisplayFiles(userEmail); + } else { + // Debug messages disabled + } + } + + /** + * Navigate to user profile + */ + navigateToProfile() { + if (window.showOnly) { + // Debug messages disabled + window.showOnly('me-page'); + } else if (window.location.hash !== '#me-page') { + window.location.hash = '#me-page'; + } + } + + /** + * Update UI state based on authentication + */ + updateUIState(isAuthenticated) { + if (isAuthenticated) { + document.body.classList.add('authenticated'); + document.body.classList.remove('guest'); + + // Note: Removed auto-loading of profile stream to prevent auto-play on page load + // Profile stream will only play when user clicks the play button + } else { + document.body.classList.remove('authenticated'); + document.body.classList.add('guest'); + } + + this.updateAccountDeletionVisibility(isAuthenticated); + + // Force reflow + void document.body.offsetHeight; + } + + /** + * Update account deletion section visibility + */ + updateAccountDeletionVisibility(isAuthenticated) { + const accountDeletionSection = document.getElementById('account-deletion-section'); + const deleteAccountFromPrivacy = document.getElementById('delete-account-from-privacy'); + + if (isAuthenticated) { + this.showElement(accountDeletionSection); + this.showElement(deleteAccountFromPrivacy); + } else { + this.hideElement(accountDeletionSection); + this.hideElement(deleteAccountFromPrivacy); + } + } + + showElement(element) { + if (element) { + element.style.display = 'block'; + element.style.visibility = 'visible'; + } + } + + hideElement(element) { + if (element) { + element.style.display = 'none'; + } + } + + /** + * Check authentication state with caching and debouncing + */ + checkAuthState(force = false) { + const now = Date.now(); + + // Debounce frequent calls + if (!force && (now - this.lastAuthCheckTime) < this.AUTH_CHECK_DEBOUNCE) { + return this.authStateCache.value; + } + + this.lastAuthCheckTime = now; + this.authCheckCounter++; + + if (this.DEBUG_AUTH_STATE) { + // Debug messages disabled + } + + const isAuthenticated = this.isAuthenticated(); + + // Only update UI if state changed or forced + if (force || this.wasAuthenticated !== isAuthenticated) { + if (this.DEBUG_AUTH_STATE) { + // Debug messages disabled + } + + // Handle logout detection + if (this.wasAuthenticated === true && isAuthenticated === false) { + // Debug messages disabled + this.logout(); + return false; + } + + this.updateUIState(isAuthenticated); + this.wasAuthenticated = isAuthenticated; + } + + return isAuthenticated; + } + + /** + * Setup authentication state polling + */ + setupAuthStatePolling() { + // Initial check + this.checkAuthState(true); + + // Periodic checks + setInterval(() => { + this.checkAuthState(!document.hidden); + }, this.AUTH_CHECK_INTERVAL); + + // Storage event listener + window.addEventListener('storage', this.handleStorageEvent); + + // Visibility change listener + document.addEventListener('visibilitychange', this.handleVisibilityChange); + } + + /** + * Handle storage events + */ + handleStorageEvent(e) { + if (['isAuthenticated', 'authToken', 'uid'].includes(e.key)) { + this.checkAuthState(true); + } + } + + /** + * Handle visibility change events + */ + handleVisibilityChange() { + if (!document.hidden) { + this.checkAuthState(true); + } + } + + /** + * Setup event listeners + */ + setupEventListeners() { + document.addEventListener('click', (e) => { + // Delete account 
buttons + if (e.target.closest('#delete-account') || e.target.closest('#delete-account-from-privacy')) { + this.deleteAccount(e); + return; + } + }); + } + + /** + * Delete user account + */ + async deleteAccount(e) { + if (e) e.preventDefault(); + if (this.deleteAccount.inProgress) return; + + if (!confirm('Are you sure you want to delete your account?\nThis action is permanent.')) { + return; + } + + this.deleteAccount.inProgress = true; + const deleteBtn = e?.target.closest('button'); + const originalText = deleteBtn?.textContent; + + if (deleteBtn) { + deleteBtn.disabled = true; + deleteBtn.textContent = 'Deleting...'; + } + + try { + const response = await fetch('/api/delete-account', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + credentials: 'include', + body: JSON.stringify({ uid: localStorage.getItem('uid') }) + }); + + if (!response.ok) { + const errorData = await response.json().catch(() => ({ detail: 'Failed to delete account.' })); + throw new Error(errorData.detail); + } + + showToast('Account deleted successfully.', 'success'); + this.logout(); + } catch (error) { + // Debug messages disabled + showToast(error.message, 'error'); + } finally { + this.deleteAccount.inProgress = false; + if (deleteBtn) { + deleteBtn.disabled = false; + deleteBtn.textContent = originalText; + } + } + } + + /** + * Logout user + */ + logout() { + // Debug messages disabled + this.clearAuthState(); + window.location.href = '/'; + } + + /** + * Cleanup authentication state (for migration/debugging) + */ + async cleanupAuthState(manualEmail = null) { + // Debug messages disabled + + let userEmail = manualEmail; + + // Try to get email from server if not provided + if (!userEmail) { + const userInfo = await this.fetchUserInfo(); + userEmail = userInfo?.email; + + if (!userEmail) { + userEmail = prompt('Please enter your email address (e.g., oib@chello.at):'); + if (!userEmail || !userEmail.includes('@')) { + // Debug messages disabled + return { success: false, error: 'Invalid email' }; + } + } + } + + if (!userEmail) { + // Debug messages disabled + return { success: false, error: 'No email available' }; + } + + // Get current username for reference + const currentUsername = localStorage.getItem('username') || localStorage.getItem('uid'); + + // Clear and reset authentication state + this.clearAuthState(); + this.setAuthState(userEmail, currentUsername || userEmail); + + // Debug messages disabled + // Debug messages disabled + + // Refresh if on profile page + if (window.location.hash === '#me-page') { + window.location.reload(); + } + + return { + email: userEmail, + username: currentUsername, + success: true + }; + } + + /** + * Destroy the authentication manager + */ + destroy() { + window.removeEventListener('storage', this.handleStorageEvent); + document.removeEventListener('visibilitychange', this.handleVisibilityChange); + } +} + +// Create and export singleton instance +const authManager = new AuthManager(); + +// Export for global access +window.authManager = authManager; + +export default authManager; diff --git a/static/auth.js b/static/auth.js index 8ceee05..cf49314 100644 --- a/static/auth.js +++ b/static/auth.js @@ -1,252 +1,31 @@ -import { showToast } from './toast.js'; +/** + * Simplified Authentication Module + * + * This file now uses the centralized AuthManager for all authentication logic. + * Legacy code has been replaced with the new consolidated approach. 
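+ *
+ * Existing callers keep working through the global shims registered at the
+ * bottom of this file, e.g.:
+ *
+ *   window.isAuthenticated();  // -> authManager.isAuthenticated()
+ *   window.logout();           // -> authManager.logout()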
+ */ +import authManager from './auth-manager.js'; import { loadProfileStream } from './personal-player.js'; -document.addEventListener('DOMContentLoaded', () => { - // Track previous authentication state - let wasAuthenticated = null; - // Debug flag - set to false to disable auth state change logs - const DEBUG_AUTH_STATE = false; - - // Track auth check calls and cache state - let lastAuthCheckTime = 0; - let authCheckCounter = 0; - const AUTH_CHECK_DEBOUNCE = 1000; // 1 second - let authStateCache = { - timestamp: 0, - value: null, - ttl: 5000 // Cache TTL in milliseconds - }; - - // Handle magic link login redirect - function handleMagicLoginRedirect() { - const params = new URLSearchParams(window.location.search); - if (params.get('login') === 'success' && params.get('confirmed_uid')) { - const username = params.get('confirmed_uid'); - console.log('Magic link login detected for user:', username); - - // Update authentication state - localStorage.setItem('uid', username); - localStorage.setItem('confirmed_uid', username); - localStorage.setItem('uid_time', Date.now().toString()); - document.cookie = `uid=${encodeURIComponent(username)}; path=/; SameSite=Lax`; - - // Update UI state - document.body.classList.add('authenticated'); - document.body.classList.remove('guest'); - - // Update local storage and cookies - localStorage.setItem('isAuthenticated', 'true'); - document.cookie = `isAuthenticated=true; path=/; SameSite=Lax`; - - // Update URL and history without reloading - window.history.replaceState({}, document.title, window.location.pathname); - - // Update navigation - if (typeof injectNavigation === 'function') { - console.log('Updating navigation after magic link login'); - injectNavigation(true); - } else { - console.warn('injectNavigation function not available after magic link login'); - } - - // Navigate to user's profile page - if (window.showOnly) { - console.log('Navigating to me-page'); - window.showOnly('me-page'); - } else if (window.location.hash !== '#me') { - window.location.hash = '#me'; - } - - // Auth state will be updated by the polling mechanism - } - } - - // Update the visibility of the account deletion section based on authentication state - function updateAccountDeletionVisibility(isAuthenticated) { - const authOnlyWrapper = document.querySelector('#privacy-page .auth-only'); - const accountDeletionSection = document.getElementById('account-deletion'); - - const showElement = (element) => { - if (!element) return; - element.classList.remove('hidden', 'auth-only-hidden'); - element.style.display = 'block'; - }; - - const hideElement = (element) => { - if (!element) return; - element.style.display = 'none'; - }; - - if (isAuthenticated) { - const isPrivacyPage = window.location.hash === '#privacy-page'; - if (isPrivacyPage) { - if (authOnlyWrapper) showElement(authOnlyWrapper); - if (accountDeletionSection) showElement(accountDeletionSection); - } else { - if (accountDeletionSection) hideElement(accountDeletionSection); - if (authOnlyWrapper) hideElement(authOnlyWrapper); - } - } else { - if (accountDeletionSection) hideElement(accountDeletionSection); - if (authOnlyWrapper) { - const hasOtherContent = Array.from(authOnlyWrapper.children).some( - child => child.id !== 'account-deletion' && child.offsetParent !== null - ); - if (!hasOtherContent) { - hideElement(authOnlyWrapper); - } - } - } - } - - // Check authentication state and update UI with caching and debouncing - function checkAuthState(force = false) { - const now = Date.now(); - if (!force && 
authStateCache.value !== null && now - authStateCache.timestamp < authStateCache.ttl) { - return authStateCache.value; - } - - if (now - lastAuthCheckTime < AUTH_CHECK_DEBOUNCE && !force) { - return wasAuthenticated; - } - lastAuthCheckTime = now; - authCheckCounter++; - - const isAuthenticated = - (document.cookie.includes('isAuthenticated=true') || localStorage.getItem('isAuthenticated') === 'true') && - (document.cookie.includes('uid=') || localStorage.getItem('uid')) && - !!localStorage.getItem('authToken'); - - authStateCache = { - timestamp: now, - value: isAuthenticated, - ttl: isAuthenticated ? 30000 : 5000 - }; - - if (isAuthenticated !== wasAuthenticated) { - if (DEBUG_AUTH_STATE) { - console.log('Auth state changed, updating UI...'); - } - - if (!isAuthenticated && wasAuthenticated) { - console.log('User was authenticated, but is no longer. Triggering logout.'); - basicLogout(); - return; // Stop further processing after logout - } - - if (isAuthenticated) { - document.body.classList.add('authenticated'); - document.body.classList.remove('guest'); - const uid = localStorage.getItem('uid'); - if (uid && (window.location.hash === '#me-page' || window.location.hash === '#me' || window.location.pathname.startsWith('/~'))) { - loadProfileStream(uid); - } - } else { - document.body.classList.remove('authenticated'); - document.body.classList.add('guest'); - } - - updateAccountDeletionVisibility(isAuthenticated); - wasAuthenticated = isAuthenticated; - void document.body.offsetHeight; // Force reflow - } - - return isAuthenticated; - } - - // Periodically check authentication state with optimized polling - function setupAuthStatePolling() { - checkAuthState(true); - - const checkAndUpdate = () => { - checkAuthState(!document.hidden); - }; - - const AUTH_CHECK_INTERVAL = 30000; - setInterval(checkAndUpdate, AUTH_CHECK_INTERVAL); - - const handleStorageEvent = (e) => { - if (['isAuthenticated', 'authToken', 'uid'].includes(e.key)) { - checkAuthState(true); - } - }; - - window.addEventListener('storage', handleStorageEvent); - - const handleVisibilityChange = () => { - if (!document.hidden) { - checkAuthState(true); - } - }; - - document.addEventListener('visibilitychange', handleVisibilityChange); - - return () => { - window.removeEventListener('storage', handleStorageEvent); - document.removeEventListener('visibilitychange', handleVisibilityChange); - }; - } - - // --- ACCOUNT DELETION --- - const deleteAccount = async (e) => { - if (e) e.preventDefault(); - if (deleteAccount.inProgress) return; - if (!confirm('Are you sure you want to delete your account?\nThis action is permanent.')) return; - - deleteAccount.inProgress = true; - const deleteBtn = e?.target.closest('button'); - const originalText = deleteBtn?.textContent; - if (deleteBtn) { - deleteBtn.disabled = true; - deleteBtn.textContent = 'Deleting...'; - } - - try { - const response = await fetch('/api/delete-account', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - credentials: 'include', - body: JSON.stringify({ uid: localStorage.getItem('uid') }) - }); - - if (!response.ok) { - const errorData = await response.json().catch(() => ({ detail: 'Failed to delete account.' 
})); - throw new Error(errorData.detail); - } - - showToast('Account deleted successfully.', 'success'); - // Perform a full client-side logout and redirect - basicLogout(); - } catch (error) { - showToast(error.message, 'error'); - } finally { - deleteAccount.inProgress = false; - if (deleteBtn) { - deleteBtn.disabled = false; - deleteBtn.textContent = originalText; - } - } - }; - - // --- LOGOUT --- - function basicLogout() { - ['isAuthenticated', 'uid', 'confirmed_uid', 'uid_time', 'authToken'].forEach(k => localStorage.removeItem(k)); - document.cookie.split(';').forEach(c => document.cookie = c.replace(/^ +/, '').replace(/=.*/, `=;expires=${new Date().toUTCString()};path=/`)); - window.location.href = '/'; - } - - // --- DELEGATED EVENT LISTENERS --- - document.addEventListener('click', (e) => { - - // Delete Account Buttons - if (e.target.closest('#delete-account') || e.target.closest('#delete-account-from-privacy')) { - deleteAccount(e); - return; - } - }); - - // --- INITIALIZATION --- - handleMagicLoginRedirect(); - setupAuthStatePolling(); +// Initialize authentication manager when DOM is ready +document.addEventListener('DOMContentLoaded', async () => { + // Debug messages disabled + + // Initialize the centralized auth manager + await authManager.initialize(); + + // Make loadProfileStream available globally for auth manager + window.loadProfileStream = loadProfileStream; + + // Debug messages disabled }); + +// Export auth manager for other modules to use +export { authManager }; + +// Legacy compatibility - expose some functions globally +window.getCurrentUser = () => authManager.getCurrentUser(); +window.isAuthenticated = () => authManager.isAuthenticated(); +window.logout = () => authManager.logout(); +window.cleanupAuthState = (email) => authManager.cleanupAuthState(email); diff --git a/static/cleanup-auth.js b/static/cleanup-auth.js new file mode 100644 index 0000000..45b309e --- /dev/null +++ b/static/cleanup-auth.js @@ -0,0 +1,38 @@ +/** + * Simplified Authentication Cleanup Module + * + * This file now uses the centralized AuthManager for authentication cleanup. + * The cleanup logic has been moved to the AuthManager. + */ + +import authManager from './auth-manager.js'; + +/** + * Clean up authentication state - now delegated to AuthManager + * This function is kept for backward compatibility. 
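+ *
+ * Usage (sketch):
+ *   await cleanupAuthState('oib@chello.at');  // pass the email explicitly
+ *   // ...or load any page with ?cleanup=auth to trigger the auto-run below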
+ */ +async function cleanupAuthState(manualEmail = null) { + console.log('[CLEANUP] Starting authentication state cleanup via AuthManager...'); + + // Delegate to the centralized AuthManager + return await authManager.cleanupAuthState(manualEmail); +} + +// Auto-run cleanup if this script is loaded directly +if (typeof window !== 'undefined') { + // Export function for manual use + window.cleanupAuthState = cleanupAuthState; + + // Auto-run if URL contains cleanup parameter + const urlParams = new URLSearchParams(window.location.search); + if (urlParams.get('cleanup') === 'auth') { + cleanupAuthState().then(result => { + if (result && result.success) { + console.log('[CLEANUP] Auto-cleanup completed successfully'); + } + }); + } +} + +// Export for ES6 modules +export { cleanupAuthState }; diff --git a/static/css/components/file-upload.css b/static/css/components/file-upload.css index dc95409..c463aee 100644 --- a/static/css/components/file-upload.css +++ b/static/css/components/file-upload.css @@ -34,8 +34,7 @@ #file-list li { display: flex; - justify-content: space-between; - align-items: center; + flex-direction: column; padding: 0.75rem 1rem; margin: 0.5rem 0; background-color: var(--surface); @@ -97,36 +96,58 @@ .file-info { display: flex; - align-items: center; + align-items: flex-start; flex: 1; - min-width: 0; /* Allows text truncation */ + min-width: 0; + flex-direction: column; + gap: 0.25rem; } -.file-icon { - margin-right: 0.75rem; - font-size: 1.2em; - flex-shrink: 0; +.file-header { + display: flex; + align-items: flex-start; + justify-content: space-between; + width: 100%; + gap: 0.75rem; } + + .file-name { - color: var(--primary); - text-decoration: none; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - margin-right: 0.5rem; -} - -.file-name:hover { - text-decoration: underline; + color: var(--text-color); + word-break: break-word; + overflow-wrap: break-word; + line-height: 1.3; + flex: 1; + font-size: 0.95em; } .file-size { color: var(--text-muted); - font-size: 0.85em; - margin-left: 0.5rem; + font-size: 0.8em; white-space: nowrap; flex-shrink: 0; + font-style: italic; + align-self: flex-start; +} + +.delete-file { + align-self: center; + background: none; + border: none; + font-size: 1.1em; + cursor: pointer; + padding: 0.3rem 0.5rem; + border-radius: 4px; + transition: all 0.2s ease; + color: var(--text-muted); + margin-top: 0.2rem; +} + +.delete-file:hover { + background-color: var(--error); + color: white; + transform: scale(1.1); } .file-actions { diff --git a/static/dashboard.js b/static/dashboard.js index fa0f9f6..4908a42 100644 --- a/static/dashboard.js +++ b/static/dashboard.js @@ -12,13 +12,14 @@ function getCookie(name) { // Global state let isLoggingOut = false; +let dashboardInitialized = false; async function handleLogout(event) { - console.log('[LOGOUT] Logout initiated'); + // Debug messages disabled // Prevent multiple simultaneous logout attempts if (isLoggingOut) { - console.log('[LOGOUT] Logout already in progress'); + // Debug messages disabled return; } isLoggingOut = true; @@ -34,11 +35,11 @@ async function handleLogout(event) { const authToken = localStorage.getItem('authToken'); // 1. 
Clear all client-side state first (most important) - console.log('[LOGOUT] Clearing all client-side state'); + // Debug messages disabled // Clear localStorage and sessionStorage const storageKeys = [ - 'uid', 'uid_time', 'confirmed_uid', 'last_page', + 'uid', 'uid_time', 'last_page', 'isAuthenticated', 'authToken', 'user', 'token', 'sessionid', 'sessionId' ]; @@ -49,22 +50,22 @@ async function handleLogout(event) { // Get all current cookies for debugging const allCookies = document.cookie.split(';'); - console.log('[LOGOUT] Current cookies before clearing:', allCookies); + // Debug messages disabled // Clear ALL cookies (aggressive approach) allCookies.forEach(cookie => { const [name] = cookie.trim().split('='); if (name) { const cookieName = name.trim(); - console.log(`[LOGOUT] Clearing cookie: ${cookieName}`); + // Debug messages disabled // Try multiple clearing strategies to ensure cookies are removed const clearStrategies = [ - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;`, - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname};`, - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname};`, - `${cookieName}=; max-age=0; path=/;`, - `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname};` + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; SameSite=Lax;`, + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname}; SameSite=Lax;`, + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname}; SameSite=Lax;`, + `${cookieName}=; max-age=0; path=/; SameSite=Lax;`, + `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname}; SameSite=Lax;` ]; clearStrategies.forEach(strategy => { @@ -75,7 +76,7 @@ async function handleLogout(event) { // Verify cookies are cleared const remainingCookies = document.cookie.split(';').filter(c => c.trim()); - console.log('[LOGOUT] Remaining cookies after clearing:', remainingCookies); + // Debug messages disabled // Update UI state document.body.classList.remove('authenticated', 'logged-in'); @@ -84,7 +85,7 @@ async function handleLogout(event) { // 2. Try to invalidate server session (non-blocking) if (authToken) { try { - console.log('[LOGOUT] Attempting to invalidate server session'); + // Debug messages disabled const controller = new AbortController(); const timeoutId = setTimeout(() => controller.abort(), 2000); @@ -99,18 +100,18 @@ async function handleLogout(event) { }); clearTimeout(timeoutId); - console.log('[LOGOUT] Server session invalidation completed'); + // Debug messages disabled } catch (error) { - console.warn('[LOGOUT] Server session invalidation failed (non-critical):', error); + // Debug messages disabled } } // 3. Final redirect - console.log('[LOGOUT] Redirecting to home page'); + // Debug messages disabled window.location.href = '/?logout=' + Date.now(); } catch (error) { - console.error('[LOGOUT] Unexpected error during logout:', error); + // Debug messages disabled if (window.showToast) { showToast('Logout failed. 
Please try again.'); } @@ -138,7 +139,7 @@ async function handleDeleteAccount() { } // Show loading state - const deleteButton = document.getElementById('delete-account-button'); + const deleteButton = document.getElementById('delete-account-from-privacy'); const originalText = deleteButton.textContent; deleteButton.disabled = true; deleteButton.textContent = 'Deleting...'; @@ -162,7 +163,7 @@ async function handleDeleteAccount() { // Clear all authentication-related data from localStorage const keysToRemove = [ - 'uid', 'uid_time', 'confirmed_uid', 'last_page', + 'uid', 'uid_time', 'last_page', 'isAuthenticated', 'authToken', 'user', 'token', 'sessionid' ]; @@ -180,11 +181,11 @@ async function handleDeleteAccount() { // Clear all cookies using multiple strategies const clearCookie = (cookieName) => { const clearStrategies = [ - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;`, - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname};`, - `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname};`, - `${cookieName}=; max-age=0; path=/;`, - `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname};` + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; SameSite=Lax;`, + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=${window.location.hostname}; SameSite=Lax;`, + `${cookieName}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/; domain=.${window.location.hostname}; SameSite=Lax;`, + `${cookieName}=; max-age=0; path=/; SameSite=Lax;`, + `${cookieName}=; max-age=0; path=/; domain=${window.location.hostname}; SameSite=Lax;` ]; clearStrategies.forEach(strategy => { @@ -224,7 +225,7 @@ async function handleDeleteAccount() { showToast(`Failed to delete account: ${error.message}`); // Reset button state - const deleteButton = document.getElementById('delete-account-button'); + const deleteButton = document.getElementById('delete-account-from-privacy'); if (deleteButton) { deleteButton.disabled = false; deleteButton.textContent = '🗑️ Delete Account'; @@ -251,33 +252,37 @@ function debugElementVisibility(elementId) { parentDisplay: el.parentElement ? window.getComputedStyle(el.parentElement).display : 'no-parent', parentVisibility: el.parentElement ? 
window.getComputedStyle(el.parentElement).visibility : 'no-parent', rect: el.getBoundingClientRect() - }; + } } +// Make updateQuotaDisplay available globally +window.updateQuotaDisplay = updateQuotaDisplay; + /** * Initialize the dashboard and handle authentication state */ -async function initDashboard() { - console.log('[DASHBOARD] Initializing dashboard...'); +async function initDashboard(uid = null) { + // Debug messages disabled try { const guestDashboard = document.getElementById('guest-dashboard'); const userDashboard = document.getElementById('user-dashboard'); const userUpload = document.getElementById('user-upload-area'); const logoutButton = document.getElementById('logout-button'); - const deleteAccountButton = document.getElementById('delete-account-button'); + const deleteAccountButton = document.getElementById('delete-account-from-privacy'); const fileList = document.getElementById('file-list'); - if (logoutButton) { - logoutButton.addEventListener('click', handleLogout); - } - if (deleteAccountButton) { - deleteAccountButton.addEventListener('click', (e) => { - e.preventDefault(); - handleDeleteAccount(); - }); + // Only attach event listeners once to prevent duplicates + if (!dashboardInitialized) { + if (logoutButton) { + logoutButton.addEventListener('click', handleLogout); + } + // Delete account button is handled by auth.js delegated event listener + // Removed duplicate event listener to prevent double confirmation dialogs + dashboardInitialized = true; } - const isAuthenticated = (document.cookie.includes('isAuthenticated=true') || localStorage.getItem('isAuthenticated') === 'true'); + const effectiveUid = uid || getCookie('uid') || localStorage.getItem('uid'); + const isAuthenticated = !!effectiveUid; if (isAuthenticated) { document.body.classList.add('authenticated'); @@ -286,9 +291,11 @@ async function initDashboard() { if (userUpload) userUpload.style.display = 'block'; if (guestDashboard) guestDashboard.style.display = 'none'; - const uid = getCookie('uid') || localStorage.getItem('uid'); - if (uid && window.fetchAndDisplayFiles) { - await window.fetchAndDisplayFiles(uid); + if (window.fetchAndDisplayFiles) { + // Use email-based UID for file operations if available, fallback to effectiveUid + const fileOperationUid = localStorage.getItem('uid') || effectiveUid; // uid is now email-based + // Debug messages disabled + await window.fetchAndDisplayFiles(fileOperationUid); } } else { document.body.classList.remove('authenticated'); @@ -297,7 +304,7 @@ async function initDashboard() { if (userDashboard) userDashboard.style.display = 'none'; if (userUpload) userUpload.style.display = 'none'; if (fileList) { - fileList.innerHTML = `
  <li>Please log in to view your files.</li>
  `; + fileList.innerHTML = `
  <li>Please log in to view your files.</li>
  `; } } } catch (e) { @@ -326,11 +333,11 @@ async function fetchAndDisplayFiles(uid) { const fileList = document.getElementById('file-list'); if (!fileList) { - console.error('[FILES] File list element not found'); + // Debug messages disabled return; } - console.log(`[FILES] Fetching files for user: ${uid}`); + // Debug messages disabled fileList.innerHTML = '
  <li>Loading your files...</li>
  • '; // Prepare headers with auth token if available @@ -344,44 +351,44 @@ async function fetchAndDisplayFiles(uid) { headers['Authorization'] = `Bearer ${authToken}`; } - console.log('[FILES] Making request to /me with headers:', headers); + // Debug messages disabled try { // The backend should handle authentication via session cookies // We include the auth token in headers if available, but don't rely on it for auth - console.log(`[FILES] Making request to /me/${uid} with credentials...`); - const response = await fetch(`/me/${uid}`, { + // Debug messages disabled + const response = await fetch(`/user-files/${uid}`, { method: 'GET', credentials: 'include', // Important: include cookies for session auth headers: headers }); - console.log('[FILES] Response status:', response.status); - console.log('[FILES] Response headers:', Object.fromEntries([...response.headers.entries()])); + // Debug messages disabled + // Debug messages disabled // Get response as text first to handle potential JSON parsing errors const responseText = await response.text(); - console.log('[FILES] Raw response text:', responseText); + // Debug messages disabled // Parse the JSON response let responseData = {}; if (responseText && responseText.trim() !== '') { try { responseData = JSON.parse(responseText); - console.log('[FILES] Successfully parsed JSON response:', responseData); + // Debug messages disabled } catch (e) { - console.error('[FILES] Failed to parse JSON response. Response text:', responseText); - console.error('[FILES] Error details:', e); + // Debug messages disabled + // Debug messages disabled // If we have a non-JSON response but the status is 200, try to handle it if (response.ok) { - console.warn('[FILES] Non-JSON response with 200 status, treating as empty response'); + // Debug messages disabled } else { throw new Error(`Invalid JSON response from server: ${e.message}`); } } } else { - console.log('[FILES] Empty response received, using empty object'); + // Debug messages disabled } // Note: Authentication is handled by the parent component @@ -390,13 +397,13 @@ async function fetchAndDisplayFiles(uid) { if (response.ok) { // Check if the response has the expected format if (!responseData || !Array.isArray(responseData.files)) { - console.error('[FILES] Invalid response format, expected {files: [...]}:', responseData); + // Debug messages disabled fileList.innerHTML = '
  <li>Error: Invalid response from server</li>
  • '; return; } const files = responseData.files; - console.log('[FILES] Files array:', files); + // Debug messages disabled if (files.length === 0) { fileList.innerHTML = '
  <li>No files uploaded yet.</li>
  • '; @@ -406,68 +413,9 @@ async function fetchAndDisplayFiles(uid) { // Clear the loading message fileList.innerHTML = ''; - // Track displayed files to prevent duplicates using stored filenames as unique identifiers - const displayedFiles = new Set(); - - // Add each file to the list - files.forEach(file => { - // Get the stored filename (with UUID) - this is our unique identifier - const storedFileName = file.stored_name || file.name || file; - - // Skip if we've already displayed this file - if (displayedFiles.has(storedFileName)) { - console.log(`[FILES] Skipping duplicate file with stored name: ${storedFileName}`); - return; - } - - displayedFiles.add(storedFileName); - - const fileExt = storedFileName.split('.').pop().toLowerCase(); - const fileUrl = `/data/${uid}/${encodeURIComponent(storedFileName)}`; - const fileSize = file.size ? formatFileSize(file.size) : 'N/A'; - - const listItem = document.createElement('li'); - listItem.className = 'file-item'; - listItem.setAttribute('data-uid', uid); - - // Create file icon based on file extension - let fileIcon = '📄'; // Default icon - if (['mp3', 'wav', 'ogg', 'm4a', 'opus'].includes(fileExt)) { - fileIcon = '🎵'; - } else if (['jpg', 'jpeg', 'png', 'gif', 'webp'].includes(fileExt)) { - fileIcon = '🖼️'; - } else if (['pdf', 'doc', 'docx', 'txt'].includes(fileExt)) { - fileIcon = '📄'; - } - - // Use original_name if available, otherwise use the stored filename for display - const displayName = file.original_name || storedFileName; - - listItem.innerHTML = ` -
-      [stripped markup: ${fileIcon} ${displayName} ${fileSize}]
-      [stripped markup: ⬇️ Download link and delete button]
    - `; - - // Delete button handler will be handled by event delegation - // No need to add individual event listeners here - - fileList.appendChild(listItem); - }); + // Use the new global function to render the files + window.displayUserFiles(uid, files); + } else { // Handle non-OK responses if (response.status === 401) { @@ -482,10 +430,10 @@ async function fetchAndDisplayFiles(uid) { Error loading files (${response.status}). Please try again later. `; } - console.error('[FILES] Server error:', response.status, response.statusText); + // Debug messages disabled } } catch (error) { - console.error('[FILES] Error fetching files:', error); + // Debug messages disabled const fileList = document.getElementById('file-list'); if (fileList) { fileList.innerHTML = ` @@ -496,6 +444,69 @@ async function fetchAndDisplayFiles(uid) { } } +// Function to update the quota display +async function updateQuotaDisplay(uid) { + // Debug messages disabled + try { + const authToken = localStorage.getItem('authToken'); + const headers = { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }; + + if (authToken) { + headers['Authorization'] = `Bearer ${authToken}`; + } + + // Debug messages disabled + // Fetch user info which includes quota + const response = await fetch(`/me/${uid}`, { + method: 'GET', + credentials: 'include', + headers: headers + }); + + // Debug messages disabled + if (response.ok) { + const userData = await response.json(); + // Debug messages disabled + + // Update the quota display + const quotaText = document.getElementById('quota-text'); + const quotaBar = document.getElementById('quota-bar'); + + // Debug messages disabled + // Debug messages disabled + + if (quotaText && userData.quota) { + const usedMB = (userData.quota.used_bytes / (1024 * 1024)).toFixed(2); + const maxMB = (userData.quota.max_bytes / (1024 * 1024)).toFixed(2); + const percentage = userData.quota.percentage || 0; + + // Debug messages disabled + + const quotaDisplayText = `${usedMB} MB of ${maxMB} MB (${percentage}%)`; + quotaText.textContent = quotaDisplayText; + // Debug messages disabled + + if (quotaBar) { + quotaBar.value = percentage; + // Debug messages disabled + } + } else { + // Debug messages disabled + } + } else { + // Debug messages disabled + } + } catch (error) { + // Debug messages disabled + } +} + +// Make fetchAndDisplayFiles globally accessible +window.fetchAndDisplayFiles = fetchAndDisplayFiles; + // Function to handle file deletion async function deleteFile(uid, fileName, listItem, displayName = '') { const fileToDelete = displayName || fileName; @@ -519,7 +530,7 @@ async function deleteFile(uid, fileName, listItem, displayName = '') { throw new Error('User not authenticated. Please log in again.'); } - console.log(`[DELETE] Attempting to delete file: ${fileName} for user: ${uid}`); + // Debug messages disabled const authToken = localStorage.getItem('authToken'); const headers = { 'Content-Type': 'application/json' }; @@ -553,7 +564,7 @@ async function deleteFile(uid, fileName, listItem, displayName = '') { fileList.innerHTML = '
  <li>No files uploaded yet.</li>
  • '; } } catch (error) { - console.error('[DELETE] Error deleting file:', error); + // Debug messages disabled showToast(`Error deleting "${fileToDelete}": ${error.message}`, 'error'); // Reset the button state if there was an error @@ -575,7 +586,7 @@ function initFileUpload() { const fileInput = document.getElementById('fileInputUser'); if (!uploadArea || !fileInput) { - console.warn('[UPLOAD] Required elements not found for file upload'); + // Debug messages disabled return; } @@ -630,7 +641,7 @@ function initFileUpload() { } } catch (error) { - console.error('[UPLOAD] Error uploading file:', error); + // Debug messages disabled showToast(`Upload failed: ${error.message}`, 'error'); } finally { // Reset file input and restore upload area text @@ -679,9 +690,15 @@ function initFileUpload() { } // Main initialization when the DOM is fully loaded -document.addEventListener('DOMContentLoaded', () => { +document.addEventListener('DOMContentLoaded', async () => { // Initialize dashboard components - initDashboard(); // initFileUpload is called from within initDashboard + await initDashboard(); // initFileUpload is called from within initDashboard + + // Update quota display if user is logged in + const uid = localStorage.getItem('uid'); + if (uid) { + updateQuotaDisplay(uid); + } // Delegated event listener for clicks on the document document.addEventListener('click', (e) => { @@ -701,10 +718,10 @@ document.addEventListener('DOMContentLoaded', () => { const listItem = deleteButton.closest('.file-item'); if (!listItem) return; - const uid = localStorage.getItem('uid') || localStorage.getItem('confirmed_uid'); + const uid = localStorage.getItem('uid'); if (!uid) { showToast('You need to be logged in to delete files', 'error'); - console.error('[DELETE] No UID found in localStorage'); + // Debug messages disabled return; } @@ -715,8 +732,9 @@ document.addEventListener('DOMContentLoaded', () => { } }); - // Make fetchAndDisplayFiles available globally + // Make dashboard functions available globally window.fetchAndDisplayFiles = fetchAndDisplayFiles; + window.initDashboard = initDashboard; // Login/Register (guest) const regForm = document.getElementById('register-form'); @@ -757,7 +775,7 @@ document.addEventListener('DOMContentLoaded', () => { regForm.reset(); } else { showToast(`Error: ${data.detail || 'Unknown error occurred'}`, 'error'); - console.error('Registration failed:', data); + // Debug messages disabled } } catch (parseError) { console.error('Error parsing response:', parseError); diff --git a/static/file-display.js b/static/file-display.js new file mode 100644 index 0000000..9ebbea4 --- /dev/null +++ b/static/file-display.js @@ -0,0 +1,220 @@ +// This function is responsible for rendering the list of files to the DOM. +// It is globally accessible via window.displayUserFiles. + +window.displayUserFiles = function(uid, files) { + const fileList = document.getElementById('file-list'); + if (!fileList) { + // Debug messages disabled + return; + } + + if (!files || files.length === 0) { + fileList.innerHTML = '
  <li>You have no uploaded files yet.</li>
  • '; + return; + } + + const fragment = document.createDocumentFragment(); + const displayedFiles = new Set(); + + files.forEach(file => { + // Use original_name for display, stored_name for operations. + let displayName = file.original_name || file.stored_name || 'Unnamed File'; + const storedFileName = file.stored_name || file.original_name; + // No UUID pattern replacement: always show the original_name from backend. + + // Skip if no valid identifier is found or if it's a duplicate. + if (!storedFileName || displayedFiles.has(storedFileName)) { + return; + } + displayedFiles.add(storedFileName); + + const listItem = document.createElement('li'); + const fileUrl = `/user-uploads/${uid}/${encodeURIComponent(storedFileName)}`; + const fileSize = file.size ? (file.size / 1024 / 1024).toFixed(2) + ' MB' : 'N/A'; + + let fileIcon = '🎵'; // Default icon + const fileExt = displayName.split('.').pop().toLowerCase(); + if (['mp3', 'wav', 'ogg', 'flac', 'm4a'].includes(fileExt)) { + fileIcon = '🎵'; + } else if (['jpg', 'jpeg', 'png', 'gif', 'svg'].includes(fileExt)) { + fileIcon = '🖼️'; + } else if (['pdf', 'doc', 'docx', 'txt'].includes(fileExt)) { + fileIcon = '📄'; + } + + listItem.innerHTML = ` +
+      [stripped markup: file header with ${displayName} and ${fileSize}, plus a 🗑️ delete button]
    + + `; + + fragment.appendChild(listItem); + }); + + fileList.appendChild(fragment); +}; + +// Function to handle file deletion +async function deleteFile(uid, fileName, listItem, displayName = '') { + const fileToDelete = displayName || fileName; + if (!confirm(`Are you sure you want to delete "${fileToDelete}"?`)) { + return; + } + + // Show loading state + if (listItem) { + listItem.style.opacity = '0.6'; + listItem.style.pointerEvents = 'none'; + const deleteButton = listItem.querySelector('.delete-file'); + if (deleteButton) { + deleteButton.disabled = true; + deleteButton.textContent = '⏳'; + } + } + + try { + if (!uid) { + throw new Error('User not authenticated. Please log in again.'); + } + + // Debug messages disabled + const authToken = localStorage.getItem('authToken'); + const headers = { 'Content-Type': 'application/json' }; + + if (authToken) { + headers['Authorization'] = `Bearer ${authToken}`; + } + + // Get the email from localStorage (it's the UID) + const email = localStorage.getItem('uid'); + if (!email) { + throw new Error('User not authenticated'); + } + + // The backend expects the full email as the UID in the path + // We need to ensure it's properly encoded for the URL + const username = email; + // Debug messages disabled + + // Check if the filename is just a UUID (without log ID prefix) + const uuidPattern = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\.\w+$/i; + let fileToDelete = fileName; + + // If the filename is just a UUID, try to find the actual file with log ID prefix + if (uuidPattern.test(fileName)) { + // Debug messages disabled + try { + // First try to get the list of files to find the one with the matching UUID + const filesResponse = await fetch(`/user-files/${uid}`, { + method: 'GET', + headers: headers, + credentials: 'include' + }); + + if (filesResponse.ok) { + const filesData = await filesResponse.json(); + if (filesData.files && Array.isArray(filesData.files)) { + // Look for a file that contains our UUID in its name + const matchingFile = filesData.files.find(f => + f.stored_name && f.stored_name.includes(fileName) + ); + + if (matchingFile && matchingFile.stored_name) { + // Debug messages disabled + fileToDelete = matchingFile.stored_name; + } + } + } + } catch (e) { + // Debug messages disabled + // Continue with the original filename if there's an error + } + } + + // Use the username in the URL with the correct filename + // Debug messages disabled + const response = await fetch(`/uploads/${username}/${encodeURIComponent(fileToDelete)}`, { + method: 'DELETE', + headers: headers, + credentials: 'include' + }); + + if (!response.ok) { + const errorData = await response.json().catch(() => ({})); + throw new Error(errorData.detail || `HTTP error! status: ${response.status}`); + } + + // Remove the file from the UI immediately + if (listItem && listItem.parentNode) { + listItem.parentNode.removeChild(listItem); + } + + // Show success message + window.showToast(`Successfully deleted "${fileToDelete}"`, 'success'); + + // If the file list is now empty, show a message + const fileList = document.getElementById('file-list'); + if (fileList && fileList.children.length === 0) { + fileList.innerHTML = '
  <li>No files uploaded yet.</li>
  • '; + } + + // Refresh the file list and stream + const uid_current = localStorage.getItem('uid'); + if (window.fetchAndDisplayFiles) { + // Use email-based UID for file operations if available, fallback to uid_current + const fileOperationUid = localStorage.getItem('uid') || uid_current; // uid is now email-based + // Debug messages disabled + await window.fetchAndDisplayFiles(fileOperationUid); + } + if (window.loadProfileStream) { + await window.loadProfileStream(uid_current); + } + } catch (error) { + // Debug messages disabled + window.showToast(`Error deleting "${fileToDelete}": ${error.message}`, 'error'); + + // Reset the button state if there was an error + if (listItem) { + listItem.style.opacity = ''; + listItem.style.pointerEvents = ''; + const deleteButton = listItem.querySelector('.delete-file'); + if (deleteButton) { + deleteButton.disabled = false; + deleteButton.textContent = '🗑️'; + } + } + } +} + +// Add event delegation for delete buttons +document.addEventListener('DOMContentLoaded', () => { + const fileList = document.getElementById('file-list'); + if (fileList) { + fileList.addEventListener('click', (e) => { + const deleteButton = e.target.closest('.delete-file'); + if (deleteButton) { + e.preventDefault(); + e.stopPropagation(); + + const listItem = deleteButton.closest('li'); + if (!listItem) return; + + const uid = localStorage.getItem('uid'); + if (!uid) { + window.showToast('You need to be logged in to delete files', 'error'); + // Debug messages disabled + return; + } + + const fileName = deleteButton.getAttribute('data-filename'); + const displayName = deleteButton.getAttribute('data-display-name') || fileName; + + deleteFile(uid, fileName, listItem, displayName); + } + }); + } +}); diff --git a/static/global-audio-manager.js b/static/global-audio-manager.js index 619b18d..be503a5 100644 --- a/static/global-audio-manager.js +++ b/static/global-audio-manager.js @@ -23,7 +23,7 @@ class GlobalAudioManager { * @param {Object} playerInstance - Reference to the player instance */ startPlayback(playerType, uid, playerInstance = null) { - console.log(`[GlobalAudioManager] startPlayback called by: ${playerType} for UID: ${uid}`); + // Debug messages disabled // If the same player is already playing the same UID, allow it if (this.currentPlayer === playerType && this.currentUid === uid) { return true; @@ -38,7 +38,7 @@ class GlobalAudioManager { this.currentPlayer = playerType; this.currentUid = uid; - console.log(`Global Audio Manager: ${playerType} player started playing UID: ${uid}`); + // Debug messages disabled return true; } @@ -48,7 +48,7 @@ class GlobalAudioManager { */ stopPlayback(playerType) { if (this.currentPlayer === playerType) { - console.log(`Global Audio Manager: ${playerType} player stopped`); + // Debug messages disabled this.currentPlayer = null; this.currentUid = null; } @@ -93,7 +93,7 @@ class GlobalAudioManager { * Notify a specific player type to stop */ notifyStop(playerType) { - console.log(`Global Audio Manager: Notifying ${playerType} player to stop`); + // Debug messages disabled this.listeners.forEach(listener => { if (listener.playerType === playerType) { try { diff --git a/static/index.html b/static/index.html index b74bf7b..80478d3 100644 --- a/static/index.html +++ b/static/index.html @@ -21,9 +21,11 @@ } + + - - + +
@@ -66,12 +68,12 @@
-[stripped markup: quota meter section showing "Quota: 0 MB"]
-[stripped markup: "Uploaded Files" heading]
+[stripped markup: "Uploaded Files" heading and file list with "Loading files..." placeholder]
+[stripped markup: quota meter showing "Quota: 0 MB"]
@@ -194,7 +196,6 @@
    - diff --git a/static/magic-login.js b/static/magic-login.js index 8c465fd..68bba97 100644 --- a/static/magic-login.js +++ b/static/magic-login.js @@ -1,90 +1,43 @@ -// static/magic-login.js — handles magic‑link token UI +/** + * Simplified Magic Login Module + * + * This file now uses the centralized AuthManager for authentication logic. + * The token-based magic login is handled by the AuthManager. + */ + +import authManager from './auth-manager.js'; import { showSection } from './nav.js'; let magicLoginSubmitted = false; +/** + * Initialize magic login - now delegated to AuthManager + * This function is kept for backward compatibility but the actual + * magic login logic is handled by the AuthManager during initialization. + */ export async function initMagicLogin() { - console.debug('[magic-login] initMagicLogin called'); + // Debug messages disabled + + // The AuthManager handles both URL-based and token-based magic login + // during its initialization, so we just need to ensure it's initialized + if (!window.authManager) { + // Debug messages disabled + await authManager.initialize(); + } + + // Check if there was a magic login processed const params = new URLSearchParams(location.search); const token = params.get('token'); - if (!token) { - console.debug('[magic-login] No token in URL'); - return; - } - // Remove token from URL immediately to prevent loops - const url = new URL(window.location.href); - url.searchParams.delete('token'); - window.history.replaceState({}, document.title, url.pathname + url.search); - try { - const formData = new FormData(); - formData.append('token', token); - const res = await fetch('/magic-login', { - method: 'POST', - body: formData, - }); - if (res.redirected) { - // If redirected, backend should set cookie; but set localStorage for SPA - const url = new URL(res.url); - const confirmedUid = url.searchParams.get('confirmed_uid'); - if (confirmedUid) { - // Generate a simple auth token (in a real app, this would come from the server) - const authToken = 'token-' + Math.random().toString(36).substring(2, 15); - - // Set cookies and localStorage for SPA session logic - document.cookie = `uid=${encodeURIComponent(confirmedUid)}; path=/; SameSite=Lax`; - document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; - - // Store in localStorage for client-side access - localStorage.setItem('uid', confirmedUid); - localStorage.setItem('confirmed_uid', confirmedUid); - localStorage.setItem('authToken', authToken); - localStorage.setItem('uid_time', Date.now().toString()); - } - window.location.href = res.url; - return; - } - // If not redirected, show error (shouldn't happen in normal flow) - let data; - const contentType = res.headers.get('content-type'); - if (contentType && contentType.includes('application/json')) { - data = await res.json(); - if (data && data.confirmed_uid) { - // Generate a simple auth token (in a real app, this would come from the server) - const authToken = 'token-' + Math.random().toString(36).substring(2, 15); - - // Set cookies and localStorage for SPA session logic - document.cookie = `uid=${encodeURIComponent(data.confirmed_uid)}; path=/; SameSite=Lax`; - document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; - - // Store in localStorage for client-side access - localStorage.setItem('uid', data.confirmed_uid); - localStorage.setItem('confirmed_uid', data.confirmed_uid); - localStorage.setItem('authToken', authToken); - localStorage.setItem('uid_time', Date.now().toString()); - 
import('./toast.js').then(({ showToast }) => { - showToast('✅ Login successful!'); - // Update UI state after login - const guestDashboard = document.getElementById('guest-dashboard'); - const userDashboard = document.getElementById('user-dashboard'); - const registerPage = document.getElementById('register-page'); - - if (guestDashboard) guestDashboard.style.display = 'none'; - if (userDashboard) userDashboard.style.display = 'block'; - if (registerPage) registerPage.style.display = 'none'; - - // Show the user's stream page - if (typeof showSection === 'function') { - showSection('me-page'); - } - }); - return; - } - alert(data.detail || 'Login failed.'); - } else { - const text = await res.text(); - alert(text || 'Login failed.'); - } - } catch (err) { - alert('Network error: ' + err); + + if (token) { + // Debug messages disabled + } else { + // Debug messages disabled } } + +// Export for backward compatibility +export { magicLoginSubmitted }; + +// Make showSection available globally for AuthManager +window.showSection = showSection; diff --git a/static/personal-player.js b/static/personal-player.js index 9df5b89..6614fe8 100644 --- a/static/personal-player.js +++ b/static/personal-player.js @@ -1,81 +1,57 @@ import { showToast } from "./toast.js"; -import { globalAudioManager } from './global-audio-manager.js'; +import { SharedAudioPlayer } from './shared-audio-player.js'; -// Module-level state for the personal player -let audio = null; +function getPersonalStreamUrl(uid) { + return `/audio/${encodeURIComponent(uid)}/stream.opus`; +} + +function updatePlayPauseButton(button, isPlaying) { + if (button) button.textContent = isPlaying ? '⏸️' : '▶️'; + // Optionally, update other UI elements here +} + +const personalPlayer = new SharedAudioPlayer({ + playerType: 'personal', + getStreamUrl: getPersonalStreamUrl, + onUpdateButton: updatePlayPauseButton +}); /** * Finds or creates the audio element for the personal stream. 
* @returns {HTMLAudioElement | null} */ -function getOrCreateAudioElement() { - if (audio) { - return audio; - } - - audio = document.createElement('audio'); - audio.id = 'me-audio'; - audio.preload = 'metadata'; - audio.crossOrigin = 'use-credentials'; - document.body.appendChild(audio); - - // --- Setup Event Listeners (only once) --- - audio.addEventListener('error', (e) => { - console.error('Personal Player: Audio Element Error', e); - const error = audio.error; - let errorMessage = 'An unknown audio error occurred.'; - if (error) { - switch (error.code) { - case error.MEDIA_ERR_ABORTED: - errorMessage = 'Audio playback was aborted.'; - break; - case error.MEDIA_ERR_NETWORK: - errorMessage = 'A network error caused the audio to fail.'; - break; - case error.MEDIA_ERR_DECODE: - errorMessage = 'The audio could not be decoded.'; - break; - case error.MEDIA_ERR_SRC_NOT_SUPPORTED: - errorMessage = 'The audio format is not supported by your browser.'; - break; - default: - errorMessage = `An unexpected error occurred (Code: ${error.code}).`; - break; +function cleanupPersonalAudio() { + if (audioElement) { + try { + if (audioElement._eventHandlers) { + const { onPlay, onPause, onEnded, onError } = audioElement._eventHandlers; + if (onPlay) audioElement.removeEventListener('play', onPlay); + if (onPause) audioElement.removeEventListener('pause', onPause); + if (onEnded) audioElement.removeEventListener('ended', onEnded); + if (onError) audioElement.removeEventListener('error', onError); } + audioElement.pause(); + audioElement.removeAttribute('src'); + audioElement.load(); + if (audioElement._eventHandlers) delete audioElement._eventHandlers; + // Remove from DOM + if (audioElement.parentNode) audioElement.parentNode.removeChild(audioElement); + } catch (e) { + console.warn('[personal-player.js] Error cleaning up audio element:', e); } - showToast(errorMessage, 'error'); - }); - - audio.addEventListener('play', () => updatePlayPauseButton(true)); - audio.addEventListener('pause', () => updatePlayPauseButton(false)); - audio.addEventListener('ended', () => updatePlayPauseButton(false)); - - // The canplaythrough listener is removed as it violates autoplay policies. - // The user will perform a second click to play the media after it's loaded. - - return audio; -} - -/** - * Updates the play/pause button icon based on audio state. - * @param {boolean} isPlaying - Whether the audio is currently playing. - */ -function updatePlayPauseButton(isPlaying) { - const playPauseBtn = document.querySelector('#me-page .play-pause-btn'); - if (playPauseBtn) { - playPauseBtn.textContent = isPlaying ? '⏸️' : '▶️'; + audioElement = null; } } -/** - * Loads the user's personal audio stream into the player. - * @param {string} uid - The user's unique ID. 
- */ -export async function loadProfileStream(uid) { - const audioElement = getOrCreateAudioElement(); - const audioSrc = `/audio/${uid}/stream.opus?t=${Date.now()}`; - console.log(`[personal-player.js] Setting personal audio source to: ${audioSrc}`); - audioElement.src = audioSrc; + + +// Use the shared player for loading and playing the personal stream +export function loadProfileStream(uid, playPauseBtn) { + if (!uid) { + showToast('No UID provided for profile stream', 'error'); + return; + } + personalPlayer.play(uid, playPauseBtn); } /** @@ -91,50 +67,19 @@ export function initPersonalPlayer() { if (!playPauseBtn) return; e.stopPropagation(); - const audio = getOrCreateAudioElement(); - if (!audio) return; - - try { - if (audio.paused) { - if (!audio.src || audio.src.endsWith('/#')) { - showToast('No audio file available. Please upload one first.', 'info'); - return; - } - - console.log('Attempting to play...'); - globalAudioManager.startPlayback('personal', localStorage.getItem('uid') || 'personal'); - - const playPromise = audio.play(); - if (playPromise !== undefined) { - playPromise.catch(error => { - console.error(`Initial play() failed: ${error.name}. This is expected on first load.`); - // If play fails, it's because the content isn't loaded. - // The recovery is to call load(). The user will need to click play again. - console.log('Calling load() to fetch media...'); - audio.load(); - showToast('Stream is loading. Please click play again in a moment.', 'info'); - }); - } - } else { - console.log('Attempting to pause...'); - audio.pause(); - } - } catch (err) { - console.error('A synchronous error occurred in handlePlayPause:', err); - showToast('An unexpected error occurred with the audio player.', 'error'); + const uid = localStorage.getItem('uid'); + if (!uid) { + showToast('Please log in to play audio.', 'error'); + return; + } + // Toggle play/pause + if (personalPlayer.audioElement && !personalPlayer.audioElement.paused && !personalPlayer.audioElement.ended) { + personalPlayer.pause(); + } else { + loadProfileStream(uid, playPauseBtn); } }); - // Listen for stop requests from the global manager - globalAudioManager.addListener('personal', () => { - console.log('[personal-player.js] Received stop request from global audio manager.'); - const audio = getOrCreateAudioElement(); - if (audio && !audio.paused) { - console.log('[personal-player.js] Pausing personal audio player.'); - audio.pause(); - } - }); - - // Initial setup - getOrCreateAudioElement(); + // Make loadProfileStream globally accessible for upload.js + window.loadProfileStream = loadProfileStream; } diff --git a/static/remove-confirmed-uid.js b/static/remove-confirmed-uid.js new file mode 100644 index 0000000..a009aa6 --- /dev/null +++ b/static/remove-confirmed-uid.js @@ -0,0 +1,70 @@ +/** + * Cleanup Script: Remove Redundant confirmed_uid from localStorage + * + * This script removes the redundant confirmed_uid field from localStorage + * for users who might have it stored from the old authentication system. 
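+ *
+ * Effect sketch (stored values are illustrative): with
+ *   uid = "user@example.com" and confirmed_uid = "user@example.com"
+ * in localStorage, the script removes confirmed_uid and keeps uid intact;
+ * if only confirmed_uid is present, its value is copied into uid first.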
+ */ + +(function() { + 'use strict'; + + console.log('[CONFIRMED_UID_CLEANUP] Starting cleanup of redundant confirmed_uid field...'); + + // Check if confirmed_uid exists in localStorage + const confirmedUid = localStorage.getItem('confirmed_uid'); + const currentUid = localStorage.getItem('uid'); + + if (confirmedUid) { + console.log(`[CONFIRMED_UID_CLEANUP] Found confirmed_uid: ${confirmedUid}`); + console.log(`[CONFIRMED_UID_CLEANUP] Current uid: ${currentUid}`); + + // Verify that uid exists and is properly set + if (!currentUid) { + console.warn('[CONFIRMED_UID_CLEANUP] No uid found, setting uid from confirmed_uid'); + localStorage.setItem('uid', confirmedUid); + } else if (currentUid !== confirmedUid) { + console.warn(`[CONFIRMED_UID_CLEANUP] UID mismatch - uid: ${currentUid}, confirmed_uid: ${confirmedUid}`); + console.log('[CONFIRMED_UID_CLEANUP] Keeping current uid value'); + } + + // Remove the redundant confirmed_uid + localStorage.removeItem('confirmed_uid'); + console.log('[CONFIRMED_UID_CLEANUP] Removed redundant confirmed_uid from localStorage'); + + // Log the cleanup action + console.log('[CONFIRMED_UID_CLEANUP] Cleanup completed successfully'); + } else { + console.log('[CONFIRMED_UID_CLEANUP] No confirmed_uid found, no cleanup needed'); + } + + // Also check for any other potential redundant fields + const redundantFields = [ + 'confirmed_uid', // Main target + 'confirmedUid', // Camel case variant + 'confirmed-uid' // Hyphenated variant + ]; + + let removedCount = 0; + redundantFields.forEach(field => { + if (localStorage.getItem(field)) { + localStorage.removeItem(field); + removedCount++; + console.log(`[CONFIRMED_UID_CLEANUP] Removed redundant field: ${field}`); + } + }); + + if (removedCount > 0) { + console.log(`[CONFIRMED_UID_CLEANUP] Removed ${removedCount} redundant authentication fields`); + } + + console.log('[CONFIRMED_UID_CLEANUP] Cleanup process completed'); +})(); + +// Export for manual execution if needed +if (typeof window !== 'undefined') { + window.removeConfirmedUidCleanup = function() { + const script = document.createElement('script'); + script.src = '/static/remove-confirmed-uid.js'; + document.head.appendChild(script); + }; +} diff --git a/static/shared-audio-player.js b/static/shared-audio-player.js new file mode 100644 index 0000000..8781e90 --- /dev/null +++ b/static/shared-audio-player.js @@ -0,0 +1,162 @@ +// shared-audio-player.js +// Unified audio player logic for both streams and personal player + +import { globalAudioManager } from './global-audio-manager.js'; + +export class SharedAudioPlayer { + constructor({ playerType, getStreamUrl, onUpdateButton }) { + this.playerType = playerType; // 'streams' or 'personal' + this.getStreamUrl = getStreamUrl; // function(uid) => url + this.onUpdateButton = onUpdateButton; // function(button, isPlaying) + this.audioElement = null; + this.currentUid = null; + this.isPlaying = false; + this.currentButton = null; + this._eventHandlers = {}; + + // Register stop listener + globalAudioManager.addListener(playerType, () => { + this.stop(); + }); + } + + pause() { + if (this.audioElement && !this.audioElement.paused && !this.audioElement.ended) { + this.audioElement.pause(); + this.isPlaying = false; + if (this.onUpdateButton && this.currentButton) { + this.onUpdateButton(this.currentButton, false); + } + } + } + + async play(uid, button) { + const ctx = `[SharedAudioPlayer][${this.playerType}]${uid ? 
`[${uid}]` : ''}`; + const isSameUid = this.currentUid === uid; + const isActive = this.audioElement && !this.audioElement.paused && !this.audioElement.ended; + + // Guard: If already playing the requested UID and not paused/ended, do nothing + if (isSameUid && isActive) { + if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, true); + return; + } + + // If same UID but paused, resume + if (isSameUid && this.audioElement && this.audioElement.paused && !this.audioElement.ended) { + try { + await this.audioElement.play(); + this.isPlaying = true; + if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, true); + globalAudioManager.startPlayback(this.playerType, uid); + } catch (err) { + this.isPlaying = false; + if (this.onUpdateButton) this.onUpdateButton(button || this.currentButton, false); + console.error(`${ctx} play() resume failed:`, err); + } + return; + } + + // Otherwise, stop current and start new + if (!isSameUid && this.audioElement) { + } else { + } + this.stop(); + this.currentUid = uid; + this.currentButton = button; + const url = this.getStreamUrl(uid); + this.audioElement = new Audio(url); + this.audioElement.preload = 'auto'; + this.audioElement.crossOrigin = 'anonymous'; + this.audioElement.style.display = 'none'; + document.body.appendChild(this.audioElement); + this._attachEventHandlers(); + try { + await this.audioElement.play(); + this.isPlaying = true; + if (this.onUpdateButton) this.onUpdateButton(button, true); + globalAudioManager.startPlayback(this.playerType, uid); + } catch (err) { + this.isPlaying = false; + if (this.onUpdateButton) this.onUpdateButton(button, false); + console.error(`${ctx} play() failed:`, err); + } + } + + stop() { + if (this.audioElement) { + this._removeEventHandlers(); + try { + this.audioElement.pause(); + this.audioElement.removeAttribute('src'); + this.audioElement.load(); + if (this.audioElement.parentNode) { + this.audioElement.parentNode.removeChild(this.audioElement); + } + } catch (e) { + console.warn('[shared-audio-player] Error cleaning up audio element:', e); + } + this.audioElement = null; + } + this.isPlaying = false; + this.currentUid = null; + if (this.currentButton && this.onUpdateButton) { + this.onUpdateButton(this.currentButton, false); + } + this.currentButton = null; + } + + _attachEventHandlers() { + if (!this.audioElement) return; + const ctx = `[SharedAudioPlayer][${this.playerType}]${this.currentUid ? 
`[${this.currentUid}]` : ''}`; + const logEvent = (event) => { + // Debug logging disabled + }; + // Core handlers + const onPlay = (e) => { + logEvent(e); + this.isPlaying = true; + if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, true); + }; + const onPause = (e) => { + logEvent(e); + // console.trace(`${ctx} Audio pause stack trace:`); + this.isPlaying = false; + if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false); + }; + const onEnded = (e) => { + logEvent(e); + this.isPlaying = false; + if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false); + }; + const onError = (e) => { + logEvent(e); + this.isPlaying = false; + if (this.currentButton && this.onUpdateButton) this.onUpdateButton(this.currentButton, false); + console.error(`${ctx} Audio error:`, e); + }; + // Attach handlers + this.audioElement.addEventListener('play', onPlay); + this.audioElement.addEventListener('pause', onPause); + this.audioElement.addEventListener('ended', onEnded); + this.audioElement.addEventListener('error', onError); + // Attach debug logging for all relevant events + const debugEvents = [ + 'abort','canplay','canplaythrough','durationchange','emptied','encrypted','loadeddata','loadedmetadata', + 'loadstart','playing','progress','ratechange','seeked','seeking','stalled','suspend','timeupdate','volumechange','waiting' + ]; + debugEvents.forEach(evt => { + this.audioElement.addEventListener(evt, logEvent); + }); // Logging now disabled + this._eventHandlers = { onPlay, onPause, onEnded, onError, debugEvents, logEvent }; + } + + _removeEventHandlers() { + if (!this.audioElement || !this._eventHandlers) return; + const { onPlay, onPause, onEnded, onError } = this._eventHandlers; + if (onPlay) this.audioElement.removeEventListener('play', onPlay); + if (onPause) this.audioElement.removeEventListener('pause', onPause); + if (onEnded) this.audioElement.removeEventListener('ended', onEnded); + if (onError) this.audioElement.removeEventListener('error', onError); + this._eventHandlers = {}; + } +} diff --git a/static/sound.js b/static/sound.js index 4227281..34335c5 100644 --- a/static/sound.js +++ b/static/sound.js @@ -1,17 +1,30 @@ // sound.js — reusable Web Audio beep export function playBeep(frequency = 432, duration = 0.2, type = 'sine') { - const ctx = new (window.AudioContext || window.webkitAudioContext)(); - const osc = ctx.createOscillator(); - const gain = ctx.createGain(); + try { + // Validate parameters to prevent audio errors + if (!Number.isFinite(frequency) || frequency <= 0) { + frequency = 432; // fallback to default + } + if (!Number.isFinite(duration) || duration <= 0) { + duration = 0.2; // fallback to default + } + + const ctx = new (window.AudioContext || window.webkitAudioContext)(); + const osc = ctx.createOscillator(); + const gain = ctx.createGain(); - osc.type = type; - osc.frequency.value = frequency; + osc.type = type; + osc.frequency.value = frequency; - osc.connect(gain); - gain.connect(ctx.destination); + osc.connect(gain); + gain.connect(ctx.destination); - gain.gain.setValueAtTime(0.1, ctx.currentTime); // subtle volume - osc.start(); - osc.stop(ctx.currentTime + duration); + gain.gain.setValueAtTime(0.1, ctx.currentTime); // subtle volume + osc.start(); + osc.stop(ctx.currentTime + duration); + } catch (error) { + // Silently handle audio errors to prevent breaking upload flow + console.warn('[SOUND] Audio beep failed:', error.message); + } } diff --git 
a/static/streams-ui.js b/static/streams-ui.js index 2320f16..c65588f 100644 --- a/static/streams-ui.js +++ b/static/streams-ui.js @@ -28,7 +28,7 @@ export function initStreamsUI() { // Register with global audio manager to handle stop requests from other players globalAudioManager.addListener('streams', () => { - console.log('[streams-ui] Received stop request from global audio manager'); + // Debug messages disabled stopPlayback(); }); } @@ -79,10 +79,10 @@ document.addEventListener('DOMContentLoaded', () => { function loadAndRenderStreams() { const ul = document.getElementById('stream-list'); if (!ul) { - console.error('[STREAMS-UI] Stream list element not found'); + // Debug messages disabled return; } - console.log('[STREAMS-UI] loadAndRenderStreams called, shouldForceRefresh:', shouldForceRefresh); + // Debug messages disabled // Don't start a new connection if one is already active and we're not forcing a refresh if (activeSSEConnection && !shouldForceRefresh) { @@ -140,7 +140,7 @@ function loadAndRenderStreams() { window.location.hostname === '127.0.0.1'; if (isLocalDevelopment || window.DEBUG_STREAMS) { const duration = Date.now() - connectionStartTime; - console.group('[streams-ui] Connection timeout reached'); + // Debug messages disabled console.log(`Duration: ${duration}ms`); console.log('Current time:', new Date().toISOString()); console.log('Streams received:', streams.length); @@ -203,18 +203,18 @@ function loadAndRenderStreams() { // Process the stream function processStream({ done, value }) { - console.log('[STREAMS-UI] processStream called with done:', done); + // Debug messages disabled if (done) { - console.log('[STREAMS-UI] Stream processing complete'); + // Debug messages disabled // Process any remaining data in the buffer if (buffer.trim()) { - console.log('[STREAMS-UI] Processing remaining buffer data'); + // Debug messages disabled try { const data = JSON.parse(buffer); - console.log('[STREAMS-UI] Parsed data from buffer:', data); + // Debug messages disabled processSSEEvent(data); } catch (e) { - console.error('[STREAMS-UI] Error parsing buffer data:', e); + // Debug messages disabled } } return; @@ -237,7 +237,7 @@ function loadAndRenderStreams() { const data = JSON.parse(dataMatch[1]); processSSEEvent(data); } catch (e) { - console.error('[streams-ui] Error parsing event data:', e, 'Event:', event); + // Debug messages disabled } } } @@ -298,7 +298,7 @@ function loadAndRenderStreams() { // Function to process SSE events function processSSEEvent(data) { - console.log('[STREAMS-UI] Processing SSE event:', data); + // Debug messages disabled if (data.end) { if (streams.length === 0) { ul.innerHTML = '
  <li>No active streams.</li>
  • '; @@ -356,7 +356,7 @@ function loadAndRenderStreams() { // Function to handle SSE errors function handleSSEError(error) { - console.error('[streams-ui] SSE error:', error); + // Debug messages disabled // Only show error if we haven't already loaded any streams if (streams.length === 0) { @@ -386,11 +386,11 @@ function loadAndRenderStreams() { export function renderStreamList(streams) { const ul = document.getElementById('stream-list'); if (!ul) { - console.warn('[STREAMS-UI] renderStreamList: #stream-list not found'); + // Debug messages disabled return; } - console.log('[STREAMS-UI] Rendering stream list with', streams.length, 'streams'); - console.debug('[STREAMS-UI] Streams data:', streams); + // Debug messages disabled + // Debug messages disabled if (Array.isArray(streams)) { if (streams.length) { // Sort by mtime descending (most recent first) @@ -409,10 +409,10 @@ export function renderStreamList(streams) { } } else { ul.innerHTML = '
  <li>Error: Invalid stream data.</li>
  • '; - console.error('[streams-ui] renderStreamList: streams is not an array', streams); + // Debug messages disabled } highlightActiveProfileLink(); - console.debug('[streams-ui] renderStreamList complete'); + // Debug messages disabled } export function highlightActiveProfileLink() { @@ -463,12 +463,7 @@ function escapeHtml(unsafe) { .replace(/'/g, "'"); } -// Function to update play/pause button state -function updatePlayPauseButton(button, isPlaying) { - if (!button) return; - button.textContent = isPlaying ? '⏸️' : '▶️'; - button.setAttribute('aria-label', isPlaying ? 'Pause' : 'Play'); -} + // Audio context for Web Audio API let audioContext = null; @@ -492,7 +487,7 @@ function getAudioContext() { // Stop current playback completely function stopPlayback() { - console.log('[streams-ui] Stopping playback'); + // Debug messages disabled // Stop Web Audio API if active if (audioSource) { @@ -561,120 +556,28 @@ function stopPlayback() { currentlyPlayingAudio = null; } -// Load and play audio using HTML5 Audio element for Opus -async function loadAndPlayAudio(uid, playPauseBtn) { - // If we already have an audio element for this UID and it's paused, just resume it - if (audioElement && currentUid === uid && audioElement.paused) { - try { - await audioElement.play(); - isPlaying = true; - updatePlayPauseButton(playPauseBtn, true); - return; - } catch (error) { - // Fall through to reload if resume fails - } - } - - // Stop any current playback - stopPlayback(); - - // Notify global audio manager that streams player is starting - globalAudioManager.startPlayback('streams', uid); - - // Update UI - updatePlayPauseButton(playPauseBtn, true); - currentlyPlayingButton = playPauseBtn; - currentUid = uid; - - try { - // Create a new audio element with the correct MIME type - const audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus`; - - // Create a new audio element with a small delay to prevent race conditions - await new Promise(resolve => setTimeout(resolve, 50)); - - audioElement = new Audio(audioUrl); - audioElement.preload = 'auto'; - audioElement.crossOrigin = 'anonymous'; // Important for CORS - - // Set up event handlers with proper binding - const onPlay = () => { - isPlaying = true; - updatePlayPauseButton(playPauseBtn, true); - }; - - const onPause = () => { - isPlaying = false; - updatePlayPauseButton(playPauseBtn, false); - }; - - const onEnded = () => { - isPlaying = false; - cleanupAudio(); - }; - - const onError = (e) => { - // Ignore errors from previous audio elements that were cleaned up - if (!audioElement || audioElement.readyState === 0) { - return; - } - - isPlaying = false; - updatePlayPauseButton(playPauseBtn, false); - - // Don't show error to user for aborted requests - if (audioElement.error && audioElement.error.code === MediaError.MEDIA_ERR_ABORTED) { - return; - } - - // Show error to user for other errors - if (typeof showToast === 'function') { - showToast('Error playing audio. 
The format may not be supported.', 'error'); - } - }; - - // Add event listeners - audioElement.addEventListener('play', onPlay, { once: true }); - audioElement.addEventListener('pause', onPause); - audioElement.addEventListener('ended', onEnded, { once: true }); - audioElement.addEventListener('error', onError); - - // Store references for cleanup - audioElement._eventHandlers = { onPlay, onPause, onEnded, onError }; - - // Start playback with error handling - try { - const playPromise = audioElement.play(); - - if (playPromise !== undefined) { - await playPromise.catch(error => { - // Ignore abort errors when switching between streams - if (error.name !== 'AbortError') { - throw error; - } - }); - } - - isPlaying = true; - } catch (error) { - // Only log unexpected errors - if (error.name !== 'AbortError') { - console.error('[streams-ui] Error during playback:', error); - throw error; - } - } - - } catch (error) { - console.error('[streams-ui] Error loading/playing audio:', error); - if (playPauseBtn) { - updatePlayPauseButton(playPauseBtn, false); - } - - // Only show error if it's not an abort error - if (error.name !== 'AbortError' && typeof showToast === 'function') { - showToast('Error playing audio. Please try again.', 'error'); - } - } +// --- Shared Audio Player Integration --- +import { SharedAudioPlayer } from './shared-audio-player.js'; + +function getStreamUrl(uid) { + return `/audio/${encodeURIComponent(uid)}/stream.opus`; +} + +function updatePlayPauseButton(button, isPlaying) { + if (button) button.textContent = isPlaying ? '⏸️' : '▶️'; + // Optionally, update other UI elements here +} +// Only this definition should remain; remove any other updatePlayPauseButton functions. + +const streamsPlayer = new SharedAudioPlayer({ + playerType: 'streams', + getStreamUrl, + onUpdateButton: updatePlayPauseButton +}); + +// Load and play audio using SharedAudioPlayer +function loadAndPlayAudio(uid, playPauseBtn) { + streamsPlayer.play(uid, playPauseBtn); } // Handle audio ended event @@ -688,7 +591,7 @@ function handleAudioEnded() { // Clean up audio resources function cleanupAudio() { - console.log('[streams-ui] Cleaning up audio resources'); + // Debug messages disabled // Clean up Web Audio API resources if they exist if (audioSource) { @@ -756,32 +659,14 @@ if (streamList) { e.preventDefault(); const uid = playPauseBtn.dataset.uid; - if (!uid) { - return; + if (!uid) return; + + // Toggle play/pause using SharedAudioPlayer + if (streamsPlayer.currentUid === uid && streamsPlayer.audioElement && !streamsPlayer.audioElement.paused && !streamsPlayer.audioElement.ended) { + streamsPlayer.pause(); + } else { + await loadAndPlayAudio(uid, playPauseBtn); } - - // If clicking the currently playing button, toggle pause/play - if (currentUid === uid) { - if (isPlaying) { - await audioElement.pause(); - isPlaying = false; - updatePlayPauseButton(playPauseBtn, false); - } else { - try { - await audioElement.play(); - isPlaying = true; - updatePlayPauseButton(playPauseBtn, true); - } catch (error) { - // If resume fails, try reloading the audio - await loadAndPlayAudio(uid, playPauseBtn); - } - } - return; - } - - // If a different stream is playing, stop it and start the new one - stopPlayback(); - await loadAndPlayAudio(uid, playPauseBtn); }); } diff --git a/static/style.css b/static/style.css index 299721a..9e06803 100644 --- a/static/style.css +++ b/static/style.css @@ -490,7 +490,7 @@ nav#guest-dashboard.dashboard-nav { box-shadow: 0 4px 20px rgba(0, 0, 0, 0.4), 0 0 0 1px rgba(255, 255, 
255, 0.1); margin-top: 0.8em; opacity: 0; - animation: fadeInOut 3.5s both; + animation: fadeInOut 15s both; font-size: 1.1em; pointer-events: auto; border: 1px solid rgba(255, 255, 255, 0.1); @@ -580,7 +580,7 @@ nav#guest-dashboard.dashboard-nav { } /* Quota meter and uploaded files section */ -#quota-meter { +#uploaded-files { background: var(--surface); /* Match article background */ border: 1px solid var(--border); border-radius: 8px; @@ -593,19 +593,19 @@ nav#guest-dashboard.dashboard-nav { color: var(--text-light); } -#quota-meter { +#uploaded-files { transition: all 0.2s ease; } -#quota-meter h4 { +#uploaded-files h3 { font-weight: 400; text-align: center; - margin: 1.5rem 0 0.75rem; + margin: 0 0 27px 0; color: var(--text); } -#quota-meter > h4 { - margin-top: 1.5rem; +#uploaded-files > h3 { + margin: 0 0 27px 0; text-align: center; font-weight: 400; color: var(--text); @@ -732,7 +732,7 @@ nav#guest-dashboard.dashboard-nav { border-bottom: none; } -#quota-meter:hover { +#uploaded-files:hover { transform: translateY(-2px); box-shadow: 0 6px 16px rgba(0, 0, 0, 0.15); } @@ -740,7 +740,7 @@ nav#guest-dashboard.dashboard-nav { .quota-meter { font-size: 0.9em; color: var(--text-muted); - margin: 0 0 1rem 0; + margin: 1rem 0 0 0; } #file-list { diff --git a/static/toast.js b/static/toast.js index aa80272..0d3576b 100644 --- a/static/toast.js +++ b/static/toast.js @@ -14,6 +14,6 @@ export function showToast(message) { setTimeout(() => { toast.remove(); // Do not remove the container; let it persist for stacking - }, 3500); + }, 15000); } diff --git a/static/uid-validator.js b/static/uid-validator.js new file mode 100644 index 0000000..bd92200 --- /dev/null +++ b/static/uid-validator.js @@ -0,0 +1,169 @@ +/** + * UID Validation Utility + * + * Provides comprehensive UID format validation and sanitization + * to ensure all UIDs are properly formatted as email addresses. 
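+ *
+ * Usage sketch (illustrative only; the expected results below are inferred
+ * from the validator code in this file, not taken from test output):
+ *
+ *   import { uidValidator } from './uid-validator.js';
+ *   uidValidator.isValidFormat('devuser');     // { valid: false, code: 'INVALID_PATTERN', ... }
+ *   uidValidator.sanitize('User@Example.COM'); // 'user@example.com'
+ *   uidValidator.needsMigration('devuser');    // true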
+ */ + +export class UidValidator { + constructor() { + // RFC 5322 compliant email regex (basic validation) + this.emailRegex = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; + + // Common invalid patterns to check against + this.invalidPatterns = [ + /^devuser$/i, // Legacy username pattern + /^user\d+$/i, // Generic user patterns + /^test$/i, // Test user + /^admin$/i, // Admin user + /^\d+$/, // Pure numeric + /^[a-zA-Z]+$/, // Pure alphabetic (no @ symbol) + ]; + } + + /** + * Validate UID format - must be a valid email address + */ + isValidFormat(uid) { + if (!uid || typeof uid !== 'string') { + return { + valid: false, + error: 'UID must be a non-empty string', + code: 'INVALID_TYPE' + }; + } + + const trimmed = uid.trim(); + if (trimmed.length === 0) { + return { + valid: false, + error: 'UID cannot be empty', + code: 'EMPTY_UID' + }; + } + + // Check against invalid patterns + for (const pattern of this.invalidPatterns) { + if (pattern.test(trimmed)) { + return { + valid: false, + error: `UID matches invalid pattern: ${pattern}`, + code: 'INVALID_PATTERN' + }; + } + } + + // Validate email format + if (!this.emailRegex.test(trimmed)) { + return { + valid: false, + error: 'UID must be a valid email address', + code: 'INVALID_EMAIL_FORMAT' + }; + } + + return { + valid: true, + sanitized: trimmed.toLowerCase() + }; + } + + /** + * Sanitize and validate UID - ensures consistent format + */ + sanitize(uid) { + const validation = this.isValidFormat(uid); + + if (!validation.valid) { + console.error('[UID-VALIDATOR] Validation failed:', validation.error, { uid }); + return null; + } + + return validation.sanitized; + } + + /** + * Validate and throw error if invalid + */ + validateOrThrow(uid, context = 'UID') { + const validation = this.isValidFormat(uid); + + if (!validation.valid) { + throw new Error(`${context} validation failed: ${validation.error} (${validation.code})`); + } + + return validation.sanitized; + } + + /** + * Check if a UID needs migration (legacy format) + */ + needsMigration(uid) { + if (!uid || typeof uid !== 'string') { + return false; + } + + const trimmed = uid.trim(); + + // Check if it's already a valid email + if (this.emailRegex.test(trimmed)) { + return false; + } + + // Check if it matches known legacy patterns + for (const pattern of this.invalidPatterns) { + if (pattern.test(trimmed)) { + return true; + } + } + + return true; // Any non-email format needs migration + } + + /** + * Get validation statistics for debugging + */ + getValidationStats(uids) { + const stats = { + total: uids.length, + valid: 0, + invalid: 0, + needsMigration: 0, + errors: {} + }; + + uids.forEach(uid => { + const validation = this.isValidFormat(uid); + + if (validation.valid) { + stats.valid++; + } else { + stats.invalid++; + const code = validation.code || 'UNKNOWN'; + stats.errors[code] = (stats.errors[code] || 0) + 1; + } + + if (this.needsMigration(uid)) { + stats.needsMigration++; + } + }); + + return stats; + } +} + +// Create singleton instance +export const uidValidator = new UidValidator(); + +// Legacy exports for backward compatibility +export function validateUidFormat(uid) { + return uidValidator.isValidFormat(uid).valid; +} + +export function sanitizeUid(uid) { + return uidValidator.sanitize(uid); +} + +export function validateUidOrThrow(uid, context) { + return uidValidator.validateOrThrow(uid, context); +} diff --git a/static/upload.js b/static/upload.js index 
be33409..670c02c 100644 --- a/static/upload.js +++ b/static/upload.js @@ -1,266 +1,178 @@ -// upload.js — Frontend file upload handler - import { showToast } from "./toast.js"; import { playBeep } from "./sound.js"; -import { logToServer } from "./logger.js"; -// Initialize upload system when DOM is loaded document.addEventListener('DOMContentLoaded', () => { + // This module handles the file upload functionality, including drag-and-drop, + // progress indication, and post-upload actions like refreshing the file list. + + // DOM elements are fetched once the DOM is ready const dropzone = document.getElementById("user-upload-area"); - if (dropzone) { - dropzone.setAttribute("aria-label", "Upload area. Click or drop an audio file to upload."); - } const fileInput = document.getElementById("fileInputUser"); - const fileInfo = document.createElement("div"); - fileInfo.id = "file-info"; - fileInfo.style.textAlign = "center"; - if (fileInput) { - fileInput.parentNode.insertBefore(fileInfo, fileInput.nextSibling); - } - const streamInfo = document.getElementById("stream-info"); - const streamUrlEl = document.getElementById("streamUrl"); - const spinner = document.getElementById("spinner") || { style: { display: 'none' } }; - let abortController; + const fileList = document.getElementById("file-list"); - // Upload function - const upload = async (file) => { - if (abortController) abortController.abort(); - abortController = new AbortController(); - fileInfo.innerText = `📁 ${file.name} • ${(file.size / 1024 / 1024).toFixed(2)} MB`; - if (file.size > 100 * 1024 * 1024) { - showToast("❌ File too large. Please upload a file smaller than 100MB."); - return; - } - spinner.style.display = "block"; - showToast('📡 Uploading…'); + // Early exit if critical UI elements are missing + if (!dropzone || !fileInput || !fileList) { + // Debug messages disabled + return; + } - fileInput.disabled = true; - dropzone.classList.add("uploading"); - const formData = new FormData(); - const sessionUid = localStorage.getItem("uid"); - formData.append("uid", sessionUid); - formData.append("file", file); + // Attach all event listeners + initializeUploadListeners(); - const res = await fetch("/upload", { - signal: abortController.signal, - method: "POST", - body: formData, - }); - - let data, parseError; - try { - data = await res.json(); - } catch (e) { - parseError = e; - } - if (!data) { - showToast("❌ Upload failed: " + (parseError && parseError.message ? 
parseError.message : "Unknown error")); - spinner.style.display = "none"; - fileInput.disabled = false; - dropzone.classList.remove("uploading"); - return; - } - if (res.ok) { - if (data.quota && data.quota.used_mb !== undefined) { - const bar = document.getElementById("quota-bar"); - const text = document.getElementById("quota-text"); - const quotaSec = document.getElementById("quota-meter"); - if (bar && text && quotaSec) { - quotaSec.hidden = false; - const used = parseFloat(data.quota.used_mb); - bar.value = used; - bar.max = 100; - text.textContent = `${used.toFixed(1)} MB used`; - } - } - spinner.style.display = "none"; - fileInput.disabled = false; - dropzone.classList.remove("uploading"); - showToast("✅ Upload successful."); - - // Refresh the audio player and file list - const uid = localStorage.getItem("uid"); - if (uid) { - try { - if (window.loadProfileStream) { - await window.loadProfileStream(uid); - } - // Refresh the file list - if (window.fetchAndDisplayFiles) { - await window.fetchAndDisplayFiles(uid); - } - - // Refresh the stream list to update the last update time - if (window.refreshStreamList) { - await window.refreshStreamList(); - } - } catch (e) { - console.error('Failed to refresh:', e); - } - } - - playBeep(432, 0.25, "sine"); - } else { - if (streamInfo) streamInfo.hidden = true; - if (spinner) spinner.style.display = "none"; - if ((data.detail || data.error || "").includes("music")) { - showToast("🎵 Upload rejected: singing or music detected."); - } else { - showToast(`❌ Upload failed: ${data.detail || data.error}`); - } - - if (fileInput) fileInput.value = null; - if (dropzone) dropzone.classList.remove("uploading"); - if (fileInput) fileInput.disabled = false; - if (streamInfo) streamInfo.classList.remove("visible", "slide-in"); - } - }; - - // Function to fetch and display uploaded files - async function fetchAndDisplayFiles(uidFromParam) { - console.log('[UPLOAD] fetchAndDisplayFiles called with uid:', uidFromParam); - - // Get the file list element - const fileList = document.getElementById('file-list'); - if (!fileList) { - const errorMsg = 'File list element not found in DOM'; - console.error(errorMsg); - return showErrorInUI(errorMsg); - } - - // Get UID from parameter, localStorage, or cookie - const uid = uidFromParam || localStorage.getItem('uid') || getCookie('uid'); - const authToken = localStorage.getItem('authToken'); - const headers = { - 'Accept': 'application/json', - }; - - // Include auth token in headers if available, but don't fail if it's not - // The server should handle both token-based and UID-based auth - if (authToken) { - headers['Authorization'] = `Bearer ${authToken}`; - } else { - console.debug('[UPLOAD] No auth token available, using UID-only authentication'); - } - - console.log('[UPLOAD] Auth state - UID:', uid, 'Token exists:', !!authToken); + /** + * Main upload function + * @param {File} file - The file to upload + */ + async function upload(file) { + // Get user ID from localStorage or cookie + const uid = localStorage.getItem('uid') || getCookie('uid'); if (!uid) { - console.error('[UPLOAD] No UID found in any source'); - fileList.innerHTML = '
<li class="error">User session expired. Please refresh the page.</li>';
+            // Debug messages disabled
+            showToast("You must be logged in to upload files.", "error");
             return;
         }

-        // Log the authentication method being used
-        if (!authToken) {
-            console.debug('[UPLOAD] No auth token found, using UID-only authentication');
-        } else {
-            console.debug('[UPLOAD] Using token-based authentication');
-        }
+        // Debug messages disabled

-        // Show loading state
-        fileList.innerHTML = '<li class="loading">Loading files...</li>';
+        // Create and display the upload status indicator
+        const statusDiv = createStatusIndicator(file.name);
+        fileList.prepend(statusDiv);
+
+        const progressBar = statusDiv.querySelector('.progress-bar');
+        const statusText = statusDiv.querySelector('.status-text');
+
+        const formData = new FormData();
+        formData.append("file", file);
+        formData.append("uid", uid);

         try {
-            console.log(`[DEBUG] Fetching files for user: ${uid}`);
-            const response = await fetch(`/me/${uid}`, {
+            const response = await fetch(`/upload`, {
+                method: "POST",
+                body: formData,
                 headers: {
-                    'Authorization': authToken ? `Bearer ${authToken}` : '',
-                    'Content-Type': 'application/json',
+                    'Accept': 'application/json',
                 },
             });

-            console.log('[DEBUG] Response status:', response.status, response.statusText);
-
             if (!response.ok) {
-                const errorText = await response.text();
-                const errorMsg = `Failed to fetch files: ${response.status} ${response.statusText} - ${errorText}`;
-                console.error(`[ERROR] ${errorMsg}`);
-                throw new Error(errorMsg);
-            }
-
-            const data = await response.json();
-            console.log('[DEBUG] Received files data:', data);
-
-            if (!data.files) {
-                throw new Error('Invalid response format: missing files array');
-            }
-
-            if (data.files.length > 0) {
-                // Sort files by name
-                const sortedFiles = [...data.files].sort((a, b) => a.name.localeCompare(b.name));
-
-                fileList.innerHTML = sortedFiles.map(file => {
-                    const sizeMB = (file.size / (1024 * 1024)).toFixed(2);
-                    const displayName = file.original_name || file.name;
-                    const isRenamed = file.original_name && file.original_name !== file.name;
-                    return `
-                        <li class="file-item">
-                            <span class="file-name">${displayName}</span>
-                            ${isRenamed ? `
-                            <span class="file-original-name">(${file.name})</span>` :
-                            ``}
-                            <span class="file-size">${sizeMB} MB</span>
-                        </li>
-                    `;
-                }).join('');
-            } else {
-                fileList.innerHTML = '<li class="empty">No files uploaded yet</li>';
-            }
-
-            // Delete button handling is now managed by dashboard.js
-
-            // Update quota display if available
-            if (data.quota !== undefined) {
-                const bar = document.getElementById('quota-bar');
-                const text = document.getElementById('quota-text');
-                const quotaSec = document.getElementById('quota-meter');
-                if (bar && text && quotaSec) {
-                    quotaSec.hidden = false;
-                    bar.value = data.quota;
-                    bar.max = 100;
-                    text.textContent = `${data.quota.toFixed(1)} MB`;
-                }
+                const errorData = await response.json().catch(() => ({ detail: 'Upload failed with non-JSON response.' }));
+                throw new Error(errorData.detail || 'Unknown upload error');
             }
+
+            const result = await response.json();
+            // Debug messages disabled
+            playBeep(800, 0.2); // Success beep - higher frequency
+
+            // Update UI to show success
+            statusText.textContent = 'Success!';
+            progressBar.style.width = '100%';
+            progressBar.style.backgroundColor = 'var(--success-color)';
+
+            // Remove the status indicator after a short delay
+            setTimeout(() => {
+                statusDiv.remove();
+            }, 2000);
+
+            // --- Post-Upload Actions ---
+            await postUploadActions(uid);
+
         } catch (error) {
-            const errorMessage = `Error loading file list: ${error.message || 'Unknown error'}`;
-            console.error('[ERROR]', errorMessage, error);
-            showErrorInUI(errorMessage, fileList);
-        }
-
-        // Helper function to show error messages in the UI
-        function showErrorInUI(message, targetElement = null) {
-            const errorHtml = `
-                <li class="upload-error">
-                    <strong>Error loading files</strong>
-                    <div>${message}</div>
-                    <small>Check browser console for details</small>
-                </li>
    - `; - - if (targetElement) { - targetElement.innerHTML = errorHtml; - } else { - // If no target element, try to find it - const fileList = document.getElementById('file-list'); - if (fileList) fileList.innerHTML = errorHtml; - } + // Debug messages disabled + playBeep(200, 0.5); // Error beep - lower frequency, longer duration + statusText.textContent = `Error: ${error.message}`; + progressBar.style.backgroundColor = 'var(--error-color)'; + statusDiv.classList.add('upload-error'); } } - // Helper function to get cookie value by name + /** + * Actions to perform after a successful upload. + * @param {string} uid - The user's ID + */ + async function postUploadActions(uid) { + // 1. Refresh the user's personal stream if the function is available + if (window.loadProfileStream) { + await window.loadProfileStream(uid); + } + // 2. Refresh the file list by re-fetching and then displaying. + if (window.fetchAndDisplayFiles) { + // Use email-based UID for file operations if available, fallback to uid + const fileOperationUid = localStorage.getItem('uid') || uid; // uid is now email-based + // Debug messages disabled + await window.fetchAndDisplayFiles(fileOperationUid); + } + // 3. Update quota display after upload + if (window.updateQuotaDisplay) { + const quotaUid = localStorage.getItem('uid') || uid; + // Debug messages disabled + await window.updateQuotaDisplay(quotaUid); + } + // 4. Refresh the public stream list to update the last update time + if (window.refreshStreamList) { + await window.refreshStreamList(); + } + } + + /** + * Creates the DOM element for the upload status indicator. + * @param {string} fileName - The name of the file being uploaded. + * @returns {HTMLElement} + */ + function createStatusIndicator(fileName) { + const statusDiv = document.createElement('div'); + statusDiv.className = 'upload-status-indicator'; + statusDiv.innerHTML = ` +
            <div class="upload-status">
+                <span class="file-name">${fileName}</span>
+                <span class="status-text">Uploading...</span>
+            </div>
+            <div class="progress-bar-container">
+                <div class="progress-bar" style="width: 0%"></div>
+            </div>
    + `; + return statusDiv; + } + + /** + * Initializes all event listeners for the upload UI. + */ + function initializeUploadListeners() { + dropzone.addEventListener("click", () => { + fileInput.click(); + }); + + dropzone.addEventListener("dragover", (e) => { + e.preventDefault(); + dropzone.classList.add("dragover"); + }); + + dropzone.addEventListener("dragleave", () => { + dropzone.classList.remove("dragover"); + }); + + dropzone.addEventListener("drop", (e) => { + e.preventDefault(); + dropzone.classList.remove("dragover"); + const file = e.dataTransfer.files[0]; + if (file) { + upload(file); + } + }); + + fileInput.addEventListener("change", (e) => { + const file = e.target.files[0]; + if (file) { + upload(file); + } + }); + } + + /** + * Helper function to get a cookie value by name. + * @param {string} name - The name of the cookie. + * @returns {string|null} + */ function getCookie(name) { const value = `; ${document.cookie}`; const parts = value.split(`; ${name}=`); @@ -268,35 +180,6 @@ document.addEventListener('DOMContentLoaded', () => { return null; } - // Export functions for use in other modules + // Make the upload function globally accessible if needed by other scripts window.upload = upload; - window.fetchAndDisplayFiles = fetchAndDisplayFiles; - - if (dropzone && fileInput) { - dropzone.addEventListener("click", () => { - console.log("[DEBUG] Dropzone clicked"); - fileInput.click(); - console.log("[DEBUG] fileInput.click() called"); - }); - dropzone.addEventListener("dragover", (e) => { - e.preventDefault(); - dropzone.classList.add("dragover"); - dropzone.style.transition = "background-color 0.3s ease"; - }); - dropzone.addEventListener("dragleave", () => { - dropzone.classList.remove("dragover"); - }); - dropzone.addEventListener("drop", (e) => { - dropzone.classList.add("pulse"); - setTimeout(() => dropzone.classList.remove("pulse"), 400); - e.preventDefault(); - dropzone.classList.remove("dragover"); - const file = e.dataTransfer.files[0]; - if (file) upload(file); - }); - fileInput.addEventListener("change", (e) => { - const file = e.target.files[0]; - if (file) upload(file); - }); - } }); diff --git a/upload.py b/upload.py index 1df43f6..7467462 100644 --- a/upload.py +++ b/upload.py @@ -23,7 +23,8 @@ DATA_ROOT = Path("./data") @limiter.limit("5/minute") @router.post("/upload") -async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)): +def upload(request: Request, uid: str = Form(...), file: UploadFile = Form(...)): + # Import here to avoid circular imports from log import log_violation import time @@ -32,183 +33,259 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Starting upload of {file.filename}") try: - # First, verify the user exists and is confirmed - user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first() - if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"): - user = user[0] - - log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}") - - if not user or not hasattr(user, "confirmed") or not user.confirmed: - raise HTTPException(status_code=403, detail="Account not confirmed") - - # Check quota before doing any file operations - quota = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0) - if quota.storage_bytes 
>= 100 * 1024 * 1024: - raise HTTPException(status_code=400, detail="Quota exceeded") - - # Create user directory if it doesn't exist - user_dir = DATA_ROOT / uid - user_dir.mkdir(parents=True, exist_ok=True) - - # Generate a unique filename for the processed file first - import uuid - unique_name = f"{uuid.uuid4()}.opus" - raw_ext = file.filename.split(".")[-1].lower() - raw_path = user_dir / ("raw." + raw_ext) - processed_path = user_dir / unique_name - - # Clean up any existing raw files first (except the one we're about to create) - for old_file in user_dir.glob('raw.*'): + # Use the database session context manager to handle the session + with get_db() as db: try: - if old_file != raw_path: # Don't delete the file we're about to create - old_file.unlink(missing_ok=True) - log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}") - except Exception as e: - log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}") - - # Save the uploaded file temporarily - log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}") - try: - with open(raw_path, "wb") as f: - content = await file.read() - if not content: - raise ValueError("Uploaded file is empty") - f.write(content) - log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}") - except Exception as e: - log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}") - raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}") - - # Ollama music/singing check is disabled for this release - log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled") - - try: - convert_to_opus(str(raw_path), str(processed_path)) - except Exception as e: - raw_path.unlink(missing_ok=True) - raise HTTPException(status_code=500, detail=str(e)) - - original_size = raw_path.stat().st_size - raw_path.unlink(missing_ok=True) # cleanup - - # First, verify the file was created and has content - if not processed_path.exists() or processed_path.stat().st_size == 0: - raise HTTPException(status_code=500, detail="Failed to process audio file") - - # Concatenate all .opus files in random order to stream.opus for public playback - # This is now done after the file is in its final location with log ID - from concat_opus import concat_opus_files - def update_stream_opus(): - try: - concat_opus_files(user_dir, user_dir / "stream.opus") - except Exception as e: - # fallback: just use the latest processed file if concat fails - import shutil - stream_path = user_dir / "stream.opus" - shutil.copy2(processed_path, stream_path) - log_violation("STREAM_UPDATE", request.client.host, uid, - f"[fallback] Updated stream.opus with {processed_path}") - - # We'll call this after the file is in its final location - - # Get the final file size - size = processed_path.stat().st_size - - # Start a transaction - try: - # Create a log entry with the original filename - log = UploadLog( - uid=uid, - ip=request.client.host, - filename=file.filename, # Store original filename - processed_filename=unique_name, # Store the processed filename - size_bytes=size - ) - db.add(log) - db.flush() # Get the log ID without committing - - # Rename the processed file to include the log ID for better tracking - processed_with_id = user_dir / f"{log.id}_{unique_name}" - if processed_path.exists(): - # 
First check if there's already a file with the same UUID but different prefix - for existing_file in user_dir.glob(f"*_{unique_name}"): - if existing_file != processed_path: - log_violation("CLEANUP", request.client.host, uid, - f"[UPLOAD] Removing duplicate file: {existing_file}") - existing_file.unlink(missing_ok=True) + # First, verify the user exists and is confirmed + user = db.query(User).filter( + (User.username == uid) | (User.email == uid) + ).first() - # Now do the rename - if processed_path != processed_with_id: - if processed_with_id.exists(): - processed_with_id.unlink(missing_ok=True) - processed_path.rename(processed_with_id) - processed_path = processed_with_id + if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"): + user = user[0] + if not user: + log_violation("UPLOAD", request.client.host, uid, f"User {uid} not found") + raise HTTPException(status_code=404, detail="User not found") - # Only clean up raw.* files, not previously uploaded opus files - for old_temp_file in user_dir.glob('raw.*'): + log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}") + + # Check if user is confirmed + if not hasattr(user, 'confirmed') or not user.confirmed: + raise HTTPException(status_code=403, detail="Account not confirmed") + + # Use user.email as the proper UID for quota and directory operations + user_email = user.email + quota = db.get(UserQuota, user_email) or UserQuota(uid=user_email, storage_bytes=0) + + if quota.storage_bytes >= 100 * 1024 * 1024: + raise HTTPException(status_code=400, detail="Quota exceeded") + + # Create user directory using email (proper UID) - not the uid parameter which could be username + user_dir = DATA_ROOT / user_email + user_dir.mkdir(parents=True, exist_ok=True) + + # Generate a unique filename for the processed file first + import uuid + unique_name = f"{uuid.uuid4()}.opus" + raw_ext = file.filename.split(".")[-1].lower() + raw_path = user_dir / ("raw." 
+ raw_ext) + processed_path = user_dir / unique_name + + # Clean up any existing raw files first (except the one we're about to create) + for old_file in user_dir.glob('raw.*'): try: - old_temp_file.unlink(missing_ok=True) - log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}") + if old_file != raw_path: # Don't delete the file we're about to create + old_file.unlink(missing_ok=True) + log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}") except Exception as e: - log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}") - - # Get or create quota - quota = db.query(UserQuota).filter(UserQuota.uid == uid).first() - if not quota: - quota = UserQuota(uid=uid, storage_bytes=0) - db.add(quota) + log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}") - # Update quota with the new file size - quota.storage_bytes = sum( - f.stat().st_size - for f in user_dir.glob('*.opus') - if f.name != 'stream.opus' and f != processed_path - ) + size - - # Update public streams - update_public_streams(uid, quota.storage_bytes, db) - - # Commit the transaction - db.commit() - - # Now that the transaction is committed and files are in their final location, - # update the stream.opus file to include all files - update_stream_opus() - - except Exception as e: - db.rollback() - # Clean up the processed file if something went wrong - if processed_path.exists(): - processed_path.unlink(missing_ok=True) - raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") + # Save the uploaded file temporarily + log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}") + + try: + with open(raw_path, "wb") as f: + content = file.file.read() + if not content: + raise ValueError("Uploaded file is empty") + f.write(content) + log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}") + + # EARLY DB RECORD CREATION: after upload completes, before processing + early_log = UploadLog( + uid=user_email, + ip=request.client.host, + filename=file.filename, # original filename from user + processed_filename=None, # not yet processed + size_bytes=None # not yet known + ) + db.add(early_log) + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE FLUSH] Before db.flush() after early_log add") + db.flush() + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE FLUSH] After db.flush() after early_log add") + db.commit() + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[FORCE COMMIT] After db.commit() after early_log add") + early_log_id = early_log.id + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[DEBUG] Early UploadLog created: id={early_log_id}, filename={file.filename}, UploadLog.filename={early_log.filename}") + except Exception as e: + log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}") + raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}") - return { - "filename": file.filename, - "original_size": round(original_size / 1024, 1), - "quota": { - "used_mb": round(quota.storage_bytes / (1024 * 1024), 2) - } - } + # Ollama music/singing check is disabled for this release + log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled") + + 
try: + convert_to_opus(str(raw_path), str(processed_path)) + except Exception as e: + raw_path.unlink(missing_ok=True) + raise HTTPException(status_code=500, detail=str(e)) + + original_size = raw_path.stat().st_size + raw_path.unlink(missing_ok=True) # cleanup + + # First, verify the file was created and has content + if not processed_path.exists() or processed_path.stat().st_size == 0: + raise HTTPException(status_code=500, detail="Failed to process audio file") + + # Get the final file size + size = processed_path.stat().st_size + + # Concatenate all .opus files in random order to stream.opus for public playback + # This is now done after the file is in its final location with log ID + from concat_opus import concat_opus_files + + def update_stream_opus(): + try: + concat_opus_files(user_dir, user_dir / "stream.opus") + except Exception as e: + # fallback: just use the latest processed file if concat fails + import shutil + stream_path = user_dir / "stream.opus" + shutil.copy2(processed_path, stream_path) + log_violation("STREAM_UPDATE", request.client.host, uid, + f"[fallback] Updated stream.opus with {processed_path}") + + # Start a transaction + try: + # Update the early DB record with processed filename and size + log = db.get(UploadLog, early_log_id) + log.processed_filename = unique_name + log.size_bytes = size + db.add(log) + db.flush() # Ensure update is committed + + # Assert that log.filename is still the original filename, never overwritten + if log.filename is None or (log.filename.endswith('.opus') and log.filename == log.processed_filename): + log_violation("UPLOAD_ERROR", request.client.host, uid, + f"[ASSERTION FAILED] UploadLog.filename was overwritten! id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}") + raise RuntimeError(f"UploadLog.filename was overwritten! 
id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}") + else: + log_violation("UPLOAD_DEBUG", request.client.host, uid, + f"[ASSERTION OK] After update: id={log.id}, filename={log.filename}, processed_filename={log.processed_filename}") + + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[COMMIT] Committing UploadLog for id={log.id}") + db.commit() + log_violation("UPLOAD_DEBUG", request.client.host, uid, f"[COMMIT OK] UploadLog committed for id={log.id}") + + # Rename the processed file to include the log ID for better tracking + processed_with_id = user_dir / f"{log.id}_{unique_name}" + + if processed_path.exists(): + # First check if there's already a file with the same UUID but different prefix + for existing_file in user_dir.glob(f"*_{unique_name}"): + if existing_file != processed_path: + log_violation("CLEANUP", request.client.host, uid, + f"[UPLOAD] Removing duplicate file: {existing_file}") + existing_file.unlink(missing_ok=True) + + # Now do the rename + if processed_path != processed_with_id: + if processed_with_id.exists(): + processed_with_id.unlink(missing_ok=True) + processed_path.rename(processed_with_id) + processed_path = processed_with_id + + # Only clean up raw.* files, not previously uploaded opus files + for old_temp_file in user_dir.glob('raw.*'): + try: + old_temp_file.unlink(missing_ok=True) + log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}") + except Exception as e: + log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}") + + # Get or create quota + quota = db.query(UserQuota).filter(UserQuota.uid == user_email).first() + if not quota: + quota = UserQuota(uid=user_email, storage_bytes=0) + db.add(quota) + + # Update quota with the new file size + quota.storage_bytes = sum( + f.stat().st_size + for f in user_dir.glob('*.opus') + if f.name != 'stream.opus' and f != processed_path + ) + size + + # Update public streams + update_public_streams(user_email, quota.storage_bytes, db) + + # The context manager will handle commit/rollback + # Now that the transaction is committed and files are in their final location, + # update the stream.opus file to include all files + update_stream_opus() + + return { + "filename": file.filename, + "original_size": round(original_size / 1024, 1), + "quota": { + "used_mb": round(quota.storage_bytes / (1024 * 1024), 2) + } + } + + except HTTPException as e: + # Re-raise HTTP exceptions as they are already properly formatted + db.rollback() + raise e + + except Exception as e: + # Log the error and return a 500 response + db.rollback() + import traceback + tb = traceback.format_exc() + # Try to log the error + try: + log_violation("UPLOAD_ERROR", request.client.host, uid, f"Error processing upload: {str(e)}\n{tb}") + except Exception: + pass # If logging fails, continue with the error response + + # Clean up the processed file if it exists + if 'processed_path' in locals() and processed_path.exists(): + processed_path.unlink(missing_ok=True) + + raise HTTPException(status_code=500, detail=f"Error processing upload: {str(e)}") + + except HTTPException as e: + # Re-raise HTTP exceptions as they are already properly formatted + db.rollback() + raise e + + except Exception as e: + # Log the error and return a 500 response + db.rollback() + import traceback + tb = traceback.format_exc() + # Try to log the error + try: + log_violation("UPLOAD_ERROR", request.client.host, uid, f"Error 
processing upload: {str(e)}\n{tb}") + except Exception: + pass # If logging fails, continue with the error response + + # Clean up the processed file if it exists + if 'processed_path' in locals() and processed_path.exists(): + processed_path.unlink(missing_ok=True) + + raise HTTPException(status_code=500, detail=f"Error processing upload: {str(e)}") + except HTTPException as e: - # Already a JSON response, just re-raise + # Re-raise HTTP exceptions as they are already properly formatted raise e + except Exception as e: + # Catch any other exceptions that might occur outside the main processing block import traceback tb = traceback.format_exc() - # Log and return a JSON error try: - log_violation("UPLOAD", request.client.host, uid, f"Unexpected error: {type(e).__name__}: {str(e)}\n{tb}") - except Exception: - pass - return {"detail": f"Server error: {type(e).__name__}: {str(e)}"} + log_violation("UPLOAD_ERROR", request.client.host, uid, f"Unhandled error in upload handler: {str(e)}\n{tb}") + except: + pass # If logging fails, continue with the error response + raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}") def update_public_streams(uid: str, storage_bytes: int, db: Session): """Update the public streams list in the database with the latest user upload info""" try: - # Get the user's info - user = db.query(User).filter(User.username == uid).first() + # Get the user's info - uid is now email-based + user = db.query(User).filter(User.email == uid).first() if not user: print(f"[WARNING] User {uid} not found when updating public streams") return @@ -221,7 +298,6 @@ def update_public_streams(uid: str, storage_bytes: int, db: Session): # Update the public stream info public_stream.username = user.username - public_stream.display_name = user.display_name or user.username public_stream.storage_bytes = storage_bytes public_stream.last_updated = datetime.utcnow()
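
Note on the session handling above: the reworked upload() handler wraps all of its
database work in `with get_db() as db:`. This patch does not show the database.py
side of that change, so the following is only a minimal sketch of the kind of
context manager the call sites appear to assume; `SessionLocal`, `engine`, and the
DSN are placeholder names, not taken from this patch:

    # Sketch only: a commit-on-success / rollback-on-error session context manager.
    from contextlib import contextmanager
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///./app.db")  # placeholder DSN
    SessionLocal = sessionmaker(bind=engine)

    @contextmanager
    def get_db():
        db = SessionLocal()
        try:
            yield db
            db.commit()   # mirrors the "context manager will handle commit/rollback" comment
        except Exception:
            db.rollback()
            raise
        finally:
            db.close()

Under a manager like this, the explicit db.commit() and db.rollback() calls that the
handler still makes are redundant but harmless.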