diff --git a/alembic/versions/8be4811023d8_add_display_name_to_user.py b/alembic/versions/8be4811023d8_add_display_name_to_user.py
new file mode 100644
index 0000000..8f85a1e
--- /dev/null
+++ b/alembic/versions/8be4811023d8_add_display_name_to_user.py
@@ -0,0 +1,49 @@
+"""add_display_name_to_user
+
+Revision ID: 8be4811023d8
+Revises: 0df481ee920b
+Create Date: 2025-07-19 19:46:01.129412
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision: str = '8be4811023d8'
+down_revision: Union[str, Sequence[str], None] = '0df481ee920b'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(op.f('dbsession_user_id_fkey'), 'dbsession', type_='foreignkey')
+    op.create_foreign_key(op.f('dbsession_user_id_fkey'), 'dbsession', 'user', ['user_id'], ['username'])
+    op.alter_column('publicstream', 'storage_bytes',
+               existing_type=sa.INTEGER(),
+               nullable=False,
+               existing_server_default=sa.text('0'))
+    op.create_index(op.f('ix_publicstream_username'), 'publicstream', ['username'], unique=False)
+    op.drop_column('publicstream', 'size')
+    op.add_column('user', sa.Column('display_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('user', 'display_name')
+    op.add_column('publicstream', sa.Column('size', sa.INTEGER(), autoincrement=False, nullable=False))
+    op.drop_index(op.f('ix_publicstream_username'), table_name='publicstream')
+    op.alter_column('publicstream', 'storage_bytes',
+               existing_type=sa.INTEGER(),
+               nullable=True,
+               existing_server_default=sa.text('0'))
+    op.drop_constraint(op.f('dbsession_user_id_fkey'), 'dbsession', type_='foreignkey')
+    op.create_foreign_key(op.f('dbsession_user_id_fkey'), 'dbsession', 'user', ['user_id'], ['username'], ondelete='CASCADE')
+    # ### end Alembic commands ###
diff --git a/auth_router.py b/auth_router.py
index ac0596c..da43959 100644
--- a/auth_router.py
+++ b/auth_router.py
@@ -1,13 +1,14 @@
 """Authentication routes for dicta2stream"""
 from fastapi import APIRouter, Depends, Request, Response, HTTPException, status
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
-from sqlmodel import Session
+from sqlmodel import Session, select
+from datetime import datetime
 
 from models import Session as DBSession, User
 from database import get_db
 from auth import get_current_user
 
-router = APIRouter()
+router = APIRouter(prefix="/api", tags=["auth"])
 security = HTTPBearer()
 
 @router.post("/logout")
@@ -18,30 +19,61 @@ async def logout(
     credentials: HTTPAuthorizationCredentials = Depends(security)
 ):
     """Log out by invalidating the current session"""
-    token = credentials.credentials
-
-    # Find and invalidate the session
-    session = db.exec(
-        select(DBSession)
-        .where(DBSession.token == token)
-        .where(DBSession.is_active == True)  # noqa: E712
-    ).first()
-
-    if session:
-        session.is_active = False
-        db.add(session)
-        db.commit()
-
-    # Clear the session cookie
-    response.delete_cookie(
-        key="sessionid",  # Must match the cookie name in main.py
-        httponly=True,
-        secure=True,  # Must match the cookie settings from login
-        samesite="lax",
-        path="/"
-    )
-
-    return {"message": "Successfully logged out"}
+    try:
+        # Get the token from the
Authorization header + token = credentials.credentials if credentials else None + + if not token: + return {"message": "No session to invalidate"} + + try: + # Find and invalidate the session + session = db.exec( + select(DBSession) + .where(DBSession.token == token) + .where(DBSession.is_active == True) # noqa: E712 + ).first() + + if session: + try: + session.is_active = False + db.add(session) + db.commit() + except Exception: + db.rollback() + + except Exception: + # Continue with logout even if session lookup fails + pass + + # Clear the session cookie + response.delete_cookie( + key="sessionid", + httponly=True, + secure=True, + samesite="lax", + path="/" + ) + + # Clear any other auth-related cookies + for cookie_name in ["uid", "authToken", "isAuthenticated", "token"]: + response.delete_cookie( + key=cookie_name, + path="/", + domain=request.url.hostname, + secure=True, + httponly=True, + samesite="lax" + ) + + return {"message": "Successfully logged out"} + + except HTTPException: + # Re-raise HTTP exceptions + raise + except Exception: + # Don't expose internal errors to the client + return {"message": "Logout processed"} @router.get("/me") diff --git a/concat_opus.py b/concat_opus.py index 83af46c..3a109f8 100644 --- a/concat_opus.py +++ b/concat_opus.py @@ -9,9 +9,50 @@ def concat_opus_files(user_dir: Path, output_file: Path): Concatenate all .opus files in user_dir (except stream.opus) in random order into output_file. Overwrites output_file if exists. Creates it if missing. """ - files = [f for f in user_dir.glob('*.opus') if f.name != 'stream.opus'] + # Clean up any existing filelist.txt to prevent issues + filelist_path = user_dir / 'filelist.txt' + if filelist_path.exists(): + try: + filelist_path.unlink() + except Exception as e: + print(f"Warning: Could not clean up old filelist.txt: {e}") + + # Get all opus files except stream.opus and remove any duplicates + import hashlib + file_hashes = set() + files = [] + + for f in user_dir.glob('*.opus'): + if f.name == 'stream.opus': + continue + + try: + # Calculate file hash for duplicate detection + hasher = hashlib.md5() + with open(f, 'rb') as file: + buf = file.read(65536) # Read in 64kb chunks + while len(buf) > 0: + hasher.update(buf) + buf = file.read(65536) + file_hash = hasher.hexdigest() + + # Skip if we've seen this exact file before + if file_hash in file_hashes: + print(f"Removing duplicate file: {f.name}") + f.unlink() + continue + + file_hashes.add(file_hash) + files.append(f) + + except Exception as e: + print(f"Error processing {f}: {e}") + if not files: - raise FileNotFoundError(f"No opus files to concatenate in {user_dir}") + # If no files, create an empty stream.opus + output_file.write_bytes(b'') + return output_file + random.shuffle(files) # Create a filelist for ffmpeg concat diff --git a/create_silent_opus.py b/create_silent_opus.py new file mode 100644 index 0000000..2858b2c --- /dev/null +++ b/create_silent_opus.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +""" +Create a silent OPUS audio file with 1 second of silence. 
+""" +import os +import opuslib +import numpy as np +import struct + +# Configuration +SAMPLE_RATE = 48000 +CHANNELS = 1 +FRAME_SIZE = 960 # 20ms at 48kHz +SILENCE_DURATION = 1.0 # seconds +OUTPUT_FILE = "silent.opus" + +# Calculate number of frames needed +num_frames = int((SAMPLE_RATE * SILENCE_DURATION) / (FRAME_SIZE * CHANNELS)) + +# Initialize Opus encoder +enc = opuslib.Encoder(SAMPLE_RATE, CHANNELS, 'voip') + +# Create silent audio data (all zeros) +silent_frame = struct.pack('h' * FRAME_SIZE * CHANNELS, *([0] * FRAME_SIZE * CHANNELS)) + +# Create Ogg Opus file +with open(OUTPUT_FILE, 'wb') as f: + # Write Ogg header + f.write(b'OggS') # Magic number + f.write(b'\x00') # Version + f.write(b'\x00') # Header type (0 = normal) + f.write(b'\x00\x00\x00\x00\x00\x00\x00\x00') # Granule position + f.write(b'\x00\x00\x00\x00') # Bitstream serial number + f.write(b'\x00\x00\x00\x00') # Page sequence number + f.write(b'\x00\x00\x00\x00') # Checksum + f.write(b'\x01') # Number of segments + f.write(b'\x00') # Segment table (0 = 1 byte segment) + + # Write Opus header + f.write(b'OpusHead') # Magic signature + f.write(b'\x01') # Version + f.write(chr(CHANNELS).encode('latin1')) # Channel count + f.write(struct.pack(' 0 + + if not file_deleted: + log_violation("DELETE_WARNING", ip, uid, f"No files found to delete for: {filename}") + + except Exception as e: + log_violation("DELETE_ERROR", ip, uid, f"Error deleting file {filename}: {str(e)}") + file_deleted = False + + # Try to refresh the user's playlist, but don't fail if we can't + try: + subprocess.run(["/root/scripts/refresh_user_playlist.sh", user.username], + check=False, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL) + except Exception as e: + log_violation("PLAYLIST_REFRESH_WARNING", ip, uid, + f"Failed to refresh playlist: {str(e)}") + + # Clean up the database record for this file + try: + # Find and delete the upload log entry + log_entry = db.exec( + select(UploadLog) + .where(UploadLog.uid == uid) + .where(UploadLog.processed_filename == filename) + ).first() + + if log_entry: + db.delete(log_entry) + db.commit() + log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}") + except Exception as e: + log_violation("DB_CLEANUP_ERROR", ip, uid, f"Failed to clean up DB record: {str(e)}") + db.rollback() + + # Regenerate stream.opus after file deletion + try: + from concat_opus import concat_opus_files + from pathlib import Path + user_dir_path = Path(user_dir) + stream_path = user_dir_path / "stream.opus" + concat_opus_files(user_dir_path, stream_path) + log_violation("STREAM_UPDATE", ip, uid, "Regenerated stream.opus after file deletion") + except Exception as e: + log_violation("STREAM_UPDATE_ERROR", ip, uid, f"Failed to regenerate stream.opus: {str(e)}") + + # Update user quota in a separate try-except to not fail the entire operation + try: + # Use verify_and_fix_quota to ensure consistency between disk and DB + total_size = verify_and_fix_quota(db, user.username, user_dir) + log_violation("QUOTA_UPDATE", ip, uid, + f"Updated quota: {total_size} bytes") + + except Exception as e: + log_violation("QUOTA_ERROR", ip, uid, f"Quota update failed: {str(e)}") + db.rollback() + + return {"status": "deleted"} + + except Exception as e: + # Log the error and re-raise with a user-friendly message + error_detail = str(e) + log_violation("DELETE_ERROR", request.client.host, uid, f"Failed to delete {filename}: {error_detail}") + if not isinstance(e, HTTPException): + raise HTTPException(status_code=500, detail=f"Failed to 
delete file: {error_detail}") + raise @app.get("/confirm/{uid}") def confirm_user(uid: str, request: Request): @@ -296,8 +419,55 @@ def confirm_user(uid: str, request: Request): raise HTTPException(status_code=403, detail="Unauthorized") return {"username": user.username, "email": user.email} +def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int: + """ + Verify and fix the user's quota based on the size of stream.opus file. + Returns the size of stream.opus in bytes. + """ + stream_opus_path = os.path.join(user_dir, 'stream.opus') + total_size = 0 + + # Only consider stream.opus for quota + if os.path.isfile(stream_opus_path): + try: + total_size = os.path.getsize(stream_opus_path) + print(f"[QUOTA] Stream.opus size for {uid}: {total_size} bytes") + except (OSError, FileNotFoundError) as e: + print(f"[QUOTA] Error getting size for stream.opus: {e}") + else: + print(f"[QUOTA] stream.opus not found in {user_dir}") + + # Update quota in database + q = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0) + q.storage_bytes = total_size + db.add(q) + + # Clean up any database records for files that don't exist + uploads = db.exec(select(UploadLog).where(UploadLog.uid == uid)).all() + for upload in uploads: + if upload.processed_filename: # Only check if processed_filename exists + stored_filename = f"{upload.id}_{upload.processed_filename}" + file_path = os.path.join(user_dir, stored_filename) + if not os.path.isfile(file_path): + print(f"[QUOTA] Removing orphaned DB record: {stored_filename}") + db.delete(upload) + + try: + db.commit() + print(f"[QUOTA] Updated quota for {uid}: {total_size} bytes") + except Exception as e: + print(f"[QUOTA] Error committing quota update: {e}") + db.rollback() + raise + + return total_size + @app.get("/me/{uid}") -def get_me(uid: str, request: Request, db: Session = Depends(get_db)): +def get_me(uid: str, request: Request, response: Response, db: Session = Depends(get_db)): + # Add headers to prevent caching + response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" + response.headers["Pragma"] = "no-cache" + response.headers["Expires"] = "0" print(f"[DEBUG] GET /me/{uid} - Client IP: {request.client.host}") try: # Get user info @@ -315,6 +485,10 @@ def get_me(uid: str, request: Request, db: Session = Depends(get_db)): if not debug_mode: raise HTTPException(status_code=403, detail="IP address mismatch") + # Get user directory + user_dir = os.path.join('data', uid) + os.makedirs(user_dir, exist_ok=True) + # Get all upload logs for this user upload_logs = db.exec( select(UploadLog) @@ -323,23 +497,54 @@ def get_me(uid: str, request: Request, db: Session = Depends(get_db)): ).all() print(f"[DEBUG] Found {len(upload_logs)} upload logs for UID {uid}") - # Build file list from database records + # Build file list from database records, checking if files exist on disk files = [] - for log in upload_logs: - if log.filename and log.processed_filename: - # The actual filename on disk might have the log ID prepended - stored_filename = f"{log.id}_{log.processed_filename}" - files.append({ - "name": stored_filename, - "original_name": log.filename, - "size": log.size_bytes - }) - print(f"[DEBUG] Added file from DB: {log.filename} (stored as {stored_filename}, {log.size_bytes} bytes)") + seen_files = set() # Track seen files to avoid duplicates - # Get quota info - q = db.get(UserQuota, uid) - quota_mb = round(q.storage_bytes / (1024 * 1024), 2) if q else 0 - print(f"[DEBUG] Quota for UID {uid}: {quota_mb} MB") + print(f"[DEBUG] 
Processing {len(upload_logs)} upload logs for UID {uid}") + + for i, log in enumerate(upload_logs): + if not log.filename or not log.processed_filename: + print(f"[DEBUG] Skipping log entry {i}: missing filename or processed_filename") + continue + + # The actual filename on disk has the log ID prepended + stored_filename = f"{log.id}_{log.processed_filename}" + file_path = os.path.join(user_dir, stored_filename) + + # Skip if we've already seen this file + if stored_filename in seen_files: + print(f"[DEBUG] Skipping duplicate file: {stored_filename}") + continue + + seen_files.add(stored_filename) + + # Only include the file if it exists on disk and is not stream.opus + if os.path.isfile(file_path) and stored_filename != 'stream.opus': + try: + # Get the actual file size in case it changed + file_size = os.path.getsize(file_path) + file_info = { + "name": stored_filename, + "original_name": log.filename, + "size": file_size + } + files.append(file_info) + print(f"[DEBUG] Added file {len(files)}: {log.filename} (stored as {stored_filename}, {file_size} bytes)") + except OSError as e: + print(f"[WARNING] Could not access file {stored_filename}: {e}") + else: + print(f"[DEBUG] File not found on disk or is stream.opus: {stored_filename}") + + # Log all files being returned + print("[DEBUG] All files being returned:") + for i, file_info in enumerate(files, 1): + print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)") + + # Verify and fix quota based on actual files on disk + total_size = verify_and_fix_quota(db, uid, user_dir) + quota_mb = round(total_size / (1024 * 1024), 2) + print(f"[DEBUG] Verified quota for UID {uid}: {quota_mb} MB") response_data = { "files": files, diff --git a/models.py b/models.py index 50061f2..3c32dc8 100644 --- a/models.py +++ b/models.py @@ -9,6 +9,7 @@ class User(SQLModel, table=True): token_created: datetime = Field(default_factory=datetime.utcnow) email: str = Field(primary_key=True) username: str = Field(unique=True, index=True) + display_name: str = Field(default="", nullable=True) token: str confirmed: bool = False ip: str = Field(default="") @@ -43,17 +44,40 @@ class DBSession(SQLModel, table=True): class PublicStream(SQLModel, table=True): """Stores public stream metadata for all users""" uid: str = Field(primary_key=True) - size: int = 0 + username: Optional[str] = Field(default=None, index=True) + display_name: Optional[str] = Field(default=None) + storage_bytes: int = 0 mtime: int = Field(default_factory=lambda: int(datetime.utcnow().timestamp())) + last_updated: Optional[datetime] = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow) def get_user_by_uid(uid: str) -> Optional[User]: + """ + Retrieve a user by their UID (username). + + Note: In this application, the User model uses email as primary key, + but we're using username as UID for API routes. This function looks up + users by username. 
+ + Args: + uid: The username to look up + + Returns: + User object if found, None otherwise + """ with Session(engine) as session: + # First try to find by username (which is what we're using as UID) statement = select(User).where(User.username == uid) - result = session.exec(statement).first() - return result + user = session.exec(statement).first() + + # If not found by username, try by email (for backward compatibility) + if not user and '@' in uid: + statement = select(User).where(User.email == uid) + user = session.exec(statement).first() + + return user def verify_session(db: Session, token: str) -> DBSession: diff --git a/nohup.out b/nohup.out new file mode 100644 index 0000000..30a0b95 --- /dev/null +++ b/nohup.out @@ -0,0 +1,4 @@ +INFO: Will watch for changes in these directories: ['/home/oib/games/dicta2stream'] +ERROR: [Errno 98] Address already in use +INFO: Will watch for changes in these directories: ['/home/oib/games/dicta2stream'] +ERROR: [Errno 98] Address already in use diff --git a/public_streams.txt b/public_streams.txt index 59b6010..3eb027b 100644 --- a/public_streams.txt +++ b/public_streams.txt @@ -1,4 +1,2 @@ -{"uid":"devuser","size":65551721,"mtime":1752752391} -{"uid":"oib9","size":12735117,"mtime":1752843762} -{"uid":"oibchello","size":1549246,"mtime":1752840918} +{"uid":"oibchello","size":3371119,"mtime":1752994076} {"uid":"orangeicebear","size":1734396,"mtime":1748767975} diff --git a/public_streams.txt.backup b/public_streams.txt.backup new file mode 100644 index 0000000..e97e65a --- /dev/null +++ b/public_streams.txt.backup @@ -0,0 +1,3 @@ +{"uid":"devuser","size":90059327,"mtime":1752911461} +{"uid":"oibchello","size":16262818,"mtime":1752911899} +{"uid":"orangeicebear","size":1734396,"mtime":1748767975} diff --git a/register.py b/register.py index 1ef5624..8015f32 100644 --- a/register.py +++ b/register.py @@ -7,11 +7,46 @@ from database import get_db import uuid import smtplib from email.message import EmailMessage +from pathlib import Path +import os router = APIRouter() MAGIC_FROM = "noreply@dicta2stream.net" MAGIC_DOMAIN = "https://dicta2stream.net" +DATA_ROOT = Path("./data") + +def initialize_user_directory(username: str): + """Initialize user directory with a silent stream.opus file""" + try: + user_dir = DATA_ROOT / username + default_stream_path = DATA_ROOT / "stream.opus" + + print(f"[DEBUG] Initializing user directory: {user_dir.absolute()}") + + # Create the directory if it doesn't exist + user_dir.mkdir(parents=True, exist_ok=True) + print(f"[DEBUG] Directory created or already exists: {user_dir.exists()}") + + # Create stream.opus by copying the default stream.opus file + user_stream_path = user_dir / "stream.opus" + print(f"[DEBUG] Creating stream.opus at: {user_stream_path.absolute()}") + + if not user_stream_path.exists(): + if default_stream_path.exists(): + import shutil + shutil.copy2(default_stream_path, user_stream_path) + print(f"[DEBUG] Copied default stream.opus to {user_stream_path}") + else: + print(f"[ERROR] Default stream.opus not found at {default_stream_path}") + # Fallback: create an empty file to prevent errors + with open(user_stream_path, 'wb') as f: + f.write(b'') + + return True + except Exception as e: + print(f"Error initializing user directory for {username}: {str(e)}") + return False @router.post("/register") def register(request: Request, email: str = Form(...), user: str = Form(...), db: Session = Depends(get_db)): @@ -40,8 +75,13 @@ def register(request: Request, email: str = Form(...), user: str = Form(...), db 
# Register new user db.add(User(email=email, username=user, token=token, confirmed=False, ip=request.client.host)) db.add(UserQuota(uid=user)) + try: + # First commit the user to the database db.commit() + + # Only after successful commit, initialize the user directory + initialize_user_directory(user) except Exception as e: db.rollback() if isinstance(e, IntegrityError): diff --git a/silent.opus b/silent.opus new file mode 100644 index 0000000..73b4dd7 Binary files /dev/null and b/silent.opus differ diff --git a/static/app.js b/static/app.js index e7d0980..0025e3a 100644 --- a/static/app.js +++ b/static/app.js @@ -37,7 +37,7 @@ function handleMagicLoginRedirect() { localStorage.setItem('uid', username); localStorage.setItem('confirmed_uid', username); localStorage.setItem('uid_time', Date.now().toString()); - document.cookie = `uid=${encodeURIComponent(username)}; path=/`; + document.cookie = `uid=${encodeURIComponent(username)}; path=/; SameSite=Lax`; // Update UI state document.body.classList.add('authenticated'); @@ -45,7 +45,7 @@ function handleMagicLoginRedirect() { // Update local storage and cookies localStorage.setItem('isAuthenticated', 'true'); - document.cookie = `isAuthenticated=true; path=/`; + document.cookie = `isAuthenticated=true; path=/; SameSite=Lax`; // Update URL and history without reloading window.history.replaceState({}, document.title, window.location.pathname); @@ -677,25 +677,170 @@ trackedFunctions.forEach(fnName => { } }); +// Update the visibility of the account deletion section based on authentication state +function updateAccountDeletionVisibility(isAuthenticated) { + console.log('[ACCOUNT-DELETION] updateAccountDeletionVisibility called with isAuthenticated:', isAuthenticated); + + // Find the account deletion section and its auth-only wrapper + const authOnlyWrapper = document.querySelector('#privacy-page .auth-only'); + const accountDeletionSection = document.getElementById('account-deletion'); + + console.log('[ACCOUNT-DELETION] Elements found:', { + authOnlyWrapper: !!authOnlyWrapper, + accountDeletionSection: !!accountDeletionSection + }); + + // Function to show an element with all necessary styles + const showElement = (element) => { + if (!element) return; + + console.log('[ACCOUNT-DELETION] Showing element:', element); + + // Remove any hiding classes + element.classList.remove('hidden', 'auth-only-hidden'); + + // Set all possible visibility properties + element.style.display = 'block'; + element.style.visibility = 'visible'; + element.style.opacity = '1'; + element.style.height = 'auto'; + element.style.position = 'relative'; + element.style.clip = 'auto'; + element.style.overflow = 'visible'; + + // Add a class to mark as visible + element.classList.add('account-visible'); + }; + + // Function to hide an element + const hideElement = (element) => { + if (!element) return; + + console.log('[ACCOUNT-DELETION] Hiding element:', element); + + // Set display to none to completely remove from layout + element.style.display = 'none'; + + // Remove any visibility-related classes + element.classList.remove('account-visible'); + }; + + if (isAuthenticated) { + console.log('[ACCOUNT-DELETION] User is authenticated, checking if on privacy page'); + + // Get the current page state - only show on #privacy-page + const currentHash = window.location.hash; + const isPrivacyPage = currentHash === '#privacy-page'; + + console.log('[ACCOUNT-DELETION] Debug - Page State:', { + isAuthenticated, + currentHash, + isPrivacyPage, + documentTitle: document.title + }); + 
+ if (isAuthenticated && isPrivacyPage) { + console.log('[ACCOUNT-DELETION] On privacy page, showing account deletion section'); + + // Show the auth wrapper and account deletion section + if (authOnlyWrapper) { + authOnlyWrapper.style.display = 'block'; + authOnlyWrapper.style.visibility = 'visible'; + } + + if (accountDeletionSection) { + accountDeletionSection.style.display = 'block'; + accountDeletionSection.style.visibility = 'visible'; + } + } else { + console.log('[ACCOUNT-DELETION] Not on privacy page, hiding account deletion section'); + + // Hide the account deletion section + if (accountDeletionSection) { + accountDeletionSection.style.display = 'none'; + accountDeletionSection.style.visibility = 'hidden'; + } + + // Only hide the auth wrapper if we're not on the privacy page + if (authOnlyWrapper && !isPrivacyPage) { + authOnlyWrapper.style.display = 'none'; + authOnlyWrapper.style.visibility = 'hidden'; + } + } + + // Debug: Log the current state after updates + if (accountDeletionSection) { + console.log('[ACCOUNT-DELETION] Account deletion section state after show:', { + display: window.getComputedStyle(accountDeletionSection).display, + visibility: window.getComputedStyle(accountDeletionSection).visibility, + classes: accountDeletionSection.className, + parent: accountDeletionSection.parentElement ? { + tag: accountDeletionSection.parentElement.tagName, + classes: accountDeletionSection.parentElement.className, + display: window.getComputedStyle(accountDeletionSection.parentElement).display + } : 'no parent' + }); + } + + } else { + console.log('[ACCOUNT-DELETION] User is not authenticated, hiding account deletion section'); + + // Hide the account deletion section but keep the auth-only wrapper for other potential content + if (accountDeletionSection) { + hideElement(accountDeletionSection); + } + + // Only hide the auth-only wrapper if it doesn't contain other important content + if (authOnlyWrapper) { + const hasOtherContent = Array.from(authOnlyWrapper.children).some( + child => child.id !== 'account-deletion' && child.offsetParent !== null + ); + + if (!hasOtherContent) { + hideElement(authOnlyWrapper); + } + } + } + + // Log final state for debugging + console.log('[ACCOUNT-DELETION] Final state:', { + authOnlyWrapper: authOnlyWrapper ? { + display: window.getComputedStyle(authOnlyWrapper).display, + visibility: window.getComputedStyle(authOnlyWrapper).visibility, + classes: authOnlyWrapper.className + } : 'not found', + accountDeletionSection: accountDeletionSection ? { + display: window.getComputedStyle(accountDeletionSection).display, + visibility: window.getComputedStyle(accountDeletionSection).visibility, + classes: accountDeletionSection.className, + parent: accountDeletionSection.parentElement ? 
{ + tag: accountDeletionSection.parentElement.tagName, + classes: accountDeletionSection.parentElement.className, + display: window.getComputedStyle(accountDeletionSection.parentElement).display + } : 'no parent' + } : 'not found' + }); +} + // Check authentication state and update UI function checkAuthState() { + // Debounce rapid calls const now = Date.now(); - - // Throttle the checks if (now - lastAuthCheckTime < AUTH_CHECK_DEBOUNCE) { - return; + return wasAuthenticated === true; } lastAuthCheckTime = now; - - // Check various auth indicators - const hasAuthCookie = document.cookie.includes('sessionid='); + authCheckCounter++; + + // Check various authentication indicators + const hasAuthCookie = document.cookie.includes('isAuthenticated=true'); const hasUidCookie = document.cookie.includes('uid='); const hasLocalStorageAuth = localStorage.getItem('isAuthenticated') === 'true'; - const hasAuthToken = localStorage.getItem('authToken') !== null; + const hasAuthToken = !!localStorage.getItem('authToken'); + // User is considered authenticated if any of these are true const isAuthenticated = hasAuthCookie || hasUidCookie || hasLocalStorageAuth || hasAuthToken; - - // Only log if debug is enabled or if state has changed + if (DEBUG_AUTH_STATE || isAuthenticated !== wasAuthenticated) { console.log('Auth State Check:', { hasAuthCookie, @@ -729,6 +874,9 @@ function checkAuthState() { console.warn('injectNavigation function not found'); } + // Update account deletion section visibility + updateAccountDeletionVisibility(isAuthenticated); + // Update the tracked state wasAuthenticated = isAuthenticated; @@ -755,6 +903,12 @@ function setupAuthStatePolling() { } +// Function to handle page navigation +function handlePageNavigation() { + const isAuthenticated = checkAuthState(); + updateAccountDeletionVisibility(isAuthenticated); +} + // Initialize the application when DOM is loaded document.addEventListener("DOMContentLoaded", () => { // Set up authentication state monitoring @@ -766,6 +920,11 @@ document.addEventListener("DOMContentLoaded", () => { // Initialize components initNavigation(); + // Initialize account deletion section visibility + handlePageNavigation(); + + // Listen for hash changes to update visibility when navigating + window.addEventListener('hashchange', handlePageNavigation); // Initialize profile player after a short delay setTimeout(() => { @@ -861,32 +1020,96 @@ document.addEventListener("DOMContentLoaded", () => { const deleteAccountFromPrivacyBtn = document.getElementById('delete-account-from-privacy'); const deleteAccount = async (e) => { - if (e) e.preventDefault(); + if (e) { + e.preventDefault(); + e.stopPropagation(); + } - if (!confirm('Are you sure you want to delete your account? This action cannot be undone.')) { + if (!confirm('Are you sure you want to delete your account?\n\nThis action cannot be undone.')) { return; } + // Show loading state + const deleteBtn = e?.target.closest('button'); + const originalText = deleteBtn?.textContent || 'Delete My Account'; + if (deleteBtn) { + deleteBtn.disabled = true; + deleteBtn.textContent = 'Deleting...'; + } + try { + // Get UID from localStorage + const uid = localStorage.getItem('uid'); + if (!uid) { + throw new Error('User not authenticated. 
Please log in again.'); + } + + console.log('Sending delete account request for UID:', uid); const response = await fetch('/api/delete-account', { method: 'POST', headers: { 'Content-Type': 'application/json', - } + }, + credentials: 'include', + body: JSON.stringify({ + uid: uid // Include UID in the request body + }) }); - if (response.ok) { - // Clear local storage and redirect to home page - localStorage.clear(); - window.location.href = '/'; - } else { - const error = await response.json(); - throw new Error(error.detail || 'Failed to delete account'); - } - } catch (error) { - console.error('Error deleting account:', error); - showToast(`❌ ${error.message || 'Failed to delete account'}`, 'error'); + console.log('Received response status:', response.status, response.statusText); + + // Try to parse response as JSON, but handle non-JSON responses + let data; + const text = await response.text(); + try { + data = text ? JSON.parse(text) : {}; + } catch (parseError) { + console.error('Failed to parse response as JSON:', parseError); + console.log('Raw response text:', text); + data = {}; } + + if (response.ok) { + console.log('Account deletion successful'); + showToast('✅ Account deleted successfully', 'success'); + // Clear local storage and redirect to home page after a short delay + setTimeout(() => { + localStorage.clear(); + window.location.href = '/'; + }, 1000); + } else { + console.error('Delete account failed:', { status: response.status, data }); + const errorMessage = data.detail || data.message || + data.error || + `Server returned ${response.status} ${response.statusText}`; + throw new Error(errorMessage); + } + } catch (error) { + console.error('Error in deleteAccount:', { + name: error.name, + message: error.message, + stack: error.stack, + error: error + }); + + // Try to extract a meaningful error message + let errorMessage = 'Failed to delete account'; + if (error instanceof Error) { + errorMessage = error.message || error.toString(); + } else if (typeof error === 'string') { + errorMessage = error; + } else if (error && typeof error === 'object') { + errorMessage = error.message || JSON.stringify(error); + } + + showToast(`❌ ${errorMessage}`, 'error'); + } finally { + // Restore button state + if (deleteBtn) { + deleteBtn.disabled = false; + deleteBtn.textContent = originalText; + } + } }; // Add event listeners to both delete account buttons @@ -902,22 +1125,49 @@ document.addEventListener("DOMContentLoaded", () => { }); // Logout function -function logout() { +async function logout(event) { + if (event) { + event.preventDefault(); + event.stopPropagation(); + } + + // If handleLogout is available in dashboard.js, use it for comprehensive logout + if (typeof handleLogout === 'function') { + try { + await handleLogout(event); + } catch (error) { + console.error('Error during logout:', error); + // Fall back to basic logout if handleLogout fails + basicLogout(); + } + } else { + // Fallback to basic logout if handleLogout is not available + basicLogout(); + } +} + +// Basic client-side logout as fallback +function basicLogout() { // Clear authentication state document.body.classList.remove('authenticated'); localStorage.removeItem('isAuthenticated'); localStorage.removeItem('uid'); localStorage.removeItem('confirmed_uid'); localStorage.removeItem('uid_time'); + localStorage.removeItem('authToken'); - // Clear cookies - document.cookie = 'isAuthenticated=; Path=/; Expires=Thu, 01 Jan 1970 00:00:01 GMT;'; - document.cookie = 'uid=; Path=/; Expires=Thu, 01 Jan 1970 
00:00:01 GMT;'; + // Clear all cookies with proper SameSite attribute + document.cookie.split(';').forEach(cookie => { + const [name] = cookie.trim().split('='); + if (name) { + document.cookie = `${name}=; Path=/; Expires=Thu, 01 Jan 1970 00:00:01 GMT; domain=${window.location.hostname}; SameSite=Lax`; + } + }); // Stop any playing audio stopMainAudio(); - // Redirect to home page + // Force a hard redirect to ensure all state is cleared window.location.href = '/'; } diff --git a/static/css/section.css b/static/css/section.css index 453aafb..60580fa 100644 --- a/static/css/section.css +++ b/static/css/section.css @@ -36,16 +36,78 @@ body.authenticated .auth-only { #me-page:not([hidden]) > .auth-only, #me-page:not([hidden]) > section, #me-page:not([hidden]) > article, -#me-page:not([hidden]) > div, -/* Ensure account deletion section is visible when privacy page is active and user is authenticated */ -#privacy-page:not([hidden]) .auth-only, -#privacy-page:not([hidden]) #account-deletion { +#me-page:not([hidden]) > div { display: block !important; visibility: visible !important; opacity: 1 !important; } -body.authenticated .guest-only { +/* Show auth-only elements when authenticated */ +body.authenticated .auth-only { + display: block !important; + visibility: visible !important; +} + +/* Account deletion section - improved width and formatting */ +#account-deletion { + margin: 2.5rem auto; + padding: 2.5rem; + background: rgba(255, 255, 255, 0.05); + border-radius: 10px; + box-shadow: 0 3px 6px rgba(0, 0, 0, 0.15); + max-width: 600px; + line-height: 1.6; + color: var(--text-color); +} + +#account-deletion h3 { + color: var(--color-primary); + margin-top: 0; + margin-bottom: 1.5rem; + font-size: 1.5rem; +} + +#account-deletion p { + color: var(--color-text); + line-height: 1.6; + margin-bottom: 1.5rem; +} + +#account-deletion ul { + margin: 1rem 0 1.5rem 1.5rem; + padding: 0; + color: var(--color-text); +} + +#account-deletion .centered-container { + text-align: center; + margin-top: 2rem; +} + +#delete-account-from-privacy { + background-color: #ff4d4f; + color: white; + border: none; + padding: 0.75rem 1.5rem; + border-radius: 4px; + cursor: pointer; + font-weight: 600; + font-size: 1rem; + transition: background-color 0.2s ease; + display: inline-flex; + align-items: center; + gap: 0.5rem; +} + +#delete-account-from-privacy:hover { + background-color: #ff6b6b; + text-decoration: none; +} + +/* Hide guest-only elements when authenticated */ +body.authenticated .guest-only { + display: none !important; + visibility: hidden !important; display: none; } diff --git a/static/init-personal-stream.js b/static/init-personal-stream.js index 1e2136d..e31f08b 100644 --- a/static/init-personal-stream.js +++ b/static/init-personal-stream.js @@ -3,19 +3,22 @@ document.addEventListener('DOMContentLoaded', () => { // Function to update the play button with UID function updatePersonalStreamPlayButton() { const playButton = document.querySelector('#me-page .play-pause-btn'); - if (!playButton) return; + const streamPlayer = document.querySelector('#me-page .stream-player'); + + if (!playButton || !streamPlayer) return; // Get UID from localStorage or cookie const uid = localStorage.getItem('uid') || getCookie('uid'); if (uid) { - // Set the data-uid attribute if not already set + // Show the player and set the UID if not already set + streamPlayer.style.display = 'block'; if (!playButton.dataset.uid) { playButton.dataset.uid = uid; - console.log('[personal-stream] Set UID for personal stream play button:', 
uid); } } else { - console.warn('[personal-stream] No UID found for personal stream play button'); + // Hide the player for guests + streamPlayer.style.display = 'none'; } } diff --git a/static/magic-login.js b/static/magic-login.js index 705c3f0..39fb856 100644 --- a/static/magic-login.js +++ b/static/magic-login.js @@ -31,8 +31,8 @@ export async function initMagicLogin() { const authToken = 'token-' + Math.random().toString(36).substring(2, 15); // Set cookies and localStorage for SPA session logic - document.cookie = `uid=${encodeURIComponent(confirmedUid)}; path=/`; - document.cookie = `authToken=${authToken}; path=/`; + document.cookie = `uid=${encodeURIComponent(confirmedUid)}; path=/; SameSite=Lax`; + document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; // Store in localStorage for client-side access localStorage.setItem('uid', confirmedUid); @@ -53,8 +53,8 @@ export async function initMagicLogin() { const authToken = 'token-' + Math.random().toString(36).substring(2, 15); // Set cookies and localStorage for SPA session logic - document.cookie = `uid=${encodeURIComponent(data.confirmed_uid)}; path=/`; - document.cookie = `authToken=${authToken}; path=/`; + document.cookie = `uid=${encodeURIComponent(data.confirmed_uid)}; path=/; SameSite=Lax`; + document.cookie = `authToken=${authToken}; path=/; SameSite=Lax; Secure`; // Store in localStorage for client-side access localStorage.setItem('uid', data.confirmed_uid); diff --git a/static/streams-ui.js b/static/streams-ui.js index 074049d..19521c7 100644 --- a/static/streams-ui.js +++ b/static/streams-ui.js @@ -1,12 +1,22 @@ // static/streams-ui.js — public streams loader and profile-link handling import { showOnly } from './router.js'; -console.log('[streams-ui] Module loaded'); +// Global variable to track if we should force refresh the stream list +let shouldForceRefresh = false; + +// Function to refresh the stream list +window.refreshStreamList = function(force = true) { + shouldForceRefresh = force; + loadAndRenderStreams(); + return new Promise((resolve) => { + // Resolve after a short delay to allow the stream list to update + setTimeout(resolve, 500); + }); +}; // Removed loadingStreams and lastStreamsPageVisible guards for instant fetch export function initStreamsUI() { - console.log('[streams-ui] Initializing streams UI'); initStreamLinks(); window.addEventListener('popstate', () => { highlightActiveProfileLink(); @@ -29,25 +39,55 @@ window.maybeLoadStreamsOnShow = maybeLoadStreamsOnShow; // Global variables for audio control let currentlyPlayingAudio = null; +// Global variable to track the active SSE connection +let activeSSEConnection = null; + +// Global cleanup function for SSE connections +const cleanupConnections = () => { + if (window._streamsSSE) { + if (window._streamsSSE.abort) { + window._streamsSSE.abort(); + } + window._streamsSSE = null; + } + + if (window.connectionTimeout) { + clearTimeout(window.connectionTimeout); + window.connectionTimeout = null; + } + + activeSSEConnection = null; +}; + // Initialize when DOM is loaded document.addEventListener('DOMContentLoaded', () => { - console.log('[streams-ui] DOM content loaded, initializing streams UI'); initStreamsUI(); // Also try to load streams immediately in case the page is already loaded setTimeout(() => { - console.log('[streams-ui] Attempting initial stream load'); loadAndRenderStreams(); }, 100); }); function loadAndRenderStreams() { - console.log('[streams-ui] loadAndRenderStreams called'); const ul = 
document.getElementById('stream-list'); if (!ul) { - console.warn('[streams-ui] #stream-list not found in DOM'); + console.error('[STREAMS-UI] Stream list element not found'); return; } + console.log('[STREAMS-UI] loadAndRenderStreams called, shouldForceRefresh:', shouldForceRefresh); + + // Don't start a new connection if one is already active and we're not forcing a refresh + if (activeSSEConnection && !shouldForceRefresh) { + return; + } + + // If we're forcing a refresh, clean up the existing connection + if (shouldForceRefresh && activeSSEConnection) { + // Clean up any existing connections + cleanupConnections(); + shouldForceRefresh = false; // Reset the flag after handling + } // Clear any existing error messages or retry buttons ul.innerHTML = '
<li>Loading public streams...</li>';

@@ -59,36 +99,21 @@ function loadAndRenderStreams() {
   const baseUrl = window.location.origin;
   const sseUrl = `${baseUrl}/streams-sse?t=${timestamp}`;

-  console.log(`[streams-ui] Connecting to ${sseUrl}`);
-
   let gotAny = false;
   let streams = [];
-  let connectionTimeout = null;
+  window.connectionTimeout = null;

-  // Clean up previous connection and timeouts
-  if (window._streamsSSE) {
-    console.group('[streams-ui] Cleaning up previous connection');
-    console.log('Previous connection exists, aborting...');
-    if (window._streamsSSE.abort) {
-      window._streamsSSE.abort();
-      console.log('Previous connection aborted');
-    } else {
-      console.log('No abort method on previous connection');
-    }
-    window._streamsSSE = null;
-    console.groupEnd();
+  // Clean up any existing connections
+  cleanupConnections();
+
+  // Reset the retry count if we have a successful connection
+  window.streamRetryCount = 0;
+
+  if (window.connectionTimeout) {
+    clearTimeout(window.connectionTimeout);
+    window.connectionTimeout = null;
   }
-  if (connectionTimeout) {
-    console.log('[streams-ui] Clearing previous connection timeout');
-    clearTimeout(connectionTimeout);
-    connectionTimeout = null;
-  } else {
-    console.log('[streams-ui] No previous connection timeout to clear');
-  }
-
-  console.log(`[streams-ui] Creating fetch-based SSE connection to ${sseUrl}`);
-
   // Use fetch with ReadableStream for better CORS handling
   const controller = new AbortController();
   const signal = controller.signal;
@@ -96,6 +121,9 @@ function loadAndRenderStreams() {
   // Store the controller for cleanup
   window._streamsSSE = controller;

+  // Track the active connection
+  activeSSEConnection = controller;
+
   // Set a connection timeout with debug info
   const connectionStartTime = Date.now();
   const connectionTimeoutId = setTimeout(() => {
@@ -123,20 +151,12 @@
         window.streamRetryCount = retryCount + 1;
         const backoffTime = Math.min(1000 * Math.pow(2, retryCount), 10000); // Exponential backoff, max 10s
         setTimeout(loadAndRenderStreams, backoffTime);
-      } else if (process.env.NODE_ENV === 'development' || window.DEBUG_STREAMS) {
-        console.warn('Max retries reached for stream loading');
       }
     }
   }, 15000); // 15 second timeout (increased from 10s)

   // Store the timeout ID for cleanup
-  connectionTimeout = connectionTimeoutId;
-
-  console.log('[streams-ui] Making fetch request to:', sseUrl);
-
-  console.log('[streams-ui] Making fetch request to:', sseUrl);
-
-  console.log('[streams-ui] Creating fetch request with URL:', sseUrl);
+  window.connectionTimeout = connectionTimeoutId;
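// The watchdog above is worth seeing in isolation: if no SSE data arrives
// within 15 s, the whole load is retried with capped exponential backoff.
// A minimal sketch of that policy (names like retryWithBackoff and the
// maxRetries cap are illustrative, not from the codebase):

function retryWithBackoff(task, retryCount = 0, maxRetries = 3) {
  task().catch(() => {
    if (retryCount >= maxRetries) return; // give up quietly after the cap
    // 1s, 2s, 4s, ... capped at 10s — the same curve loadAndRenderStreams uses
    const backoffMs = Math.min(1000 * Math.pow(2, retryCount), 10000);
    setTimeout(() => retryWithBackoff(task, retryCount + 1, maxRetries), backoffMs);
  });
}

// Usage: retryWithBackoff(() => fetch('/streams-sse').then(r => r.text()));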
  // Make the fetch request with proper error handling
  fetch(sseUrl, {
@@ -151,25 +171,14 @@
    mode: 'cors',
    redirect: 'follow'
  })
-  .then(response => {
-    console.log('[streams-ui] Fetch response received, status:', response.status, response.statusText);
-    console.log('[streams-ui] Response URL:', response.url);
-    console.log('[streams-ui] Response type:', response.type);
-    console.log('[streams-ui] Response redirected:', response.redirected);
-    console.log('[streams-ui] Response headers:');
-    response.headers.forEach((value, key) => {
-      console.log(`  ${key}: ${value}`);
-    });
-
+  .then(response => {
    if (!response.ok) {
      // Try to get the response text for error details
      return response.text().then(text => {
-        console.error('[streams-ui] Error response body:', text);
        const error = new Error(`HTTP error! status: ${response.status}, statusText: ${response.statusText}`);
        error.response = { status: response.status, statusText: response.statusText, body: text };
        throw error;
-      }).catch(textError => {
-        console.error('[streams-ui] Could not read error response body:', textError);
+      }).catch(() => {
        const error = new Error(`HTTP error! status: ${response.status}, statusText: ${response.statusText}`);
        error.response = { status: response.status, statusText: response.statusText };
        throw error;
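// For reference, the core of this fetch-based SSE pattern reduced to its
// essentials: fetch() plus AbortController gives custom headers and
// cancellation that EventSource lacks, and the body is consumed as a text
// stream. A minimal standalone sketch (the endpoint URL and `data:` framing
// are assumptions, not taken verbatim from the server code):

async function readSSE(url, onEvent, signal) {
  const response = await fetch(url, { headers: { Accept: 'text/event-stream' }, signal });
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop(); // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (line.startsWith('data: ')) onEvent(JSON.parse(line.slice(6)));
    }
  }
}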
@@ -177,13 +186,9 @@
      }

      if (!response.body) {
-        const error = new Error('Response body is null or undefined');
-        console.error('[streams-ui] No response body:', error);
-        throw error;
+        throw new Error('Response body is null or undefined');
      }

-      console.log('[streams-ui] Response body is available, content-type:', response.headers.get('content-type'));
-
      // Get the readable stream
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
@@ -191,15 +196,18 @@ function loadAndRenderStreams() {
      // Process the stream
      function processStream({ done, value }) {
+        console.log('[STREAMS-UI] processStream called with done:', done);
        if (done) {
-          console.log('[streams-ui] Stream completed');
+          console.log('[STREAMS-UI] Stream processing complete');
          // Process any remaining data in the buffer
          if (buffer.trim()) {
+            console.log('[STREAMS-UI] Processing remaining buffer data');
            try {
              const data = JSON.parse(buffer);
+              console.log('[STREAMS-UI] Parsed data from buffer:', data);
              processSSEEvent(data);
            } catch (e) {
-              console.error('[streams-ui] Error parsing final data:', e);
+              console.error('[STREAMS-UI] Error parsing buffer data:', e);
            }
          }
          return;
@@ -235,68 +243,63 @@ function loadAndRenderStreams() {
        return reader.read().then(processStream);
      })
      .catch(error => {
-        // Only handle the error if it's not an AbortError (from our own abort)
-        if (error.name === 'AbortError') {
-          console.log('[streams-ui] Request was aborted as expected');
-          return;
-        }
-
-        console.error('[streams-ui] Stream loading failed:', error);
-
-        // Log additional error details
-        if (error.name === 'TypeError') {
-          console.error('[streams-ui] This is likely a network error or CORS issue');
-        }
-
-        // Show a user-friendly error message
-        const ul = document.getElementById('stream-list');
-        if (ul) {
-          let errorMessage = 'Error loading streams. ';
+        // Only handle the error if it's not an abort error
+        if (error.name !== 'AbortError') {
+          // Clean up the controller reference
+          window._streamsSSE = null;
+          activeSSEConnection = null;

-          if (error.message.includes('Failed to fetch')) {
-            errorMessage += 'Unable to connect to the server. Please check your internet connection.';
-          } else if (error.message.includes('CORS')) {
-            errorMessage += 'A server configuration issue occurred. Please try again later.';
-          } else {
-            errorMessage += 'Please try again later.';
+          // Clear the connection timeout
+          if (window.connectionTimeout) {
+            clearTimeout(window.connectionTimeout);
+            window.connectionTimeout = null;
          }

-        ul.innerHTML = `
-          <li class="error">
-            <div class="error-message">${errorMessage}</div>
-            <button id="retry-loading">Retry</button>
-          </li>
-        `;
+        // Show a user-friendly error message
+        const ul = document.getElementById('stream-list');
+        if (ul) {
+          let errorMessage = 'Error loading streams. ';

-        // Add retry handler
-        const retryButton = document.getElementById('retry-loading');
-        if (retryButton) {
-          retryButton.addEventListener('click', () => {
-            ul.innerHTML = '<li>Loading streams...</li>';
-            loadAndRenderStreams();
-          });
+          if (error.message && error.message.includes('Failed to fetch')) {
+            errorMessage += 'Unable to connect to the server. Please check your internet connection.';
+          } else if (error.message && error.message.includes('CORS')) {
+            errorMessage += 'A server configuration issue occurred. Please try again later.';
+          } else {
+            errorMessage += 'Please try again later.';
+          }
+
+          ul.innerHTML = `
+            <li class="error">
+              <div class="error-message">${errorMessage}</div>
+              <button id="retry-loading">Retry</button>
+            </li>
+          `;
+
+          // Add retry handler
+          const retryButton = document.getElementById('retry-loading');
+          if (retryButton) {
+            retryButton.addEventListener('click', () => {
+              ul.innerHTML = '<li>Loading streams...</li>';
+              loadAndRenderStreams();
+            });
+          }
        }
      }
    });
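// Each SSE payload handled below is one JSON object per stream, with a final
// sentinel event marking the end of the list. Sketch of the framing this
// client expects (field names match processSSEEvent and public_streams.txt;
// the server-side details are assumed):
//
//   data: {"uid": "oibchello", "size": 3371119, "mtime": 1752994076}
//   data: {"uid": "orangeicebear", "size": 1734396, "mtime": 1748767975}
//   data: {"end": true}
//
// so a handler can accumulate items until it sees `end`:

function makeSSEHandler(onDone) {
  const streams = [];
  return (data) => {
    if (data.end) {
      onDone(streams); // complete list received; render it
      return;
    }
    streams.push(data);
  };
}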
// Function to process SSE events
function processSSEEvent(data) {
-  console.log('[streams-ui] Received SSE event:', data);
-
+  console.log('[STREAMS-UI] Processing SSE event:', data);
  if (data.end) {
-    console.log('[streams-ui] Received end event, total streams:', streams.length);
-
    if (streams.length === 0) {
-      console.log('[streams-ui] No streams found, showing empty state');
      ul.innerHTML = '<li>No active streams.</li>';
      return;
    }

    // Sort streams by mtime in descending order (newest first)
    streams.sort((a, b) => (b.mtime || 0) - (a.mtime || 0));
-    console.log('[streams-ui] Sorted streams:', streams);

    // Clear the list
    ul.innerHTML = '';
@@ -307,8 +310,6 @@
        const sizeMb = stream.size ? (stream.size / (1024 * 1024)).toFixed(1) : '?';
        const mtime = stream.mtime ? new Date(stream.mtime * 1000).toISOString().split('T')[0].replace(/-/g, '/') : '';

-        console.log(`[streams-ui] Rendering stream ${index + 1}/${streams.length}:`, { uid, sizeMb, mtime });
-
        const li = document.createElement('li');
        li.className = 'stream-item';
@@ -323,9 +324,7 @@
        `;
        ul.appendChild(li);
-        console.log(`[streams-ui] Successfully rendered stream: ${uid}`);
      } catch (error) {
-        console.error(`[streams-ui] Error rendering stream ${uid}:`, error);
        const errorLi = document.createElement('li');
        errorLi.textContent = `Error loading stream: ${uid}`;
        errorLi.style.color = 'var(--error)';
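// The list items above derive their labels from raw stream metadata: bytes
// become megabytes with one decimal, and the epoch mtime becomes a YYYY/MM/DD
// date. The same two conversions, isolated (the helper name is illustrative,
// not from the codebase):

function formatStreamMeta(stream) {
  const sizeMb = stream.size ? (stream.size / (1024 * 1024)).toFixed(1) : '?';
  const date = stream.mtime
    ? new Date(stream.mtime * 1000).toISOString().split('T')[0].replace(/-/g, '/')
    : '';
  return { sizeMb, date };
}

// formatStreamMeta({ size: 1734396, mtime: 1748767975 })
//   → { sizeMb: '1.7', date: '2025/06/01' }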
@@ -379,10 +378,11 @@ export function renderStreamList(streams) {
  const ul = document.getElementById('stream-list');
  if (!ul) {
-    console.warn('[streams-ui] renderStreamList: #stream-list not found');
+    console.warn('[STREAMS-UI] renderStreamList: #stream-list not found');
    return;
  }
-  console.debug('[streams-ui] Rendering stream list:', streams);
+  console.log('[STREAMS-UI] Rendering stream list with', streams.length, 'streams');
+  console.debug('[STREAMS-UI] Streams data:', streams);
  if (Array.isArray(streams)) {
    if (streams.length) {
      // Sort by mtime descending (most recent first)
@@ -551,18 +551,14 @@ function stopPlayback() {

// Load and play audio using HTML5 Audio element for Opus
async function loadAndPlayAudio(uid, playPauseBtn) {
-  console.log(`[streams-ui] loadAndPlayAudio called for UID: ${uid}`);
-
-  // If trying to play the currently paused audio, just resume it
-  if (audioElement && currentUid === uid) {
-    console.log('[streams-ui] Resuming existing audio');
+  // If we already have an audio element for this UID and it's paused, just resume it
+  if (audioElement && currentUid === uid && audioElement.paused) {
    try {
      await audioElement.play();
      isPlaying = true;
      updatePlayPauseButton(playPauseBtn, true);
      return;
    } catch (error) {
-      console.error('Error resuming audio:', error);
      // Fall through to reload if resume fails
    }
  }
@@ -576,11 +572,8 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
  currentUid = uid;

  try {
-    console.log(`[streams-ui] Creating new audio element for ${uid}`);
-
    // Create a new audio element with the correct MIME type
    const audioUrl = `/audio/${encodeURIComponent(uid)}/stream.opus`;
-    console.log(`[streams-ui] Loading audio from: ${audioUrl}`);

    // Create a new audio element with a small delay to prevent race conditions
    await new Promise(resolve => setTimeout(resolve, 50));
@@ -591,19 +584,16 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
    // Set up event handlers with proper binding
    const onPlay = () => {
-      console.log('[streams-ui] Audio play event');
      isPlaying = true;
      updatePlayPauseButton(playPauseBtn, true);
    };

    const onPause = () => {
-      console.log('[streams-ui] Audio pause event');
      isPlaying = false;
      updatePlayPauseButton(playPauseBtn, false);
    };

    const onEnded = () => {
-      console.log('[streams-ui] Audio ended event');
      isPlaying = false;
      cleanupAudio();
    };
@@ -611,18 +601,14 @@ async function loadAndPlayAudio(uid, playPauseBtn) {
    const onError = (e) => {
      // Ignore errors from previous audio elements that were
cleaned up if (!audioElement || audioElement.readyState === 0) { - console.log('[streams-ui] Ignoring error from cleaned up audio element'); return; } - console.error('[streams-ui] Audio error:', e); - console.error('Error details:', audioElement.error); isPlaying = false; updatePlayPauseButton(playPauseBtn, false); // Don't show error to user for aborted requests if (audioElement.error && audioElement.error.code === MediaError.MEDIA_ERR_ABORTED) { - console.log('[streams-ui] Playback was aborted as expected'); return; } @@ -642,7 +628,6 @@ async function loadAndPlayAudio(uid, playPauseBtn) { audioElement._eventHandlers = { onPlay, onPause, onEnded, onError }; // Start playback with error handling - console.log('[streams-ui] Starting audio playback'); try { const playPromise = audioElement.play(); @@ -650,10 +635,8 @@ async function loadAndPlayAudio(uid, playPauseBtn) { await playPromise.catch(error => { // Ignore abort errors when switching between streams if (error.name !== 'AbortError') { - console.error('[streams-ui] Play failed:', error); throw error; } - console.log('[streams-ui] Play was aborted as expected'); }); } @@ -759,27 +742,21 @@ if (streamList) { const uid = playPauseBtn.dataset.uid; if (!uid) { - console.error('No UID found for play button'); return; } - console.log(`[streams-ui] Play/pause clicked for UID: ${uid}, currentUid: ${currentUid}, isPlaying: ${isPlaying}`); - // If clicking the currently playing button, toggle pause/play if (currentUid === uid) { if (isPlaying) { - console.log('[streams-ui] Pausing current audio'); await audioElement.pause(); isPlaying = false; updatePlayPauseButton(playPauseBtn, false); } else { - console.log('[streams-ui] Resuming current audio'); try { await audioElement.play(); isPlaying = true; updatePlayPauseButton(playPauseBtn, true); } catch (error) { - console.error('[streams-ui] Error resuming audio:', error); // If resume fails, try reloading the audio await loadAndPlayAudio(uid, playPauseBtn); } @@ -788,7 +765,6 @@ if (streamList) { } // If a different stream is playing, stop it and start the new one - console.log(`[streams-ui] Switching to new audio stream: ${uid}`); stopPlayback(); await loadAndPlayAudio(uid, playPauseBtn); }); diff --git a/static/upload.js b/static/upload.js index de1b960..4147264 100644 --- a/static/upload.js +++ b/static/upload.js @@ -19,7 +19,7 @@ document.addEventListener('DOMContentLoaded', () => { } const streamInfo = document.getElementById("stream-info"); const streamUrlEl = document.getElementById("streamUrl"); - const spinner = document.getElementById("spinner"); + const spinner = document.getElementById("spinner") || { style: { display: 'none' } }; let abortController; // Upload function @@ -89,6 +89,11 @@ document.addEventListener('DOMContentLoaded', () => { if (window.fetchAndDisplayFiles) { await window.fetchAndDisplayFiles(uid); } + + // Refresh the stream list to update the last update time + if (window.refreshStreamList) { + await window.refreshStreamList(); + } } catch (e) { console.error('Failed to refresh:', e); } @@ -96,8 +101,8 @@ document.addEventListener('DOMContentLoaded', () => { playBeep(432, 0.25, "sine"); } else { - streamInfo.hidden = true; - spinner.style.display = "none"; + if (streamInfo) streamInfo.hidden = true; + if (spinner) spinner.style.display = "none"; if ((data.detail || data.error || "").includes("music")) { showToast("🎵 Upload rejected: singing or music detected."); } else { @@ -190,10 +195,10 @@ document.addEventListener('DOMContentLoaded', () => { const isRenamed = 
file.original_name && file.original_name !== file.name;
      return `
        <li class="file-item">
-          <div class="file-info">
+          <div class="file-info">
            <span class="file-name">${displayName}</span>
-            ${isRenamed ? `<span class="original-name">${file.name}</span>` :
-            ``}
+            ${isRenamed ? `<span class="original-name" title="${file.name}"></span>` :
+            ``}
            <span class="file-size">${sizeMB} MB</span>
          </div>
        </li>
      `;
@@ -203,48 +208,7 @@ document.addEventListener('DOMContentLoaded', () => {
      fileList.innerHTML = '<li>No files uploaded yet</li>';
    }

-    // Add event listeners to delete buttons
-    document.querySelectorAll('.delete-file').forEach(button => {
-      button.addEventListener('click', async (e) => {
-        e.stopPropagation();
-        const filename = button.dataset.filename;
-        if (confirm(`Are you sure you want to delete ${filename}?`)) {
-          try {
-            // Get the auth token from the cookie
-            const token = document.cookie
-              .split('; ')
-              .find(row => row.startsWith('sessionid='))
-              ?.split('=')[1];
-
-            if (!token) {
-              throw new Error('Not authenticated');
-            }
-
-            const response = await fetch(`/delete/${filename}`, {
-              method: 'DELETE',
-              headers: {
-                'Authorization': `Bearer ${token}`,
-                'Content-Type': 'application/json',
-              },
-            });
-
-            if (!response.ok) {
-              const errorData = await response.json().catch(() => ({}));
-              throw new Error(errorData.detail || `Failed to delete file: ${response.statusText}`);
-            }
-
-            // Refresh the file list
-            const uid = document.body.dataset.userUid;
-            if (uid) {
-              fetchAndDisplayFiles(uid);
-            }
-          } catch (error) {
-            console.error('Error deleting file:', error);
-            alert('Failed to delete file. Please try again.');
-          }
-        }
-      });
-    });
+    // Delete button handling is now managed by dashboard.js

    // Update quota display if available
    if (data.quota !== undefined) {
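// upload.js used to wire a click handler onto every .delete-file button after
// each render; that responsibility now lives in dashboard.js. The usual way to
// survive re-renders like this is a single delegated listener on the list
// container. A sketch of that pattern (the 'file-list' id is an assumption,
// the selector and endpoint mirror the removed code, and auth headers are
// elided; dashboard.js's actual implementation may differ):

document.getElementById('file-list')?.addEventListener('click', async (e) => {
  const button = e.target.closest('.delete-file');
  if (!button) return; // click was not on a delete button
  const filename = button.dataset.filename;
  if (!confirm(`Are you sure you want to delete ${filename}?`)) return;
  const response = await fetch(`/delete/${encodeURIComponent(filename)}`, { method: 'DELETE' });
  if (response.ok && window.fetchAndDisplayFiles) {
    window.fetchAndDisplayFiles(document.body.dataset.userUid);
  }
});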
diff --git a/upload.py b/upload.py
index b9742f2..1df43f6 100644
--- a/upload.py
+++ b/upload.py
@@ -6,11 +6,13 @@
 from slowapi.util import get_remote_address
 from slowapi.errors import RateLimitExceeded
 from pathlib import Path
 import json
+import requests
 from datetime import datetime
 from convert_to_opus import convert_to_opus
-from models import UploadLog, UserQuota, User
-from sqlalchemy import select
+from models import UploadLog, UserQuota, User, PublicStream
+from sqlalchemy import select, or_
 from database import get_db
+from sqlalchemy.orm import Session
 
 limiter = Limiter(key_func=get_remote_address)
 router = APIRouter()
@@ -23,55 +25,63 @@
 DATA_ROOT = Path("./data")
 
 @router.post("/upload")
 async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)):
     from log import log_violation
+    import time
+
+    # Generate a unique request ID for this upload
+    request_id = str(int(time.time()))
+    log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Starting upload of {file.filename}")
+
     try:
-        user_dir = DATA_ROOT / uid
-        user_dir.mkdir(parents=True, exist_ok=True)
-
-        raw_path = user_dir / ("raw." + file.filename.split(".")[-1])
-        import uuid
-
-        unique_name = str(uuid.uuid4()) + ".opus"
-
-        # Save temp upload FIRST
-        with open(raw_path, "wb") as f:
-            f.write(await file.read())
-
-        # Block music/singing via Ollama prompt
-        import requests
-        try:
-            with open(raw_path, "rb") as f:
-                audio = f.read()
-            res = requests.post("http://localhost:11434/api/generate", json={
-                "model": "whisper",
-                "prompt": "Does this audio contain music or singing?
diff --git a/upload.py b/upload.py
index b9742f2..1df43f6 100644
--- a/upload.py
+++ b/upload.py
@@ -6,11 +6,13 @@ from slowapi.util import get_remote_address
 from slowapi.errors import RateLimitExceeded
 from pathlib import Path
 import json
+import requests
 from datetime import datetime
 from convert_to_opus import convert_to_opus
-from models import UploadLog, UserQuota, User
-from sqlalchemy import select
+from models import UploadLog, UserQuota, User, PublicStream
+from sqlalchemy import select, or_
 from database import get_db
+from sqlalchemy.orm import Session
 
 limiter = Limiter(key_func=get_remote_address)
 router = APIRouter()
@@ -23,55 +25,63 @@ DATA_ROOT = Path("./data")
 
 @router.post("/upload")
 async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)):
     from log import log_violation
+    import time
+
+    # Generate a unique request ID for this upload
+    request_id = str(int(time.time()))
+    log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Starting upload of {file.filename}")
+
     try:
-        user_dir = DATA_ROOT / uid
-        user_dir.mkdir(parents=True, exist_ok=True)
-
-        raw_path = user_dir / ("raw." + file.filename.split(".")[-1])
-        import uuid
-
-        unique_name = str(uuid.uuid4()) + ".opus"
-
-        # Save temp upload FIRST
-        with open(raw_path, "wb") as f:
-            f.write(await file.read())
-
-        # Block music/singing via Ollama prompt
-        import requests
-        try:
-            with open(raw_path, "rb") as f:
-                audio = f.read()
-            res = requests.post("http://localhost:11434/api/generate", json={
-                "model": "whisper",
-                "prompt": "Does this audio contain music or singing? Answer yes or no only.",
-                "audio": audio
-            }, timeout=10)
-            resp = res.json().get("response", "").lower()
-            if "yes" in resp:
-                raw_path.unlink(missing_ok=True)
-                raise HTTPException(status_code=403, detail="Upload rejected: music or singing detected")
-        except Exception as ollama_err:
-            # fallback: allow, log if needed
-            pass
-        processed_path = user_dir / unique_name
-
-        # Block unconfirmed users (use ORM)
+        # First, verify the user exists and is confirmed
         user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
-        # If result is a Row or tuple, extract the User object
         if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
             user = user[0]
-        from log import log_violation
-        log_violation("UPLOAD", request.client.host, uid, f"DEBUG: Incoming uid={uid}, user found={user}, confirmed={getattr(user, 'confirmed', None)}")
-        log_violation("UPLOAD", request.client.host, uid, f"DEBUG: After unpack, user={user}, type={type(user)}, confirmed={getattr(user, 'confirmed', None)}")
+
+        log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] User check - found: {user is not None}, confirmed: {getattr(user, 'confirmed', False) if user else 'N/A'}")
+
         if not user or not hasattr(user, "confirmed") or not user.confirmed:
-            raw_path.unlink(missing_ok=True)
             raise HTTPException(status_code=403, detail="Account not confirmed")
 
-        # DB-based quota check
-        quota = db.get(UserQuota, uid)
-        if quota and quota.storage_bytes >= 100 * 1024 * 1024:
-            raw_path.unlink(missing_ok=True)
+        # Check quota before doing any file operations
+        quota = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
+        if quota.storage_bytes >= 100 * 1024 * 1024:
             raise HTTPException(status_code=400, detail="Quota exceeded")
+
+        # Create user directory if it doesn't exist
+        user_dir = DATA_ROOT / uid
+        user_dir.mkdir(parents=True, exist_ok=True)
+
+        # Generate a unique filename for the processed file first
+        import uuid
+        unique_name = f"{uuid.uuid4()}.opus"
+        raw_ext = file.filename.split(".")[-1].lower()
+        raw_path = user_dir / ("raw." + raw_ext)
+        processed_path = user_dir / unique_name
+
+        # Clean up any existing raw files first (except the one we're about to create)
+        for old_file in user_dir.glob('raw.*'):
+            try:
+                if old_file != raw_path:  # Don't delete the file we're about to create
+                    old_file.unlink(missing_ok=True)
+                    log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Cleaned up old file: {old_file}")
+            except Exception as e:
+                log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_file}: {e}")
+
+        # Save the uploaded file temporarily
+        log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Saving temporary file to {raw_path}")
+        try:
+            with open(raw_path, "wb") as f:
+                content = await file.read()
+                if not content:
+                    raise ValueError("Uploaded file is empty")
+                f.write(content)
+            log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Successfully wrote {len(content)} bytes to {raw_path}")
+        except Exception as e:
+            log_violation("UPLOAD_ERROR", request.client.host, uid, f"[{request_id}] Failed to save {raw_path}: {e}")
+            raise HTTPException(status_code=500, detail=f"Failed to save uploaded file: {e}")
+
+        # Ollama music/singing check is disabled for this release
+        log_violation("UPLOAD", request.client.host, uid, f"[{request_id}] Ollama music/singing check is disabled")
 
         try:
             convert_to_opus(str(raw_path), str(processed_path))
@@ -82,44 +92,96 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f
         original_size = raw_path.stat().st_size
         raw_path.unlink(missing_ok=True)  # cleanup
 
+        # First, verify the file was created and has content
+        if not processed_path.exists() or processed_path.stat().st_size == 0:
+            raise HTTPException(status_code=500, detail="Failed to process audio file")
+
         # Concatenate all .opus files in random order to stream.opus for public playback
+        # This is now done after the file is in its final location with log ID
         from concat_opus import concat_opus_files
-        try:
-            concat_opus_files(user_dir, user_dir / "stream.opus")
-        except Exception as e:
-            # fallback: just use the latest processed file if concat fails
-            import shutil
-            stream_path = user_dir / "stream.opus"
-            shutil.copy2(processed_path, stream_path)
-
-        # Create a log entry with the original filename
-        log = UploadLog(
-            uid=uid,
-            ip=request.client.host,
-            filename=file.filename,  # Store original filename
-            processed_filename=unique_name,  # Store the processed filename
-            size_bytes=original_size
-        )
-        db.add(log)
-        db.commit()
-        db.refresh(log)
-
-        # Rename the processed file to include the log ID for better tracking
-        processed_with_id = user_dir / f"{log.id}_{unique_name}"
-        processed_path.rename(processed_with_id)
-        processed_path = processed_with_id
-
-        # Store updated quota
+        def update_stream_opus():
+            try:
+                concat_opus_files(user_dir, user_dir / "stream.opus")
+            except Exception as e:
+                # fallback: just use the latest processed file if concat fails
+                import shutil
+                stream_path = user_dir / "stream.opus"
+                shutil.copy2(processed_path, stream_path)
+                log_violation("STREAM_UPDATE", request.client.host, uid,
+                              f"[fallback] Updated stream.opus with {processed_path}")
+
+        # We'll call this after the file is in its final location
+
+        # Get the final file size
         size = processed_path.stat().st_size
-        quota = db.get(UserQuota, uid)
-        if not quota:
-            quota = UserQuota(uid=uid)
-            db.add(quota)
-        quota.storage_bytes += size
-        db.commit()
 
-        # Update public streams list
-        update_public_streams(uid, quota.storage_bytes)
+        # Start a transaction
+        try:
+            # Create a log entry with the original filename
+            log = UploadLog(
+                uid=uid,
+                ip=request.client.host,
+                filename=file.filename,  # Store original filename
+                processed_filename=unique_name,  # Store the processed filename
+                size_bytes=size
+            )
+            db.add(log)
+            db.flush()  # Get the log ID without committing
+
+            # Rename the processed file to include the log ID for better tracking
+            processed_with_id = user_dir / f"{log.id}_{unique_name}"
+            if processed_path.exists():
+                # First check if there's already a file with the same UUID but different prefix
+                for existing_file in user_dir.glob(f"*_{unique_name}"):
+                    if existing_file != processed_path:
+                        log_violation("CLEANUP", request.client.host, uid,
+                                      f"[UPLOAD] Removing duplicate file: {existing_file}")
+                        existing_file.unlink(missing_ok=True)
+
+                # Now do the rename
+                if processed_path != processed_with_id:
+                    if processed_with_id.exists():
+                        processed_with_id.unlink(missing_ok=True)
+                    processed_path.rename(processed_with_id)
+                    processed_path = processed_with_id
+
+            # Only clean up raw.* files, not previously uploaded opus files
+            for old_temp_file in user_dir.glob('raw.*'):
+                try:
+                    old_temp_file.unlink(missing_ok=True)
+                    log_violation("CLEANUP", request.client.host, uid, f"[{request_id}] Cleaned up temp file: {old_temp_file}")
+                except Exception as e:
+                    log_violation("CLEANUP_ERROR", request.client.host, uid, f"[{request_id}] Failed to clean up {old_temp_file}: {e}")
+
+            # Get or create quota
+            quota = db.query(UserQuota).filter(UserQuota.uid == uid).first()
+            if not quota:
+                quota = UserQuota(uid=uid, storage_bytes=0)
+                db.add(quota)
+
+            # Update quota with the new file size
+            quota.storage_bytes = sum(
+                f.stat().st_size
+                for f in user_dir.glob('*.opus')
+                if f.name != 'stream.opus' and f != processed_path
+            ) + size
+
+            # Update public streams
+            update_public_streams(uid, quota.storage_bytes, db)
+
+            # Commit the transaction
+            db.commit()
+
+            # Now that the transaction is committed and files are in their final location,
+            # update the stream.opus file to include all files
+            update_stream_opus()
+
+        except Exception as e:
+            db.rollback()
+            # Clean up the processed file if something went wrong
+            if processed_path.exists():
+                processed_path.unlink(missing_ok=True)
+            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
 
         return {
             "filename": file.filename,
@@ -142,37 +204,33 @@ async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), f
         return {"detail": f"Server error: {type(e).__name__}: {str(e)}"}
 
 
-def update_public_streams(uid: str, storage_bytes: int, db = Depends(get_db)):
+def update_public_streams(uid: str, storage_bytes: int, db: Session):
     """Update the public streams list in the database with the latest user upload info"""
     try:
-        from models import PublicStream
-
-        # Get or create the public stream record
-        public_stream = db.get(PublicStream, uid)
-        current_time = datetime.utcnow()
-
-        if public_stream is None:
-            # Create a new record if it doesn't exist
-            public_stream = PublicStream(
-                uid=uid,
-                size=storage_bytes,
-                mtime=int(current_time.timestamp()),
-                created_at=current_time,
-                updated_at=current_time
-            )
+        # Get the user's info
+        user = db.query(User).filter(User.username == uid).first()
+        if not user:
+            print(f"[WARNING] User {uid} not found when updating public streams")
+            return
+
+        # Try to get existing public stream or create new one
+        public_stream = db.query(PublicStream).filter(PublicStream.uid == uid).first()
+        if not public_stream:
+            public_stream = PublicStream(uid=uid)
             db.add(public_stream)
-        else:
-            # Update existing record
-            public_stream.size = storage_bytes
-            public_stream.mtime = int(current_time.timestamp())
-            public_stream.updated_at = current_time
+
+        # Update the public stream info
+        public_stream.username = user.username
+        public_stream.display_name = user.display_name or user.username
+        public_stream.storage_bytes = storage_bytes
+        public_stream.last_updated = datetime.utcnow()
 
-        db.commit()
-        db.refresh(public_stream)
+        # Don't commit here - let the caller handle the transaction
+        db.flush()
     except Exception as e:
-        db.rollback()
+        # Just log the error and let the caller handle the rollback
+        print(f"[ERROR] Error updating public streams: {e}")
         import traceback
-        print(f"Error updating public streams in database: {e}")
-        print(traceback.format_exc())
-        raise
+        traceback.print_exc()
+        raise  # Re-raise to let the caller handle the error
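With this change update_public_streams only flushes, so the upload handler owns the transaction: one commit covers the UploadLog row, the quota update, and the PublicStream row, and one rollback undoes all three. A minimal sketch of how a caller composes such flush-only helpers, assuming the session and the helper from this patch; finalize_upload is a hypothetical name:

from sqlalchemy.orm import Session

from upload import update_public_streams  # the helper reworked in this patch

def finalize_upload(db: Session, uid: str, storage_bytes: int) -> None:
    """Run flush-only helpers under a single commit, mirroring the
    transaction shape of the reworked /upload endpoint."""
    try:
        update_public_streams(uid, storage_bytes, db)  # flushes, never commits
        db.commit()    # the single commit for the whole unit of work
    except Exception:
        db.rollback()  # helpers re-raise so the owner can roll back
        raise

Keeping helpers commit-free is what makes the endpoint's rollback path safe: a failure in any step leaves no partially updated rows behind.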