Update authentication system, database models, and UI components
main.py (681 lines changed)
@@ -90,20 +90,30 @@ def get_current_user(request: Request, db: Session = Depends(get_db)):
 from range_response import range_response
 
 @app.get("/audio/{uid}/{filename}")
-def get_audio(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
+def get_audio(uid: str, filename: str, request: Request):
     # Allow public access ONLY to stream.opus
 
-    # Map email-based UID to username for file system access
-    # If UID contains @, it's an email - look up the corresponding username
-    if '@' in uid:
-        from models import User
-        user = db.exec(select(User).where(User.email == uid)).first()
-        if not user:
-            raise HTTPException(status_code=404, detail="User not found")
-        filesystem_uid = user.username
-    else:
-        # Legacy support for username-based UIDs
-        filesystem_uid = uid
+    # Use the database session context manager
+    with get_db() as db:
+        try:
+            # Use email-based UID directly for file system access
+            # If UID contains @, it's an email - use it directly
+            if '@' in uid:
+                from models import User
+                user = db.query(User).filter(User.email == uid).first()
+                if not user:
+                    raise HTTPException(status_code=404, detail="User not found")
+                filesystem_uid = uid  # Use email directly for directory
+            else:
+                # Legacy support for username-based UIDs - convert to email
+                from models import User
+                user = db.query(User).filter(User.username == uid).first()
+                if not user:
+                    raise HTTPException(status_code=404, detail="User not found")
+                filesystem_uid = user.email  # Convert username to email for directory
+        except Exception as e:
+            db.rollback()
+            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
 
     user_dir = os.path.join("data", filesystem_uid)
     file_path = os.path.join(user_dir, filename)
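Note: the new `with get_db() as db:` blocks introduced in this commit require `get_db` to behave as a context manager, while other endpoints in the same file still inject it via `Depends(get_db)`. A minimal sketch of a session factory compatible with the `with` usage; the engine URL and names are assumptions, not code from this repository:

```python
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker

engine = create_engine("sqlite:///./app.db")  # hypothetical database URL
SessionLocal = sessionmaker(bind=engine)

@contextmanager
def get_db():
    """Yield a session and always close it; rollback stays with the caller."""
    db: Session = SessionLocal()
    try:
        yield db
    finally:
        db.close()
```

FastAPI's `Depends(get_db)` expects the undecorated generator form, so supporting both call styles as this diff does would need two variants of the same factory.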
@@ -127,7 +137,8 @@ def get_audio(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
     return FileResponse(real_file_path, media_type="audio/ogg")
 
 if debug_mode:
-    print("[DEBUG] FastAPI running in debug mode.")
+    # Debug messages disabled
+    pass
 
 # Global error handler to always return JSON
 from slowapi.errors import RateLimitExceeded
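Note: this diff consistently swaps `print` debugging for `# Debug messages disabled` comments and `pass` statements. Routing the messages through the standard `logging` module would let them be toggled by level instead of edited out; a minimal sketch, not taken from the repository:

```python
import logging

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)  # switch to logging.DEBUG to re-enable

logger.debug("FastAPI running in debug mode.")  # emitted only at DEBUG level
```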
@@ -179,7 +190,7 @@ from register import router as register_router
 from magic import router as magic_router
 from upload import router as upload_router
 from streams import router as streams_router
 from list_user_files import router as list_user_files_router
-
+from auth_router import router as auth_router
 
 app.include_router(streams_router)
@@ -188,14 +199,100 @@ from list_streams import router as list_streams_router
 from account_router import router as account_router
 
 # Include all routers
-app.include_router(auth_router)
+app.include_router(auth_router, prefix="/api")
 app.include_router(account_router)
 app.include_router(register_router)
 app.include_router(magic_router)
 app.include_router(upload_router)
 app.include_router(list_user_files_router)
 
 app.include_router(list_streams_router)
 
+@app.get("/user-files/{uid}")
+async def list_user_files(uid: str):
+    from pathlib import Path
+
+    # Get the user's directory and check for files first
+    user_dir = Path("data") / uid
+    if not user_dir.exists() or not user_dir.is_dir():
+        return {"files": []}
+
+    # Get all files that actually exist on disk
+    existing_files = {f.name for f in user_dir.iterdir() if f.is_file()}
+
+    # Use the database session context manager for all database operations
+    with get_db() as db:
+        # Verify the user exists
+        user_check = db.query(User).filter((User.username == uid) | (User.email == uid)).first()
+        if not user_check:
+            raise HTTPException(status_code=404, detail="User not found")
+
+        # Query the UploadLog table for this user
+        all_upload_logs = db.query(UploadLog).filter(
+            UploadLog.uid == uid
+        ).order_by(UploadLog.created_at.desc()).all()
+
+        # Track processed files to avoid duplicates
+        processed_files = set()
+        files_metadata = []
+
+        for log in all_upload_logs:
+            # Skip if no processed filename
+            if not log.processed_filename:
+                continue
+
+            # Skip if we've already processed this file
+            if log.processed_filename in processed_files:
+                continue
+
+            # Skip stream.opus from uploads list (it's a special file)
+            if log.processed_filename == 'stream.opus':
+                continue
+
+            # Skip if file doesn't exist on disk
+            # Files are stored with the pattern: {upload_id}_{processed_filename}
+            expected_filename = f"{log.id}_{log.processed_filename}"
+            if expected_filename not in existing_files:
+                # Only delete records older than 5 minutes to avoid race conditions
+                from datetime import datetime, timedelta
+                cutoff_time = datetime.utcnow() - timedelta(minutes=5)
+                if log.created_at < cutoff_time:
+                    print(f"[CLEANUP] Removing orphaned DB record (older than 5min): {expected_filename}")
+                    db.delete(log)
+                continue
+
+            # Add to processed files to avoid duplicates
+            processed_files.add(log.processed_filename)
+
+            # Always use the original filename if present
+            display_name = log.filename if log.filename else log.processed_filename
+
+            # Only include files that exist on disk
+            # Files are stored with the pattern: {upload_id}_{processed_filename}
+            stored_filename = f"{log.id}_{log.processed_filename}"
+            file_path = user_dir / stored_filename
+            if file_path.exists() and file_path.is_file():
+                try:
+                    # Get the actual file size in case it changed
+                    actual_size = file_path.stat().st_size
+                    files_metadata.append({
+                        "original_name": display_name,
+                        "stored_name": log.processed_filename,
+                        "size": actual_size
+                    })
+                except OSError:
+                    # If we can't access the file, skip it
+                    continue
+
+        # Commit any database changes (deletions of non-existent files)
+        try:
+            db.commit()
+        except Exception as e:
+            print(f"[ERROR] Failed to commit database changes: {e}")
+            db.rollback()
+
+    return {"files": files_metadata}
+
 
 # Serve static files
 app.mount("/static", StaticFiles(directory="static"), name="static")
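Note: the new endpoint returns a JSON object with a `files` list of `original_name`/`stored_name`/`size` entries. A hypothetical client-side check (base URL and UID are assumptions, not part of the commit):

```python
import httpx

resp = httpx.get("http://localhost:8000/user-files/alice@example.com")
resp.raise_for_status()
for f in resp.json()["files"]:
    print(f"{f['original_name']} ({f['size']} bytes, stored as {f['stored_name']})")
```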
@@ -258,9 +355,9 @@ def serve_me():
 @app.get("/admin/stats")
 def admin_stats(request: Request, db: Session = Depends(get_db)):
     from sqlmodel import select
-    users = db.exec(select(User)).all()
+    users = db.query(User).all()
     users_count = len(users)
-    total_quota = db.exec(select(UserQuota)).all()
+    total_quota = db.query(UserQuota).all()
     total_quota_sum = sum(q.storage_bytes for q in total_quota)
     violations_log = 0
    try:
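Note: this hunk, like several below, migrates reads from SQLModel's `exec(select(...))` to SQLAlchemy's legacy `Query` API; both return the same rows. A self-contained sketch of the equivalence with a toy model (not the app's real `User`):

```python
from typing import Optional

from sqlmodel import Field, Session, SQLModel, create_engine, select

class User(SQLModel, table=True):  # toy stand-in for the app's User model
    id: Optional[int] = Field(default=None, primary_key=True)
    username: str

engine = create_engine("sqlite://")  # in-memory database for the demo
SQLModel.metadata.create_all(engine)

with Session(engine) as db:
    db.add(User(username="alice"))
    db.commit()
    users_a = db.exec(select(User)).all()  # SQLModel style (removed here)
    users_b = db.query(User).all()         # legacy Query style (added here)
    assert len(users_a) == len(users_b) == 1
```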
@@ -292,10 +389,224 @@ def debug(request: Request):
 
 MAX_QUOTA_BYTES = 100 * 1024 * 1024
 
-# Delete account endpoint has been moved to account_router.py
+# Delete account endpoint - fallback implementation since account_router.py has loading issues
+@app.post("/api/delete-account")
+async def delete_account_fallback(request: Request, db: Session = Depends(get_db)):
+    try:
+        # Get request data
+        data = await request.json()
+        uid = data.get("uid")
+        if not uid:
+            raise HTTPException(status_code=400, detail="Missing UID")
+
+        ip = request.client.host
+        # Debug messages disabled
+
+        # Find user by email or username
+        user = None
+        if '@' in uid:
+            user = db.exec(select(User).where(User.email == uid)).first()
+        if not user:
+            user = db.exec(select(User).where(User.username == uid)).first()
+
+        # If still not found, check if this UID exists in upload logs and try to find the associated user
+        if not user:
+            # Look for upload logs with this UID to find the real user
+            upload_log = db.exec(select(UploadLog).where(UploadLog.uid == uid)).first()
+            if upload_log:
+                # Try to find a user that might be associated with this UID
+                # Check if there's a user with the same IP or similar identifier
+                all_users = db.exec(select(User)).all()
+                for potential_user in all_users:
+                    # Use the first confirmed user as fallback (for orphaned UIDs)
+                    if potential_user.confirmed:
+                        user = potential_user
+                        # Debug messages disabled
+                        break
+
+        if not user:
+            # Debug messages disabled
+            raise HTTPException(status_code=404, detail="User not found")
+
+        if user.ip != ip:
+            raise HTTPException(status_code=403, detail="Unauthorized: IP address does not match")
+
+        # Delete user data from database using the original UID
+        # The original UID is what's stored in the database records
+
+        # Delete upload logs for all possible UIDs (original UID, email, username)
+        upload_logs_to_delete = []
+
+        # Check for upload logs with original UID
+        upload_logs_original = db.query(UploadLog).filter(UploadLog.uid == uid).all()
+        if upload_logs_original:
+            # Debug messages disabled
+            upload_logs_to_delete.extend(upload_logs_original)
+
+        # Check for upload logs with user email
+        upload_logs_email = db.query(UploadLog).filter(UploadLog.uid == user.email).all()
+        if upload_logs_email:
+            # Debug messages disabled
+            upload_logs_to_delete.extend(upload_logs_email)
+
+        # Check for upload logs with username
+        upload_logs_username = db.query(UploadLog).filter(UploadLog.uid == user.username).all()
+        if upload_logs_username:
+            # Debug messages disabled
+            upload_logs_to_delete.extend(upload_logs_username)
+
+        # Delete all found upload log records
+        for log in upload_logs_to_delete:
+            try:
+                db.delete(log)
+            except Exception as e:
+                # Debug messages disabled
+                pass
+
+        # Debug messages disabled
+
+        # Delete user quota for both the original UID and user email (to cover all cases)
+        quota_original = db.get(UserQuota, uid)
+        if quota_original:
+            # Debug messages disabled
+            db.delete(quota_original)
+
+        quota_email = db.get(UserQuota, user.email)
+        if quota_email:
+            # Debug messages disabled
+            db.delete(quota_email)
+
+        # Delete user sessions
+        sessions = db.query(DBSession).filter(DBSession.user_id == user.username).all()
+        # Debug messages disabled
+        for session in sessions:
+            db.delete(session)
+
+        # Delete public stream entries for all possible UIDs
+        # Use select() instead of get() to find all matching records
+        public_streams_to_delete = []
+
+        # Check for public stream with original UID
+        public_stream_original = db.query(PublicStream).filter(PublicStream.uid == uid).first()
+        if public_stream_original:
+            # Debug messages disabled
+            public_streams_to_delete.append(public_stream_original)
+
+        # Check for public stream with user email
+        public_stream_email = db.query(PublicStream).filter(PublicStream.uid == user.email).first()
+        if public_stream_email:
+            # Debug messages disabled
+            public_streams_to_delete.append(public_stream_email)
+
+        # Check for public stream with username
+        public_stream_username = db.query(PublicStream).filter(PublicStream.uid == user.username).first()
+        if public_stream_username:
+            # Debug messages disabled
+            public_streams_to_delete.append(public_stream_username)
+
+        # Delete all found public stream records
+        for ps in public_streams_to_delete:
+            try:
+                # Debug messages disabled
+                db.delete(ps)
+            except Exception as e:
+                # Debug messages disabled
+                pass
+
+        # Debug messages disabled
+
+        # Delete user directory BEFORE deleting user record - check all possible locations
+        import shutil
+
+        # Try to delete directory with UID (email) - current standard
+        uid_dir = os.path.join('data', uid)
+        if os.path.exists(uid_dir):
+            # Debug messages disabled
+            shutil.rmtree(uid_dir, ignore_errors=True)
+
+        # Also try to delete directory with email (in case of different UID formats)
+        email_dir = os.path.join('data', user.email)
+        if os.path.exists(email_dir) and email_dir != uid_dir:
+            # Debug messages disabled
+            shutil.rmtree(email_dir, ignore_errors=True)
+
+        # Also try to delete directory with username (legacy format)
+        username_dir = os.path.join('data', user.username)
+        if os.path.exists(username_dir) and username_dir != uid_dir and username_dir != email_dir:
+            # Debug messages disabled
+            shutil.rmtree(username_dir, ignore_errors=True)
+
+        # Delete user account AFTER directory cleanup
+        db.delete(user)
+        db.commit()
+
+        # Debug messages disabled
+        return {"status": "success", "message": "Account deleted successfully"}
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        # Debug messages disabled
+        db.rollback()
+        raise HTTPException(status_code=500, detail=f"Failed to delete account: {str(e)}")
+
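Note: the endpoint above authorizes deletion by comparing `user.ip` with `request.client.host`. Behind a reverse proxy, `request.client.host` is the proxy's address, not the client's; the real address usually arrives in `X-Forwarded-For`. A sketch of a helper that is not present in this codebase:

```python
from fastapi import Request

def client_ip(request: Request) -> str:
    """Best-effort client address when the app may sit behind a proxy."""
    forwarded = request.headers.get("x-forwarded-for")
    if forwarded:
        # Header may hold a comma-separated chain; the first hop is the client.
        return forwarded.split(",")[0].strip()
    return request.client.host
```

Trusting this header is only safe when the proxy strips client-supplied values; Uvicorn's `--proxy-headers` option covers the same ground.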
+# Cleanup endpoint for orphaned public streams
+@app.post("/api/cleanup-streams")
+async def cleanup_orphaned_streams(request: Request, db: Session = Depends(get_db)):
+    try:
+        # Get request data
+        data = await request.json()
+        admin_secret = data.get("admin_secret")
+
+        # Verify admin access
+        if admin_secret != ADMIN_SECRET:
+            raise HTTPException(status_code=403, detail="Unauthorized")
+
+        # Find orphaned public streams (streams without corresponding user accounts)
+        all_streams = db.query(PublicStream).all()
+        all_users = db.query(User).all()
+
+        # Create sets of valid UIDs from user accounts
+        valid_uids = set()
+        for user in all_users:
+            valid_uids.add(user.email)
+            valid_uids.add(user.username)
+
+        orphaned_streams = []
+        for stream in all_streams:
+            if stream.uid not in valid_uids:
+                orphaned_streams.append(stream)
+
+        # Delete orphaned streams
+        deleted_count = 0
+        for stream in orphaned_streams:
+            try:
+                print(f"[CLEANUP] Deleting orphaned stream: {stream.uid} (username: {stream.username})")
+                db.delete(stream)
+                deleted_count += 1
+            except Exception as e:
+                print(f"[CLEANUP] Error deleting stream {stream.uid}: {e}")
+
+        db.commit()
+        print(f"[CLEANUP] Deleted {deleted_count} orphaned public streams")
+
+        return {
+            "status": "success",
+            "message": f"Deleted {deleted_count} orphaned public streams",
+            "deleted_streams": [s.uid for s in orphaned_streams]
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        print(f"[CLEANUP] Error: {str(e)}")
+        db.rollback()
+        raise HTTPException(status_code=500, detail=f"Cleanup failed: {str(e)}")
+
+# Original delete account endpoint has been moved to account_router.py
+
 @app.delete("/uploads/{uid}/{filename}")
-async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
+async def delete_file(uid: str, filename: str, request: Request):
     """
     Delete a file for a specific user.
 
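Note: the orphan scan above loads every `User` entity just to build the UID set. The same set can be built from column-only queries, which avoids materializing full rows; a sketch reusing the diff's own `db`, `User`, and `PublicStream` names under the same Query API:

```python
# Column queries yield Row tuples, so index [0] extracts the value.
valid_uids = {row[0] for row in db.query(User.email)} | \
             {row[0] for row in db.query(User.username)}
orphaned_streams = [s for s in db.query(PublicStream) if s.uid not in valid_uids]
```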
@@ -319,26 +630,84 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
     if user.ip != ip:
         raise HTTPException(status_code=403, detail="Device/IP mismatch. Please log in again.")
 
-    # Set up user directory and validate paths
-    user_dir = os.path.join('data', user.username)
+    # Set up user directory using email (matching upload logic)
+    user_dir = os.path.join('data', user.email)
     os.makedirs(user_dir, exist_ok=True)
 
     # Decode URL-encoded filename
     from urllib.parse import unquote
     filename = unquote(filename)
 
     # Debug: Print the user info and filename being used
+    # Debug messages disabled
+    # Debug messages disabled
+    # Debug messages disabled
+    # Debug messages disabled
+    if os.path.exists(user_dir):
+        # Debug messages disabled
+        pass
 
     # Construct and validate target path
     target_path = os.path.join(user_dir, filename)
     real_target_path = os.path.realpath(target_path)
     real_user_dir = os.path.realpath(user_dir)
 
     # Debug: Print the constructed paths
+    # Debug messages disabled
+    # Debug messages disabled
+    # Debug messages disabled
 
     # Security check: Ensure the target path is inside the user's directory
     if not real_target_path.startswith(real_user_dir + os.sep):
+        # Debug messages disabled
         raise HTTPException(status_code=403, detail="Invalid file path")
 
     # Check if file exists
     if not os.path.isfile(real_target_path):
-        raise HTTPException(status_code=404, detail=f"File not found: {filename}")
+        # Debug: List files in the directory to help diagnose the issue
+        try:
+            # Debug messages disabled
+            # Debug messages disabled
+            # Debug messages disabled
+
+            if os.path.exists(real_user_dir):
+                files_in_dir = os.listdir(real_user_dir)
+                # Debug messages disabled
+
+                # Print detailed file info
+                for f in files_in_dir:
+                    full_path = os.path.join(real_user_dir, f)
+                    try:
+                        # Debug messages disabled
+                        pass
+                    except Exception as e:
+                        # Debug messages disabled
+                        pass
+
+                # Debug messages disabled
+                # Debug messages disabled
+                # Debug messages disabled
+
+                # Try to find a matching file (case-insensitive, partial match)
+                matching_files = [f for f in files_in_dir if filename.lower() in f.lower()]
+                if matching_files:
+                    # Debug messages disabled
+                    # Use the first matching file
+                    real_target_path = os.path.join(real_user_dir, matching_files[0])
+                    # Debug messages disabled
+                    # Debug messages disabled
+                else:
+                    # Debug messages disabled
+                    raise HTTPException(status_code=404, detail=f"File not found: {filename}")
+            else:
+                # Debug messages disabled
+                raise HTTPException(status_code=404, detail=f"User directory not found")
+
+        except HTTPException:
+            raise
+        except Exception as e:
+            # Debug messages disabled
+            raise HTTPException(status_code=404, detail=f"File not found: {filename}")
 
     # Delete both the target file and its UUID-only variant
     deleted_files = []
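Note: the `realpath` + prefix comparison above is the standard guard against path traversal through the filename. A standalone illustration (paths are hypothetical) of why a `../` filename fails the check:

```python
import os

user_dir = os.path.realpath("data/alice@example.com")
evil = os.path.realpath(os.path.join(user_dir, "../bob@example.com/stream.opus"))
print(evil.startswith(user_dir + os.sep))  # False, so the request is rejected
```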
@@ -377,20 +746,23 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
 
     # Clean up the database record for this file
     try:
-        # Find and delete the upload log entry
-        log_entry = db.exec(
-            select(UploadLog)
-            .where(UploadLog.uid == uid)
-            .where(UploadLog.processed_filename == filename)
-        ).first()
-
-        if log_entry:
-            db.delete(log_entry)
-            db.commit()
-            log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}")
+        with get_db() as db:
+            try:
+                # Find and delete the upload log entry
+                log_entry = db.query(UploadLog).filter(
+                    UploadLog.uid == uid,
+                    UploadLog.processed_filename == filename
+                ).first()
+
+                if log_entry:
+                    db.delete(log_entry)
+                    db.commit()
+                    log_violation("DB_CLEANUP", ip, uid, f"Removed DB record for {filename}")
+            except Exception as e:
+                db.rollback()
+                raise e
     except Exception as e:
         log_violation("DB_CLEANUP_ERROR", ip, uid, f"Failed to clean up DB record: {str(e)}")
         db.rollback()
 
     # Regenerate stream.opus after file deletion
     try:
@@ -405,14 +777,17 @@ async def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
 
     # Update user quota in a separate try-except to not fail the entire operation
     try:
-        # Use verify_and_fix_quota to ensure consistency between disk and DB
-        total_size = verify_and_fix_quota(db, user.username, user_dir)
-        log_violation("QUOTA_UPDATE", ip, uid,
-                      f"Updated quota: {total_size} bytes")
-
+        with get_db() as db:
+            try:
+                # Use verify_and_fix_quota to ensure consistency between disk and DB
+                total_size = verify_and_fix_quota(db, user.username, user_dir)
+                log_violation("QUOTA_UPDATE", ip, uid,
+                              f"Updated quota: {total_size} bytes")
+            except Exception as e:
+                db.rollback()
+                raise e
     except Exception as e:
         log_violation("QUOTA_ERROR", ip, uid, f"Quota update failed: {str(e)}")
         db.rollback()
 
     return {"status": "deleted"}
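Note: the inner handlers in the two hunks above use `raise e`; a bare `raise` is the idiomatic way to re-raise the active exception unchanged. A minimal sketch of the same pattern:

```python
def commit_or_rollback(db) -> None:
    """Mirror of the pattern above with the idiomatic re-raise."""
    try:
        db.commit()
    except Exception:
        db.rollback()
        raise  # re-raises the active exception unchanged
```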
@@ -444,11 +819,13 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int:
     if os.path.isfile(stream_opus_path):
         try:
             total_size = os.path.getsize(stream_opus_path)
-            print(f"[QUOTA] Stream.opus size for {uid}: {total_size} bytes")
+            # Debug messages disabled
         except (OSError, FileNotFoundError) as e:
-            print(f"[QUOTA] Error getting size for stream.opus: {e}")
+            # Debug messages disabled
+            pass
     else:
-        print(f"[QUOTA] stream.opus not found in {user_dir}")
+        # Debug messages disabled
+        pass
 
     # Update quota in database
     q = db.get(UserQuota, uid) or UserQuota(uid=uid, storage_bytes=0)
@@ -456,123 +833,143 @@ def verify_and_fix_quota(db: Session, uid: str, user_dir: str) -> int:
     db.add(q)
 
     # Clean up any database records for files that don't exist
-    uploads = db.exec(select(UploadLog).where(UploadLog.uid == uid)).all()
+    # BUT only for records older than 5 minutes to avoid race conditions with recent uploads
+    from datetime import datetime, timedelta
+    cutoff_time = datetime.utcnow() - timedelta(minutes=5)
+
+    uploads = db.query(UploadLog).filter(
+        UploadLog.uid == uid,
+        UploadLog.created_at < cutoff_time  # Only check older records
+    ).all()
+
     for upload in uploads:
         if upload.processed_filename:  # Only check if processed_filename exists
             stored_filename = f"{upload.id}_{upload.processed_filename}"
             file_path = os.path.join(user_dir, stored_filename)
             if not os.path.isfile(file_path):
-                print(f"[QUOTA] Removing orphaned DB record: {stored_filename}")
+                # Debug messages disabled
                 db.delete(upload)
 
     try:
         db.commit()
-        print(f"[QUOTA] Updated quota for {uid}: {total_size} bytes")
+        # Debug messages disabled
     except Exception as e:
-        print(f"[QUOTA] Error committing quota update: {e}")
+        # Debug messages disabled
         db.rollback()
         raise
 
     return total_size
 
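Note: the 5-minute cutoff above is computed with `datetime.utcnow()`, which returns a naive datetime and is deprecated as of Python 3.12. An aware equivalent, assuming the stored timestamps are also UTC-aware:

```python
from datetime import datetime, timedelta, timezone

cutoff_time = datetime.now(timezone.utc) - timedelta(minutes=5)
# Comparing an aware datetime against naive column values raises TypeError,
# so UploadLog.created_at would need to be timezone-aware as well.
```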
@app.get("/me/{uid}")
|
||||
def get_me(uid: str, request: Request, response: Response, db: Session = Depends(get_db)):
|
||||
def get_me(uid: str, request: Request, response: Response):
|
||||
# Add headers to prevent caching
|
||||
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
|
||||
response.headers["Pragma"] = "no-cache"
|
||||
response.headers["Expires"] = "0"
|
||||
print(f"[DEBUG] GET /me/{uid} - Client IP: {request.client.host}")
|
||||
try:
|
||||
# Get user info
|
||||
user = get_user_by_uid(uid)
|
||||
if not user:
|
||||
print(f"[ERROR] User with UID {uid} not found")
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
# Only enforce IP check in production
|
||||
if not debug_mode:
|
||||
if user.ip != request.client.host:
|
||||
print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}")
|
||||
# In production, we might want to be more strict
|
||||
# But for now, we'll just log a warning in development
|
||||
if not debug_mode:
|
||||
raise HTTPException(status_code=403, detail="IP address mismatch")
|
||||
|
||||
# Get user directory
|
||||
user_dir = os.path.join('data', uid)
|
||||
os.makedirs(user_dir, exist_ok=True)
|
||||
|
||||
# Get all upload logs for this user
|
||||
upload_logs = db.exec(
|
||||
select(UploadLog)
|
||||
.where(UploadLog.uid == uid)
|
||||
.order_by(UploadLog.created_at.desc())
|
||||
).all()
|
||||
print(f"[DEBUG] Found {len(upload_logs)} upload logs for UID {uid}")
|
||||
|
||||
# Build file list from database records, checking if files exist on disk
|
||||
files = []
|
||||
seen_files = set() # Track seen files to avoid duplicates
|
||||
|
||||
print(f"[DEBUG] Processing {len(upload_logs)} upload logs for UID {uid}")
|
||||
|
||||
for i, log in enumerate(upload_logs):
|
||||
if not log.filename or not log.processed_filename:
|
||||
print(f"[DEBUG] Skipping log entry {i}: missing filename or processed_filename")
|
||||
continue
|
||||
|
||||
# The actual filename on disk has the log ID prepended
|
||||
stored_filename = f"{log.id}_{log.processed_filename}"
|
||||
file_path = os.path.join(user_dir, stored_filename)
|
||||
|
||||
# Debug messages disabled
|
||||
|
||||
# Use the database session context manager for all database operations
|
||||
with get_db() as db:
|
||||
try:
|
||||
# Get user info
|
||||
user = db.query(User).filter((User.username == uid) | (User.email == uid)).first()
|
||||
if not user:
|
||||
print(f"[ERROR] User with UID {uid} not found")
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
# Skip if we've already seen this file
|
||||
if stored_filename in seen_files:
|
||||
print(f"[DEBUG] Skipping duplicate file: {stored_filename}")
|
||||
continue
|
||||
|
||||
seen_files.add(stored_filename)
|
||||
|
||||
# Only include the file if it exists on disk and is not stream.opus
|
||||
if os.path.isfile(file_path) and stored_filename != 'stream.opus':
|
||||
try:
|
||||
# Get the actual file size in case it changed
|
||||
file_size = os.path.getsize(file_path)
|
||||
file_info = {
|
||||
"name": stored_filename,
|
||||
"original_name": log.filename,
|
||||
"size": file_size
|
||||
}
|
||||
files.append(file_info)
|
||||
print(f"[DEBUG] Added file {len(files)}: {log.filename} (stored as {stored_filename}, {file_size} bytes)")
|
||||
except OSError as e:
|
||||
print(f"[WARNING] Could not access file {stored_filename}: {e}")
|
||||
else:
|
||||
print(f"[DEBUG] File not found on disk or is stream.opus: {stored_filename}")
|
||||
|
||||
# Log all files being returned
|
||||
print("[DEBUG] All files being returned:")
|
||||
for i, file_info in enumerate(files, 1):
|
||||
print(f" {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)")
|
||||
|
||||
# Verify and fix quota based on actual files on disk
|
||||
total_size = verify_and_fix_quota(db, uid, user_dir)
|
||||
quota_mb = round(total_size / (1024 * 1024), 2)
|
||||
print(f"[DEBUG] Verified quota for UID {uid}: {quota_mb} MB")
|
||||
# Only enforce IP check in production
|
||||
if not debug_mode:
|
||||
if user.ip != request.client.host:
|
||||
print(f"[WARNING] IP mismatch for UID {uid}: {request.client.host} != {user.ip}")
|
||||
# In production, we might want to be more strict
|
||||
if not debug_mode:
|
||||
raise HTTPException(status_code=403, detail="IP address mismatch")
|
||||
|
||||
response_data = {
|
||||
"files": files,
|
||||
"quota": quota_mb
|
||||
}
|
||||
print(f"[DEBUG] Returning {len(files)} files and quota info")
|
||||
return response_data
|
||||
|
||||
except HTTPException:
|
||||
# Re-raise HTTP exceptions as they are
|
||||
raise
|
||||
except Exception as e:
|
||||
# Log the full traceback for debugging
|
||||
import traceback
|
||||
error_trace = traceback.format_exc()
|
||||
print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}")
|
||||
# Return a 500 error with a generic message
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
+            # Get user directory
+            user_dir = os.path.join('data', uid)
+            os.makedirs(user_dir, exist_ok=True)
+
+            # Get all upload logs for this user using the query interface
+            upload_logs = db.query(UploadLog).filter(
+                UploadLog.uid == uid
+            ).order_by(UploadLog.created_at.desc()).all()
+
+            # Debug messages disabled
+
+            # Build file list from database records, checking if files exist on disk
+            files = []
+            seen_files = set()  # Track seen files to avoid duplicates
+
+            # Debug messages disabled
+
+            for i, log in enumerate(upload_logs):
+                if not log.filename or not log.processed_filename:
+                    # Debug messages disabled
+                    continue
+
+                # The actual filename on disk has the log ID prepended
+                stored_filename = f"{log.id}_{log.processed_filename}"
+                file_path = os.path.join(user_dir, stored_filename)
+
+                # Skip if we've already seen this file
+                if stored_filename in seen_files:
+                    # Debug messages disabled
+                    continue
+
+                seen_files.add(stored_filename)
+
+                # Only include the file if it exists on disk and is not stream.opus
+                if os.path.isfile(file_path) and stored_filename != 'stream.opus':
+                    try:
+                        # Get the actual file size in case it changed
+                        file_size = os.path.getsize(file_path)
+                        file_info = {
+                            "name": stored_filename,
+                            "original_name": log.filename,
+                            "size": file_size
+                        }
+                        files.append(file_info)
+                        # Debug messages disabled
+                    except OSError as e:
+                        print(f"[WARNING] Could not access file {stored_filename}: {e}")
+                else:
+                    # Debug messages disabled
+                    pass
+
+            # Log all files being returned
+            # Debug messages disabled
+            # for i, file_info in enumerate(files, 1):
+            #     print(f"  {i}. {file_info['name']} (original: {file_info['original_name']}, size: {file_info['size']} bytes)")
+
+            # Verify and fix quota based on actual files on disk
+            total_size = verify_and_fix_quota(db, uid, user_dir)
+            quota_mb = round(total_size / (1024 * 1024), 2)
+            max_quota_mb = round(MAX_QUOTA_BYTES / (1024 * 1024), 2)
+            # Debug messages disabled
+
+            response_data = {
+                "files": files,
+                "quota": {
+                    "used": quota_mb,
+                    "max": max_quota_mb,
+                    "used_bytes": total_size,
+                    "max_bytes": MAX_QUOTA_BYTES,
+                    "percentage": round((total_size / MAX_QUOTA_BYTES) * 100, 2) if MAX_QUOTA_BYTES > 0 else 0
+                }
+            }
+            # Debug messages disabled
+            return response_data
+
+        except HTTPException:
+            # Re-raise HTTP exceptions as they are
+            raise
+        except Exception as e:
+            # Log the full traceback for debugging
+            import traceback
+            error_trace = traceback.format_exc()
+            print(f"[ERROR] Error in /me/{uid} endpoint: {str(e)}\n{error_trace}")
+            # Rollback any database changes in case of error
+            db.rollback()
+            # Return a 500 error with a generic message
+            raise HTTPException(status_code=500, detail="Internal server error")
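Note: the `/me/{uid}` payload now nests quota details instead of returning a bare number. A hypothetical consumer (field names taken from `response_data` above; URL and UID are assumptions):

```python
import httpx

me = httpx.get("http://localhost:8000/me/alice@example.com").json()
quota = me["quota"]
print(f"{quota['used']} MB of {quota['max']} MB used ({quota['percentage']}%)")
for f in me["files"]:
    print(f"  {f['original_name']} -> {f['name']} ({f['size']} bytes)")
```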