Update 2025-04-24_11:44:19

commit e748c737f4
oib, 2025-04-24 11:44:23 +02:00
3408 changed files with 717481 additions and 0 deletions

4
.env Normal file

@@ -0,0 +1,4 @@
# .env — environment configuration for dicta2stream backend
DEBUG=1
ADMIN_SECRET=4042fbd3531ec327ddde28925e03d69a
DATABASE_URL=postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost/dictastream

50
README.md Normal file

@@ -0,0 +1,50 @@
# dicta2stream
A FastAPI-based audio streaming and upload backend with user registration, quota management, and abuse logging.
## Features
- User registration and magic link login
- Audio upload with music/singing detection
- Per-user storage quota
- Admin stats endpoint
- Abuse/violation logging
## Setup
### Requirements
- Python 3.11+
- PostgreSQL (or compatible DB)
### Installation
```sh
python3 -m venv venv
source venv/bin/activate
pip install -r requirements.txt
```
### Environment Variables
Create a `.env` file in the project root with:
```
DATABASE_URL=postgresql://user:pass@localhost:5432/dictastream
ADMIN_SECRET=your_admin_secret
DEBUG=0
```
Set `DEBUG=1` to enable verbose debug logging.
### Running
```sh
uvicorn main:app --reload
```
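For a quick smoke test against the `/status` endpoint (a minimal sketch, assuming uvicorn's default `127.0.0.1:8000`):
```python
# smoke test: expects the backend started above to be running locally
import requests

resp = requests.get("http://127.0.0.1:8000/status")
print(resp.json())  # {'status': 'ok'} when the app is healthy
```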
### Directory Structure
- `main.py` — FastAPI entrypoint
- `register.py`, `magic.py`, `upload.py`, `redirect.py`, `list_streams.py`, `list_user_files.py` — routers
- `models.py` — SQLModel ORM models
- `database.py` — DB session/engine
- `static/` — static HTML/JS/CSS assets
### Notes
- By default, audio uploads are stored in `/data` and streams in `/srv/streams` (change in code as needed).
- Music/singing detection (Whisper + Ollama) expects a local Ollama API at `localhost:11434`.
- Abuse logs are written to `log.txt`.
## License
MIT

0
__CASCADE_RELOAD__ Normal file

39
convert_to_opus.py Normal file

@@ -0,0 +1,39 @@
# convert_to_opus.py — Default voice pipeline: bandpass + compressor + limiter + gate
import subprocess
import os


def convert_to_opus(input_path, output_path):
    if not os.path.exists(input_path):
        raise FileNotFoundError(f"Input file not found: {input_path}")
    filters = [
        "highpass=f=400",    # low-cut below 400 Hz
        "lowpass=f=12000",   # high-cut above 12 kHz
        "acompressor=threshold=-18dB",
        "alimiter=limit=-1dB",
        "agate=threshold=0.02"
    ]
    cmd = [
        "ffmpeg", "-y",
        "-i", input_path,
        "-af", ",".join(filters),
        "-ac", "1",
        "-ar", "24000",
        "-c:a", "libopus",
        "-b:a", "40k",
        "-vbr", "on",
        "-application", "voip",
        output_path
    ]
    try:
        subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        raise RuntimeError(f"FFmpeg conversion failed: {e}")
    if not os.path.exists(output_path):
        raise RuntimeError("Conversion did not produce output.")
    return output_path
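A minimal usage sketch for the helper above; the paths are illustrative placeholders, not files shipped with the repo:
```python
# hypothetical caller for convert_to_opus(); adjust paths to your layout
from convert_to_opus import convert_to_opus

out = convert_to_opus("data/devuser/upload.wav", "data/devuser/stream.opus")
print(f"Encoded voice loop written to {out}")
```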

BIN
data/devuser/stream.opus Normal file

Binary file not shown.

11
database.py Normal file

@@ -0,0 +1,11 @@
# database.py — SQLModel engine/session for PostgreSQL
from sqlmodel import create_engine, Session
import os

POSTGRES_URL = os.getenv("DATABASE_URL", "postgresql://d2s:kuTy4ZKs2VcjgDh6@localhost:5432/dictastream")

engine = create_engine(POSTGRES_URL, echo=False)


def get_db():
    with Session(engine) as session:
        yield session
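`get_db()` is meant to be injected via FastAPI's `Depends`; for one-off scripts, a rough standalone sketch (mirroring what `main.py` and `dev_user.py` do) looks like:
```python
# standalone sketch: create tables and open a session outside of FastAPI
from sqlmodel import SQLModel, Session
from database import engine

SQLModel.metadata.create_all(engine)  # same call main.py runs at startup
with Session(engine) as session:
    ...  # run queries here
```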

40
dev_user.py Normal file

@@ -0,0 +1,40 @@
# dev_user.py — Script to create and confirm a dev user for dicta2stream
import os
import uuid
from datetime import datetime

from sqlmodel import Session

from database import engine
from models import User, UserQuota

USERNAME = os.getenv("DEV_USERNAME", "devuser")
EMAIL = os.getenv("DEV_EMAIL", "devuser@localhost")
IP = os.getenv("DEV_IP", "127.0.0.1")

with Session(engine) as session:
    user = session.get(User, EMAIL)
    if not user:
        token = str(uuid.uuid4())
        user = User(
            email=EMAIL,
            username=USERNAME,
            token=token,
            confirmed=True,
            ip=IP,
            token_created=datetime.utcnow()
        )
        session.add(user)
        print(f"[INFO] Created new dev user: {USERNAME} with email: {EMAIL}")
    else:
        user.confirmed = True
        user.ip = IP
        print(f"[INFO] Existing user found. Marked as confirmed: {USERNAME}")
    quota = session.get(UserQuota, USERNAME)
    if not quota:
        quota = UserQuota(uid=USERNAME, storage_bytes=0)
        session.add(quota)
        print(f"[INFO] Created quota for user: {USERNAME}")
    session.commit()
    print(f"[INFO] Dev user ready: {USERNAME} ({EMAIL}) — confirmed, IP={IP}")
    print(f"[INFO] To use: set localStorage uid and confirmed_uid to '{USERNAME}' in your browser.")

15
list_streams.py Normal file

@@ -0,0 +1,15 @@
# list_streams.py — FastAPI route to list all public streams (users with stream.opus)
from fastapi import APIRouter
from pathlib import Path

router = APIRouter()
DATA_ROOT = Path("./data")


@router.get("/streams")
def list_streams():
    streams = []
    for user_dir in DATA_ROOT.iterdir():
        if user_dir.is_dir() and (user_dir / "stream.opus").exists():
            streams.append(user_dir.name)
    return {"streams": streams}

23
list_user_files.py Normal file

@@ -0,0 +1,23 @@
# list_user_files.py
from fastapi import APIRouter, Depends, HTTPException
from pathlib import Path
from sqlmodel import select

from models import User
from database import get_db

router = APIRouter()


@router.get("/user-files/{uid}")
def list_user_files(uid: str, db = Depends(get_db)):
    # Check user exists and is confirmed
    user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
    # Unwrap Row objects returned by some SQLModel/SQLAlchemy versions
    if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
        user = user[0]
    if not user or not user.confirmed:
        raise HTTPException(status_code=403, detail="Account not confirmed")
    user_dir = Path("data") / uid
    if not user_dir.exists() or not user_dir.is_dir():
        return {"files": []}
    files = [f.name for f in user_dir.iterdir() if f.is_file() and not f.name.startswith(".")]
    files.sort()
    return {"files": files}

19
log.py Normal file

@@ -0,0 +1,19 @@
# log.py — Logging of abuse or violations in dicta2stream
from datetime import datetime
import os


def log_violation(event: str, ip: str, uid: str, reason: str):
    timestamp = datetime.utcnow().isoformat()
    log_dir = os.path.join(os.path.dirname(__file__), "log")
    os.makedirs(log_dir, exist_ok=True)
    log_path = os.path.join(log_dir, "log.txt")
    log_entry = f"[{timestamp}] {event} IP={ip} UID={uid} REASON={reason}\n"
    with open(log_path, "a") as f:
        f.write(log_entry)
    # If DEBUG mode, also print to stdout
    if os.getenv("DEBUG", "0") in ("1", "true", "True"):  # Set DEBUG=1 in .env to enable
        print(f"[DEBUG] {log_entry.strip()}")

26
log/log.txt Normal file

@@ -0,0 +1,26 @@
[2025-04-21T18:38:38.896980] UPLOAD IP=127.0.0.1 UID=devuser REASON=Unexpected error: confirmed
[2025-04-21T18:55:06.719322] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T18:56:16.763868] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T18:57:40.143032] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T18:57:45.348372] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T19:00:16.815295] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T19:01:35.388044] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), confirmed=None
[2025-04-21T19:01:35.388233] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=(User(token='9c5bebba-4d07-4fae-897e-85b78e2083bc', username='devuser', confirmed=True, token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546), email='devuser@localhost', ip='127.0.0.1'),), type=<class 'sqlalchemy.engine.row.Row'>, confirmed=None
[2025-04-21T19:03:56.225787] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T19:03:56.225989] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-21T19:14:01.400092] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T19:14:01.400281] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-21T19:15:56.131161] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T19:15:56.131338] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-21T19:18:52.256658] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T19:18:52.256883] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-21T21:09:23.991881] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T21:09:23.992059] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-21T21:27:53.889321] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-21T21:27:53.889527] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-23T16:12:38.920014] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-23T16:12:38.920194] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-23T16:13:31.324262] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-23T16:13:31.324440] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True
[2025-04-23T16:20:52.603638] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: Incoming uid=devuser, user found=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', confirmed=True
[2025-04-23T16:20:52.603812] UPLOAD IP=127.0.0.1 UID=devuser REASON=DEBUG: After unpack, user=token='9c5bebba-4d07-4fae-897e-85b78e2083bc' username='devuser' confirmed=True token_created=datetime.datetime(2025, 4, 21, 18, 20, 39, 164546) email='devuser@localhost' ip='127.0.0.1', type=<class 'models.User'>, confirmed=True

30
magic.py Normal file

@@ -0,0 +1,30 @@
# magic.py — handle magic token login confirmation
from fastapi import APIRouter, Form, HTTPException, Depends, Request
from fastapi.responses import RedirectResponse
from sqlmodel import Session, select
from database import get_db
from models import User
from datetime import datetime, timedelta

router = APIRouter()


@router.post("/magic-login")
def magic_login(request: Request, db: Session = Depends(get_db), token: str = Form(...)):
    user = db.exec(select(User).where(User.token == token)).first()
    if not user:
        return RedirectResponse(url="/?error=Invalid%20or%20expired%20token", status_code=302)
    if user.confirmed:
        return RedirectResponse(url="/?error=Token%20already%20used", status_code=302)
    if datetime.utcnow() - user.token_created > timedelta(minutes=15):
        return RedirectResponse(url="/?error=Token%20expired", status_code=302)
    user.confirmed = True
    # record client IP on confirmation
    user.ip = request.client.host
    db.commit()
    return RedirectResponse(url=f"/?login=success&confirmed_uid={user.username}", status_code=302)

286
main.py Normal file

@@ -0,0 +1,286 @@
# main.py — FastAPI backend entrypoint for dicta2stream
import os
import subprocess

from fastapi import FastAPI, Depends, UploadFile, File, Form, HTTPException, Request
from fastapi.exceptions import RequestValidationError, HTTPException as FastAPIHTTPException
from fastapi.requests import Request as FastAPIRequest
from fastapi.responses import JSONResponse, HTMLResponse
from fastapi.staticfiles import StaticFiles
from sqlmodel import Session, SQLModel, select
from dotenv import load_dotenv

from database import get_db, engine
from log import log_violation
from models import User, UserQuota, get_user_by_uid

load_dotenv()

# Ensure all tables exist at startup
SQLModel.metadata.create_all(engine)

ADMIN_SECRET = os.getenv("ADMIN_SECRET")
debug_mode = os.getenv("DEBUG", "0") in ("1", "true", "True")

app = FastAPI(debug=debug_mode)

if not os.path.exists("data"):
    os.makedirs("data")
app.mount("/audio", StaticFiles(directory="data"), name="audio")

if debug_mode:
    print("[DEBUG] FastAPI running in debug mode.")


# Global error handlers so the API always returns JSON
@app.exception_handler(FastAPIHTTPException)
async def http_exception_handler(request: FastAPIRequest, exc: FastAPIHTTPException):
    return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: FastAPIRequest, exc: RequestValidationError):
    return JSONResponse(status_code=422, content={"detail": exc.errors()})


@app.exception_handler(Exception)
async def generic_exception_handler(request: FastAPIRequest, exc: Exception):
    return JSONResponse(status_code=500, content={"detail": str(exc)})


# include routers from submodules
from register import router as register_router
from magic import router as magic_router
from upload import router as upload_router
from redirect import router as redirect_router
from list_user_files import router as list_user_files_router
from list_streams import router as list_streams_router

app.include_router(register_router)
app.include_router(magic_router)
app.include_router(upload_router)
app.include_router(redirect_router)
app.include_router(list_user_files_router)
app.include_router(list_streams_router)

# Serve static files
app.mount("/static", StaticFiles(directory="static"), name="static")


@app.get("/", response_class=HTMLResponse)
def serve_index():
    with open("static/index.html") as f:
        return f.read()


@app.get("/me", response_class=HTMLResponse)
def serve_me():
    with open("static/index.html") as f:
        return f.read()


@app.get("/admin/stats")
def admin_stats(request: Request, db: Session = Depends(get_db)):
    secret = request.headers.get("x-admin-secret")
    if secret != ADMIN_SECRET:
        raise HTTPException(status_code=403, detail="Forbidden")
    users = db.exec(select(User)).all()
    users_count = len(users)
    total_quota = db.exec(select(UserQuota)).all()
    total_quota_sum = sum(q.storage_bytes for q in total_quota)
    violations_log = 0
    try:
        # log_violation() writes to log/log.txt next to this file
        with open(os.path.join(os.path.dirname(__file__), "log", "log.txt")) as f:
            violations_log = sum(1 for _ in f)
    except FileNotFoundError:
        pass
    return {
        "total_users": users_count,
        "total_quota_mb": round(total_quota_sum / (1024 * 1024), 2),
        "violation_log_entries": violations_log
    }


@app.get("/status")
def status():
    return {"status": "ok"}


@app.get("/debug")
def debug(request: Request):
    return {
        "ip": request.client.host,
        "headers": dict(request.headers),
    }


STREAM_DIR = "/srv/streams"
ICECAST_BASE_URL = "https://dicta2stream.net/stream/"
ICECAST_MOUNT_PREFIX = "user-"
MAX_QUOTA_BYTES = 100 * 1024 * 1024


@app.post("/delete-account")
async def delete_account(data: dict, request: Request, db: Session = Depends(get_db)):
    uid = data.get("uid")
    if not uid:
        raise HTTPException(status_code=400, detail="Missing UID")
    ip = request.client.host
    user = get_user_by_uid(uid)
    if not user or user.ip != ip:
        raise HTTPException(status_code=403, detail="Unauthorized")

    # Delete user quota and user using ORM
    quota = db.get(UserQuota, uid)
    if quota:
        db.delete(quota)
    user_obj = db.get(User, user.email)
    if user_obj:
        db.delete(user_obj)
    db.commit()

    import shutil
    user_dir = os.path.join(STREAM_DIR, user.username)
    # Only allow deletion within STREAM_DIR
    real_user_dir = os.path.realpath(user_dir)
    if not real_user_dir.startswith(os.path.realpath(STREAM_DIR)):
        raise HTTPException(status_code=400, detail="Invalid user directory")
    if os.path.exists(real_user_dir):
        shutil.rmtree(real_user_dir, ignore_errors=True)
    return {"message": "User deleted"}


@app.post("/upload")
async def upload_audio(
    request: Request,
    uid: str = Form(...),
    file: UploadFile = File(...),
    db: Session = Depends(get_db)  # needed for the quota update below
):
    ip = request.client.host
    user = get_user_by_uid(uid)
    if not user:
        log_violation("UPLOAD", ip, uid, "UID not found")
        raise HTTPException(status_code=403, detail="Invalid user ID")
    if user.ip != ip:
        log_violation("UPLOAD", ip, uid, "UID/IP mismatch")
        raise HTTPException(status_code=403, detail="Device/IP mismatch")

    user_dir = os.path.join(STREAM_DIR, user.username)
    os.makedirs(user_dir, exist_ok=True)
    raw_path = os.path.join(user_dir, "upload.wav")
    final_path = os.path.join(user_dir, "stream.opus")

    with open(raw_path, "wb") as out:
        content = await file.read()
        out.write(content)

    usage = subprocess.check_output(["du", "-sb", user_dir]).split()[0]
    if int(usage) > MAX_QUOTA_BYTES:
        os.remove(raw_path)
        log_violation("UPLOAD", ip, uid, "Quota exceeded")
        raise HTTPException(status_code=403, detail="Quota exceeded")

    # Music/singing detection is currently disabled:
    # from detect_content_type_whisper_ollama import detect_content_type_whisper_ollama  # Broken import: module not found
    content_type = None
    if content_type in ["music", "singing"]:
        os.remove(raw_path)
        log_violation("UPLOAD", ip, uid, f"Rejected content: {content_type}")
        return JSONResponse(status_code=403, content={"error": f"{content_type.capitalize()} uploads are not allowed."})

    try:
        subprocess.run([
            "ffmpeg", "-y", "-i", raw_path,
            "-ac", "1", "-ar", "48000",
            "-c:a", "libopus", "-b:a", "60k",
            final_path
        ], check=True)
    except subprocess.CalledProcessError as e:
        os.remove(raw_path)
        log_violation("FFMPEG", ip, uid, f"ffmpeg failed: {e}")
        raise HTTPException(status_code=500, detail="Encoding failed")
    os.remove(raw_path)

    try:
        actual_bytes = int(subprocess.check_output(["du", "-sb", user_dir]).split()[0])
        q = db.get(UserQuota, uid)
        if q:
            q.storage_bytes = actual_bytes
            db.add(q)
            db.commit()
    except Exception as e:
        log_violation("QUOTA", ip, uid, f"Quota update failed: {e}")

    stream_url = f"{ICECAST_BASE_URL}{ICECAST_MOUNT_PREFIX}{user.username}.opus"
    return {"stream_url": stream_url}


@app.delete("/uploads/{uid}/{filename}")
def delete_file(uid: str, filename: str, request: Request, db: Session = Depends(get_db)):
    user = get_user_by_uid(uid)
    if not user:
        raise HTTPException(status_code=403, detail="Invalid user ID")
    ip = request.client.host
    if user.ip != ip:
        raise HTTPException(status_code=403, detail="Device/IP mismatch")

    user_dir = os.path.join(STREAM_DIR, user.username)
    target_path = os.path.join(user_dir, filename)
    # Prevent path traversal attacks
    real_target_path = os.path.realpath(target_path)
    real_user_dir = os.path.realpath(user_dir)
    if not real_target_path.startswith(real_user_dir + os.sep):
        raise HTTPException(status_code=403, detail="Invalid path")
    if not os.path.isfile(real_target_path):
        raise HTTPException(status_code=404, detail="File not found")

    os.remove(real_target_path)
    log_violation("DELETE", ip, uid, f"Deleted {filename}")
    subprocess.run(["/root/scripts/refresh_user_playlist.sh", user.username])

    try:
        actual_bytes = int(subprocess.check_output(["du", "-sb", user_dir]).split()[0])
        q = db.get(UserQuota, uid)
        if q:
            q.storage_bytes = actual_bytes
            db.add(q)
            db.commit()
    except Exception as e:
        log_violation("QUOTA", ip, uid, f"Quota update after delete failed: {e}")
    return {"status": "deleted"}


@app.get("/confirm/{uid}")
def confirm_user(uid: str, request: Request):
    ip = request.client.host
    user = get_user_by_uid(uid)
    if not user or user.ip != ip:
        raise HTTPException(status_code=403, detail="Unauthorized")
    return {"username": user.username, "email": user.email}


@app.get("/me/{uid}")
def get_me(uid: str, request: Request, db: Session = Depends(get_db)):
    ip = request.client.host
    user = get_user_by_uid(uid)
    if not user or user.ip != ip:
        raise HTTPException(status_code=403, detail="Unauthorized access")

    user_dir = os.path.join(STREAM_DIR, user.username)
    files = []
    if os.path.exists(user_dir):
        for f in os.listdir(user_dir):
            path = os.path.join(user_dir, f)
            if os.path.isfile(path):
                files.append({"name": f, "size": os.path.getsize(path)})

    q = db.get(UserQuota, uid)
    quota_mb = round(q.storage_bytes / (1024 * 1024), 2) if q else 0
    return {
        "stream_url": f"{ICECAST_BASE_URL}{ICECAST_MOUNT_PREFIX}{user.username}.opus",
        "files": files,
        "quota": quota_mb
    }
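A hedged sketch for the admin stats endpoint; it assumes `ADMIN_SECRET` is exported in the environment (it comes from `.env`) and the app runs locally:
```python
# query /admin/stats with the shared secret header
import os
import requests

resp = requests.get(
    "http://127.0.0.1:8000/admin/stats",
    headers={"x-admin-secret": os.environ["ADMIN_SECRET"]},
)
print(resp.json())  # total_users, total_quota_mb, violation_log_entries
```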

35
models.py Normal file

@@ -0,0 +1,35 @@
# models.py — UploadLog table (SQLModel) and related models
from sqlmodel import SQLModel, Field, Session, select
from typing import Optional
from datetime import datetime
from database import engine


class User(SQLModel, table=True):
    token_created: datetime = Field(default_factory=datetime.utcnow)
    email: str = Field(primary_key=True)
    username: str
    token: str
    confirmed: bool = False
    ip: str = Field(default="")


class UserQuota(SQLModel, table=True):
    uid: str = Field(primary_key=True)
    storage_bytes: int = 0


class UploadLog(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    uid: str
    ip: str
    filename: Optional[str]
    size_bytes: int
    created_at: datetime = Field(default_factory=datetime.utcnow)


def get_user_by_uid(uid: str) -> Optional[User]:
    with Session(engine) as session:
        statement = select(User).where(User.username == uid)
        result = session.exec(statement).first()
        return result

16
redirect.py Normal file

@@ -0,0 +1,16 @@
# redirect.py — Short stream link: /stream/{uid} → /stream/{uid}/stream.opus
from fastapi import APIRouter, HTTPException
from fastapi.responses import RedirectResponse
from pathlib import Path

router = APIRouter()
DATA_ROOT = Path("/data")


@router.get("/stream/{uid}")
def redirect_to_stream(uid: str):
    stream_path = DATA_ROOT / uid / "stream.opus"
    if not stream_path.exists():
        raise HTTPException(status_code=404, detail="Stream not found")
    return RedirectResponse(f"/stream/{uid}/stream.opus")

40
register.py Normal file

@@ -0,0 +1,40 @@
# register.py — user registration and magic link sender
from fastapi import APIRouter, Form, Request, HTTPException, Depends
from sqlmodel import Session, select
from models import User, UserQuota
from database import get_db
import uuid
import smtplib
from email.message import EmailMessage

router = APIRouter()

MAGIC_FROM = "noreply@dicta2stream.net"
MAGIC_DOMAIN = "https://dicta2stream.net"


@router.post("/register")
def register(request: Request, email: str = Form(...), user: str = Form(...), db: Session = Depends(get_db)):
    if db.get(User, email):
        raise HTTPException(status_code=400, detail="Email already registered")
    token = str(uuid.uuid4())
    db.add(User(email=email, username=user, token=token, confirmed=False, ip=request.client.host))
    db.add(UserQuota(uid=user))
    db.commit()
    msg = EmailMessage()
    msg["From"] = MAGIC_FROM
    msg["To"] = email
    msg["Subject"] = "Your magic login link"
    msg.set_content(
        f"Hello {user},\n\nClick to confirm your account:\n{MAGIC_DOMAIN}/?token={token}\n\nThis link is valid for one-time login."
    )
    try:
        with smtplib.SMTP("localhost") as smtp:
            smtp.send_message(msg)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Email failed: {e}")
    return {"message": "Confirmation sent"}

0
reload.txt Normal file

9
requirements.txt Normal file

@@ -0,0 +1,9 @@
fastapi
uvicorn
sqlmodel
python-dotenv
python-multipart
slowapi
requests
psycopg2-binary
# smtplib and email.message are part of the Python standard library and need no pip install

429
static/app.js Normal file

@@ -0,0 +1,429 @@
// app.js — Frontend upload + minimal native player logic with slide-in and pulse effect
import { playBeep } from "./sound.js";
// 🔔 Minimal toast helper so calls to showToast() don't fail
function showToast(msg) {
const toast = document.createElement("div");
toast.className = "toast";
toast.textContent = msg;
toast.style.position = "fixed";
toast.style.bottom = "1.5rem";
toast.style.left = "50%";
toast.style.transform = "translateX(-50%)";
toast.style.background = "#333";
toast.style.color = "#fff";
toast.style.padding = "0.6em 1.2em";
toast.style.borderRadius = "6px";
toast.style.boxShadow = "0 2px 6px rgba(0,0,0,.2)";
toast.style.zIndex = 9999;
document.body.appendChild(toast);
setTimeout(() => toast.remove(), 4000);
}
document.addEventListener("DOMContentLoaded", () => {
// Guest vs. logged-in toggling is now handled by dashboard.js
// --- Public profile view logic ---
function showProfilePlayerFromUrl() {
const params = new URLSearchParams(window.location.search);
const profileUid = params.get("profile");
if (profileUid) {
const mePage = document.getElementById("me-page");
if (mePage) {
document.querySelectorAll("main > section").forEach(sec => sec.hidden = sec.id !== "me-page");
// Hide upload/delete/copy-url controls for guest view
const uploadArea = document.getElementById("upload-area");
if (uploadArea) uploadArea.hidden = true;
const copyUrlBtn = document.getElementById("copy-url");
if (copyUrlBtn) copyUrlBtn.style.display = "none";
const deleteBtn = document.getElementById("delete-account");
if (deleteBtn) deleteBtn.style.display = "none";
// Update heading and description for guest view
const meHeading = document.querySelector("#me-page h2");
if (meHeading) meHeading.textContent = `${profileUid}'s Stream 🎙️`;
const meDesc = document.querySelector("#me-page p");
if (meDesc) meDesc.textContent = `This is ${profileUid}'s public stream.`;
// Load playlist for the given profileUid
loadProfilePlaylist(profileUid);
}
}
}
// Run on popstate (SPA navigation and browser back/forward)
window.addEventListener('popstate', showProfilePlayerFromUrl);
async function loadProfilePlaylist(uid) {
const meAudio = document.getElementById("me-audio");
if (!meAudio) return;
const resp = await fetch(`/user-files/${encodeURIComponent(uid)}`);
const data = await resp.json();
if (!data.files || !Array.isArray(data.files) || data.files.length === 0) {
meAudio.src = "";
return;
}
// Shuffle playlist
function shuffle(array) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
}
window.mePlaylist = shuffle(data.files.map(f => `/audio/${uid}/${f}`));
window.mePlaylistIdx = 0;
const newSrc = window.mePlaylist[window.mePlaylistIdx];
meAudio.src = newSrc;
meAudio.load();
meAudio.play().catch(() => {/* autoplay may be blocked, ignore */});
}
window.loadProfilePlaylist = loadProfilePlaylist;
// --- Playlist for #me-page ---
const mePageLink = document.getElementById("show-me");
const meAudio = document.getElementById("me-audio");
const copyUrlBtn = document.getElementById("copy-url");
if (copyUrlBtn) copyUrlBtn.onclick = () => {
const uid = localStorage.getItem("uid");
if (uid) {
const streamUrl = `${window.location.origin}/stream/${encodeURIComponent(uid)}`;
navigator.clipboard.writeText(streamUrl);
showToast(`Copied your stream URL: ${streamUrl}`);
} else {
showToast("No user stream URL available");
}
};
let mePlaylist = [];
let mePlaylistIdx = 0;
// Playlist UI is hidden, so do not render
const mePrevBtn = document.getElementById("me-prev");
if (mePrevBtn) mePrevBtn.style.display = "none";
const meNextBtn = document.getElementById("me-next");
if (meNextBtn) meNextBtn.style.display = "none";
async function loadUserPlaylist() {
const uid = localStorage.getItem("uid");
if (!uid) return;
const resp = await fetch(`/user-files/${encodeURIComponent(uid)}`);
const data = await resp.json();
if (!data.files || !Array.isArray(data.files) || data.files.length === 0) {
meAudio.src = "";
return;
}
// Shuffle playlist
function shuffle(array) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
}
mePlaylist = shuffle(data.files.map(f => `/audio/${uid}/${f}`));
mePlaylistIdx = 0;
const newSrc = mePlaylist[mePlaylistIdx];
const prevSrc = meAudio.src;
const wasPlaying = !meAudio.paused && !meAudio.ended && meAudio.currentTime > 0;
const fullNewSrc = window.location.origin + newSrc;
if (prevSrc !== fullNewSrc) {
meAudio.src = newSrc;
meAudio.load();
} // else: do nothing, already loaded
// Don't call load() if already playing the correct file
// Don't call load() redundantly
// Don't set src redundantly
// This prevents DOMException from fetch aborts
}
if (mePageLink && meAudio) {
mePageLink.addEventListener("click", async () => {
await loadUserPlaylist();
// Start playback from current index
if (mePlaylist.length > 0) {
const newSrc = mePlaylist[mePlaylistIdx];
const prevSrc = meAudio.src;
const fullNewSrc = window.location.origin + newSrc;
if (prevSrc !== fullNewSrc) {
meAudio.src = newSrc;
meAudio.load();
}
meAudio.play();
}
});
meAudio.addEventListener("ended", () => {
if (mePlaylist.length > 1) {
mePlaylistIdx = (mePlaylistIdx + 1) % mePlaylist.length;
meAudio.src = mePlaylist[mePlaylistIdx];
meAudio.load();
meAudio.play();
} else if (mePlaylist.length === 1) {
// Only one file: restart
meAudio.currentTime = 0;
meAudio.load();
meAudio.play();
}
});
// Detect player stop and random play a new track
meAudio.addEventListener("pause", () => {
// Only trigger if playback reached the end and playlist has more than 1 track
if (meAudio.ended && mePlaylist.length > 1) {
let nextIdx;
do {
nextIdx = Math.floor(Math.random() * mePlaylist.length);
} while (nextIdx === mePlaylistIdx && mePlaylist.length > 1);
mePlaylistIdx = nextIdx;
meAudio.currentTime = 0;
meAudio.src = mePlaylist[mePlaylistIdx];
meAudio.load();
meAudio.play();
}
});
}
const deleteBtn = document.getElementById("delete-account");
if (deleteBtn) deleteBtn.onclick = async () => {
if (!confirm("Are you sure you want to delete your account and all uploaded audio?")) return;
const res = await fetch("/delete-account", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ uid })
});
if (res.ok) {
showToast("✅ Account deleted");
localStorage.removeItem("uid");
setTimeout(() => window.location.reload(), 2000);
} else {
const msg = (await res.json()).detail || res.status;
showToast("❌ Delete failed: " + msg);
}
};
const fadeAllSections = () => {
const uid = localStorage.getItem('uid');
document.querySelectorAll("main > section").forEach(section => {
// Always keep upload-area visible for logged-in users
if (uid && section.id === 'upload-area') return;
if (!section.hidden) {
section.classList.add("fade-out");
setTimeout(() => {
section.classList.remove("fade-out");
section.hidden = true;
}, 300);
}
});
};
const dropzone = document.getElementById("upload-area");
dropzone.setAttribute("aria-label", "Upload area. Click or drop an audio file to upload.");
const fileInput = document.getElementById("fileInput");
const fileInfo = document.createElement("div");
fileInfo.id = "file-info";
fileInfo.style.textAlign = "center";
fileInput.parentNode.insertBefore(fileInfo, fileInput.nextSibling);
const streamInfo = document.getElementById("stream-info");
const streamUrlEl = document.getElementById("streamUrl");
const status = document.getElementById("status");
const spinner = document.getElementById("spinner");
const uid = localStorage.getItem("uid");
const uidTime = parseInt(localStorage.getItem("uid_time"), 10);
const now = Date.now();
// Hide register button if logged in
const registerBtn = document.getElementById("show-register");
if (uid && localStorage.getItem("confirmed_uid") === uid && uidTime && (now - uidTime) < 3600000) {
if (registerBtn) registerBtn.style.display = "none";
} else {
if (registerBtn) registerBtn.style.display = "";
}
if (!uid || !uidTime || (now - uidTime) > 3600000) {
localStorage.removeItem("uid");
localStorage.removeItem("confirmed_uid");
localStorage.removeItem("uid_time");
status.className = "error-toast";
status.innerText = "❌ Session expired. Please log in again.";
// Add Login or Register button only for this error
let loginBtn = document.createElement('button');
loginBtn.textContent = 'Login or Register';
loginBtn.className = 'login-register-btn';
loginBtn.onclick = () => {
document.querySelectorAll('main > section').forEach(sec => sec.hidden = sec.id !== 'register-page');
};
status.appendChild(document.createElement('br'));
status.appendChild(loginBtn);
// Remove the status div after a short delay so only toast remains
setTimeout(() => {
if (status.parentNode) status.parentNode.removeChild(status);
}, 100);
return;
}
const confirmed = localStorage.getItem("confirmed_uid");
if (!confirmed || uid !== confirmed) {
status.className = "error-toast";
status.innerText = "❌ Please confirm your account via email first.";
showToast(status.innerText);
return;
}
let abortController;
const upload = async (file) => {
if (abortController) abortController.abort();
abortController = new AbortController();
fileInfo.innerText = `📁 ${file.name} (${(file.size / 1024 / 1024).toFixed(2)} MB)`;
if (file.size > 100 * 1024 * 1024) {
status.className = "error-toast";
status.innerText = "❌ File too large. Maximum size is 100 MB.";
showToast(status.innerText);
return;
}
spinner.style.display = "block";
status.innerHTML = '📡 Uploading…';
status.className = "uploading-toast";
fileInput.disabled = true;
dropzone.classList.add("uploading");
const formData = new FormData();
formData.append("uid", uid);
formData.append("file", file);
const res = await fetch("/upload", {
signal: abortController.signal,
method: "POST",
body: formData,
});
let data, parseError;
try {
data = await res.json();
} catch (e) {
parseError = e;
}
if (!data) {
status.className = "error-toast";
status.innerText = "❌ Upload failed: " + (parseError && parseError.message ? parseError.message : "Unknown error");
showToast(status.innerText);
spinner.style.display = "none";
fileInput.disabled = false;
dropzone.classList.remove("uploading");
return;
}
if (res.ok) {
status.className = "success-toast";
streamInfo.hidden = false;
streamInfo.innerHTML = `
<p>Your stream is now live:</p>
<audio controls id="me-audio" aria-label="Stream audio player. Your uploaded voice loop plays here.">
<p style='font-size: 0.9em; text-align: center;'>🔁 This stream loops forever</p>
<source src="${data.stream_url}" type="audio/ogg">
</audio>
<p><a href="${data.stream_url}" target="_blank" class="button" aria-label="Open your stream in a new tab">Open in external player</a></p>
`;
const meAudio = document.getElementById("me-audio");
meAudio.addEventListener("ended", () => {
if (mePlaylist.length > 1) {
mePlaylistIdx = (mePlaylistIdx + 1) % mePlaylist.length;
meAudio.src = mePlaylist[mePlaylistIdx];
meAudio.load();
meAudio.play();
const meUrlEl = document.getElementById("me-url"); // URL field is currently commented out in index.html
if (meUrlEl) meUrlEl.value = mePlaylist[mePlaylistIdx];
} else if (mePlaylist.length === 1) {
// Only one file: restart
meAudio.currentTime = 0;
meAudio.load();
meAudio.play();
}
});
if (data.quota && data.quota.used_mb !== undefined) {
const bar = document.getElementById("quota-bar");
const text = document.getElementById("quota-text");
const quotaSec = document.getElementById("quota-meter");
if (bar && text && quotaSec) {
quotaSec.hidden = false;
const used = parseFloat(data.quota.used_mb);
bar.value = used;
bar.max = 100;
text.textContent = `${used.toFixed(1)} MB used`;
}
}
spinner.style.display = "none";
fileInput.disabled = false;
dropzone.classList.remove("uploading");
showToast(status.innerText);
status.innerText = "✅ Upload successful.";
playBeep(432, 0.25, "sine");
setTimeout(() => status.innerText = "", 5000);
streamInfo.classList.add("visible", "slide-in");
} else {
streamInfo.hidden = true;
status.className = "error-toast";
spinner.style.display = "none";
if ((data.detail || data.error || "").includes("music")) {
status.innerText = "🎵 Upload rejected: singing or music detected.";
} else {
status.innerText = `❌ Upload failed: ${data.detail || data.error}`;
}
showToast(status.innerText);
fileInput.value = null;
dropzone.classList.remove("uploading");
fileInput.disabled = false;
streamInfo.classList.remove("visible", "slide-in");
}
};
dropzone.addEventListener("click", () => {
console.log("[DEBUG] Dropzone clicked");
fileInput.click();
console.log("[DEBUG] fileInput.click() called");
});
dropzone.addEventListener("dragover", (e) => {
e.preventDefault();
dropzone.classList.add("dragover");
dropzone.style.transition = "background-color 0.3s ease";
});
dropzone.addEventListener("dragleave", () => {
dropzone.classList.remove("dragover");
});
dropzone.addEventListener("drop", (e) => {
dropzone.classList.add("pulse");
setTimeout(() => dropzone.classList.remove("pulse"), 400);
e.preventDefault();
dropzone.classList.remove("dragover");
const file = e.dataTransfer.files[0];
if (file) upload(file);
});
fileInput.addEventListener("change", (e) => {
status.innerText = "";
status.className = "";
const file = e.target.files[0];
if (file) upload(file);
});
document.querySelectorAll('#links a[data-target]').forEach(link => {
link.addEventListener('click', (e) => {
e.preventDefault();
const target = link.getAttribute('data-target');
// Only hide other sections when not opening #me-page
if (target !== 'me-page') fadeAllSections();
const section = document.getElementById(target);
if (section) {
section.hidden = false;
section.classList.add("slide-in");
section.scrollIntoView({ behavior: "smooth" });
}
const burger = document.getElementById('burger-toggle');
if (burger && burger.checked) burger.checked = false;
});
});
});

28
static/auth-ui.js Normal file

@@ -0,0 +1,28 @@
// static/auth-ui.js — navigation link and back-button handlers
import { showOnly } from './router.js';
// Data-target navigation (e.g., at #links)
export function initNavLinks() {
const linksContainer = document.getElementById('links');
if (!linksContainer) return;
linksContainer.addEventListener('click', e => {
const a = e.target.closest('a[data-target]');
if (!a || !linksContainer.contains(a)) return;
e.preventDefault();
const target = a.dataset.target;
if (target) showOnly(target);
const burger = document.getElementById('burger-toggle');
if (burger && burger.checked) burger.checked = false;
});
}
// Back-button navigation
export function initBackButtons() {
document.querySelectorAll('a[data-back]').forEach(btn => {
btn.addEventListener('click', e => {
e.preventDefault();
const target = btn.dataset.back;
if (target) showOnly(target);
});
});
}

55
static/dashboard.js Normal file

@@ -0,0 +1,55 @@
// dashboard.js — toggle guest vs. user dashboard and reposition streams link
async function initDashboard() {
const uploadArea = document.querySelector('#upload-area');
const userDashboard = document.querySelector('#me-page');
const meAudio = document.querySelector('#me-audio');
const quotaBar = document.querySelector('#quota-bar');
const quotaText = document.querySelector('#quota-text');
const streamsLink = document.querySelector('#show-streams');
const registerLink = document.querySelector('#show-register');
// Default state: hide both
uploadArea.hidden = true;
userDashboard.hidden = true;
const uid = localStorage.getItem('uid');
if (!uid) {
// Guest: only upload area and move Streams next to Register
uploadArea.hidden = false;
userDashboard.hidden = true;
if (registerLink && streamsLink) {
registerLink.parentElement.insertAdjacentElement('afterend', streamsLink.parentElement);
}
return;
}
try {
const res = await fetch(`/me/${uid}`);
if (!res.ok) throw new Error('Not authorized');
const data = await res.json();
// Logged-in view
uploadArea.hidden = false;
userDashboard.hidden = false;
// Set audio source
meAudio.src = data.stream_url;
// Update quota
quotaBar.value = data.quota;
quotaText.textContent = `${data.quota} MB used`;
// Ensure Streams link remains in nav, not moved
// (No action needed if static)
} catch (e) {
console.warn('Dashboard init error, treating as guest:', e);
localStorage.removeItem('uid');
uploadArea.hidden = false;
userDashboard.hidden = true;
if (registerLink && streamsLink) {
registerLink.parentElement.insertAdjacentElement('afterend', streamsLink.parentElement);
}
}
}
document.addEventListener('DOMContentLoaded', initDashboard);

188
static/index.html Normal file

@@ -0,0 +1,188 @@
<!-- index.html -->
<!DOCTYPE html>
<html lang="en">
<head>
<link rel="icon" href="data:image/svg+xml,<svg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 100 100%22><text y=%22.9em%22 font-size=%2290%22>🎙️</text></svg>">
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="description" content="dicta2stream is a minimalist voice streaming platform for looping your spoken audio anonymously." />
<title>dicta2stream</title>
<link rel="stylesheet" href="/static/style.css" />
<!-- Responsive burger menu display -->
<style>
#burger-label, #burger-toggle { display: none; }
@media (max-width: 959px) {
#burger-label { display: block; }
section#links { display: none; }
#burger-toggle:checked + #burger-label + section#links { display: block; }
}
@media (min-width: 960px) {
section#links { display: block; }
}
</style>
<link rel="modulepreload" href="/static/sound.js" />
</head>
<body>
<header>
<h1>dicta2stream 🎙️</h1>
<p>Your voice. Your loop. One drop away.</p>
</header>
<main>
<section id="upload-area" class="dropzone">
<p>🎙 Drag & drop your audio file here<br>or click to browse</p>
<input type="file" id="fileInput" accept="audio/*" hidden />
</section>
<div id="spinner" class="spinner">
</div>
<div id="status"></div>
<section id="stream-info" hidden>
<p>Your loop stream:</p>
<code id="streamUrl">...</code>
<audio controls id="player" loop></audio>
<p><button id="delete-account" class="delete-account">🗑️ Delete My Account</button></p>
</section>
<input type="checkbox" id="burger-toggle" hidden>
<label for="burger-toggle" id="burger-label" aria-label="Menu">
<span></span>
<span></span>
<span></span>
</label>
<section id="links">
<p><a href="#" id="show-me" data-target="me-page">Your Stream</a></p>
<p><a href="#" id="show-register" data-target="register-page">Login or Register</a></p>
<p>
<a href="#" id="show-terms" data-target="terms-page">Terms of Service</a> |
<a href="#" id="show-imprint" data-target="imprint-page">Imprint</a> |
<a href="#" id="show-privacy" data-target="privacy-page">Privacy Policy</a> |
<a href="#" id="show-streams" data-target="stream-page">Streams</a>
</p>
</section>
<section id="terms-page" hidden>
<article>
<h2>Terms of Service</h2>
<p><em>Last updated: April 18, 2025</em></p>
<p>By accessing or using dicta2stream.net (the “Service”), you agree to be bound by these Terms of Service (“Terms”). If you do not agree, do not use the Service.</p>
<ul>
<li>You must be at least 18 years old to register.</li>
<li>UID in localStorage must be uniquely yours.</li>
<li>One account per device/IP per 24 hours.</li>
</ul>
<p>Uploads are limited to <strong>100 MB</strong> and must be <strong>voice only</strong>. Music/singing will be rejected. Your stream will loop publicly and anonymously via Icecast.</p>
<p>See full legal terms in the Git repository or request via support@dicta2stream.net.</p>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
<section id="privacy-page" hidden>
<article>
<h2>Privacy Policy</h2>
<p><em>Last updated: April 18, 2025</em></p>
<ul>
<li>No cookies. UID is stored locally only.</li>
<li>We log IP + UID only for abuse protection and quota enforcement.</li>
<li>Uploads are scanned via Whisper+Ollama but not stored as transcripts.</li>
<li>Data is never sold. Contact us for account deletion.</li>
</ul>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
<section id="imprint-page" hidden>
<article>
<h2>Imprint</h2>
<p><strong>Andreas Michael Fleckl</strong></p>
<p>Johnstrasse 7/6<br>1140 Vienna<br>Austria / Europe</p>
<p><strong>Contact:</strong><br>
<a href="mailto:Andreas.Fleckl@dicta2stream.net">Andreas.Fleckl@dicta2stream.net</a></p>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
<section id="stream-page" hidden>
<article>
<h2>🎧 Public Streams</h2>
<ul id="stream-list"><li>Loading...</li></ul>
<p><a href="#" data-back="upload-area">← Back</a></p>
<p style="margin-top:1.5em;font-size:0.98em;">
<a href="#" id="show-terms" data-target="terms-page">Terms of Service</a> |
<a href="#" id="show-imprint" data-target="imprint-page">Imprint</a> |
<a href="#" id="show-privacy" data-target="privacy-page">Privacy Policy</a>
</p>
</article>
</section>
<section id="register-page" hidden>
<article>
<h2>Register</h2>
<form id="register-form">
<p><label>Email<br><input type="email" name="email" required /></label></p>
<p><label>Username<br><input type="text" name="user" required /></label></p>
<p><button type="submit">Create Account</button></p>
</form>
<p><small>You'll receive a magic login link via email. No password required.</small></p>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
<section id="magic-login-page" hidden>
<article>
<h2>Magic Login</h2>
<p>If you received a magic login link, you're almost in. Click below to confirm your account and activate streaming.</p>
<form id="magic-login-form">
<div id="magic-error" style="color: #b22222; font-size: 0.9em; display: none; margin-bottom: 1em;"></div>
<input type="hidden" name="token" id="magic-token" />
<button type="submit">Confirm &amp; Activate</button>
</form>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
<section id="quota-meter" hidden>
<p class="quota-meter">Quota: <progress id="quota-bar" value="0" max="100"></progress> <span id="quota-text">0 MB used</span></p>
</section>
<section id="me-page" hidden>
<article>
<h2>Your Stream 🎙️</h2>
<p>This is your personal stream. Only you can upload to it.</p>
<audio controls id="me-audio"></audio>
<!-- Playlist and URL input hidden as per user request -->
<div class="playlist-controls">
<button id="me-prev" aria-label="Previous track">⏮️</button>
<button id="me-next" aria-label="Next track">⏭️</button>
</div>
<!-- <ul id="me-playlist" class="playlist"></ul> -->
<!-- <p><input id="me-url" readonly class="me-url" /></p> -->
<p><button id="copy-url">📋 Copy URL to clipboard</button></p>
<p><a href="#" data-back="upload-area">← Back</a></p>
</article>
</section>
</main>
<footer>
<p>Built for public voice streaming • Opus | Mono | 48kHz | 60kbps</p>
<p class="footer-hint">Need more space? Contact <a href="mailto:Andreas.Fleckl@dicta2stream.net">Andreas.Fleckl@dicta2stream.net</a></p>
<p style="font-size: 0.85em; opacity: 0.65;">Your session expires after 1 hour. Shareable links redirect to homepage.</p>
</footer>
<script type="module" src="/static/dashboard.js"></script>
<script type="module" src="/static/app.js"></script>
<script type="module">
import "/static/nav.js";
window.addEventListener("pageshow", () => {
const dz = document.querySelector("#upload-area");
dz.classList.remove("uploading");
const spinner = document.querySelector("#spinner");
if (spinner) spinner.style.display = "none";
});
</script>
</body>
</html>

18
static/magic-login.js Normal file

@@ -0,0 +1,18 @@
// static/magic-login.js — handles magic-link token UI
import { showOnly } from './router.js';
export function initMagicLogin() {
const params = new URLSearchParams(location.search);
const token = params.get('token');
if (token) {
const tokenInput = document.getElementById('magic-token');
if (tokenInput) tokenInput.value = token;
showOnly('magic-login-page');
const err = params.get('error');
if (err) {
const box = document.getElementById('magic-error');
box.textContent = decodeURIComponent(err);
box.style.display = 'block';
}
}
}

147
static/nav.js Normal file

@@ -0,0 +1,147 @@
// nav.js — lightweight navigation & magic-link handling
// fallback toast if app.js not yet loaded
if (typeof window.showToast !== "function") {
window.showToast = (msg) => alert(msg);
}
document.addEventListener("DOMContentLoaded", () => {
const Router = {
sections: Array.from(document.querySelectorAll("main > section")),
showOnly(id) {
this.sections.forEach(sec => {
sec.hidden = sec.id !== id;
sec.tabIndex = -1;
});
localStorage.setItem("last_page", id);
const target = document.getElementById(id);
if (target) target.focus();
},
init() {
initNavLinks();
initBackButtons();
initStreamsLoader();
initStreamLinks();
}
};
const showOnly = Router.showOnly.bind(Router);
// Highlight active profile link on browser back/forward navigation
function highlightActiveProfileLink() {
const params = new URLSearchParams(window.location.search);
const profileUid = params.get('profile');
const ul = document.getElementById('stream-list');
if (!ul) return;
ul.querySelectorAll('a.profile-link').forEach(link => {
const url = new URL(link.href, window.location.origin);
const uidParam = url.searchParams.get('profile');
link.classList.toggle('active', uidParam === profileUid);
});
}
window.addEventListener('popstate', highlightActiveProfileLink);
/* restore last page (unless magiclink token present) */
const params = new URLSearchParams(location.search);
const token = params.get("token");
if (!token) {
const last = localStorage.getItem("last_page");
if (last && document.getElementById(last)) showOnly(last);
// Highlight active link on initial load
highlightActiveProfileLink();
}
/* token → show magiclogin page */
if (token) {
document.getElementById("magic-token").value = token;
showOnly("magic-login-page");
const err = params.get("error");
if (err) {
const box = document.getElementById("magic-error");
box.textContent = decodeURIComponent(err);
box.style.display = "block";
}
}
// Debounce loading and helper for streams list
let loadingStreams = false;
function renderStreamList(streams) {
const ul = document.getElementById("stream-list");
if (!ul) return;
if (streams.length) {
streams.sort();
ul.innerHTML = streams.map(uid => `
<li><a href="/?profile=${encodeURIComponent(uid)}" class="profile-link">▶ ${uid}</a></li>
`).join("");
} else {
ul.innerHTML = "<li>No active streams.</li>";
}
// Ensure correct link is active after rendering
highlightActiveProfileLink();
}
// Initialize navigation listeners
function initNavLinks() {
const linksContainer = document.getElementById("links");
if (!linksContainer) return;
linksContainer.addEventListener("click", e => {
const a = e.target.closest("a[data-target]");
if (!a || !linksContainer.contains(a)) return;
e.preventDefault();
const target = a.dataset.target;
if (target) showOnly(target);
const burger = document.getElementById("burger-toggle");
if (burger && burger.checked) burger.checked = false;
});
}
function initBackButtons() {
document.querySelectorAll('a[data-back]').forEach(btn => {
btn.addEventListener("click", e => {
e.preventDefault();
const target = btn.dataset.back;
if (target) showOnly(target);
});
});
}
function initStreamsLoader() {
const streamsLink = document.getElementById("show-streams");
streamsLink?.addEventListener("click", async e => {
e.preventDefault();
if (loadingStreams) return;
loadingStreams = true;
showOnly("stream-page");
try {
const res = await fetch("/streams");
if (!res.ok) throw new Error(`HTTP error ${res.status}`);
const data = await res.json();
renderStreamList(data.streams || []);
} catch {
const ul = document.getElementById("stream-list");
if (ul) ul.innerHTML = "<li>Error loading stream list</li>";
} finally {
loadingStreams = false;
}
});
}
function initStreamLinks() {
const ul = document.getElementById("stream-list");
if (!ul) return;
ul.addEventListener("click", e => {
const a = e.target.closest("a.profile-link");
if (!a || !ul.contains(a)) return;
e.preventDefault();
const url = new URL(a.href, window.location.origin);
const profileUid = url.searchParams.get("profile");
if (profileUid && window.location.search !== `?profile=${encodeURIComponent(profileUid)}`) {
window.profileNavigationTriggered = true;
window.history.pushState({}, '', `/?profile=${encodeURIComponent(profileUid)}`);
window.dispatchEvent(new Event("popstate"));
}
});
}
// Initialize Router
Router.init();
});

15
static/router.js Normal file

@@ -0,0 +1,15 @@
// static/router.js — core routing for SPA navigation
export const Router = {
sections: Array.from(document.querySelectorAll("main > section")),
showOnly(id) {
this.sections.forEach(sec => {
sec.hidden = sec.id !== id;
sec.tabIndex = -1;
});
localStorage.setItem("last_page", id);
const target = document.getElementById(id);
if (target) target.focus();
}
};
export const showOnly = Router.showOnly.bind(Router);

17
static/sound.js Normal file
View File

@ -0,0 +1,17 @@
// sound.js — reusable Web Audio beep
export function playBeep(frequency = 432, duration = 0.2, type = 'sine') {
const ctx = new (window.AudioContext || window.webkitAudioContext)();
const osc = ctx.createOscillator();
const gain = ctx.createGain();
osc.type = type;
osc.frequency.value = frequency;
osc.connect(gain);
gain.connect(ctx.destination);
gain.gain.setValueAtTime(0.1, ctx.currentTime); // subtle volume
osc.start();
osc.stop(ctx.currentTime + duration);
}

76
static/streams-ui.js Normal file
View File

@ -0,0 +1,76 @@
// static/streams-ui.js — public streams loader and profile-link handling
import { showOnly } from './router.js';
let loadingStreams = false;
export function renderStreamList(streams) {
const ul = document.getElementById('stream-list');
if (!ul) return;
if (streams.length) {
streams.sort();
ul.innerHTML = streams
.map(
uid => `<li><a href="/?profile=${encodeURIComponent(uid)}" class="profile-link">▶ ${uid}</a></li>`
)
.join('');
} else {
ul.innerHTML = '<li>No active streams.</li>';
}
highlightActiveProfileLink();
}
export function highlightActiveProfileLink() {
const params = new URLSearchParams(window.location.search);
const profileUid = params.get('profile');
const ul = document.getElementById('stream-list');
if (!ul) return;
ul.querySelectorAll('a.profile-link').forEach(link => {
const url = new URL(link.href, window.location.origin);
const uidParam = url.searchParams.get('profile');
link.classList.toggle('active', uidParam === profileUid);
});
}
export function initStreamsLoader() {
const streamsLink = document.getElementById('show-streams');
streamsLink?.addEventListener('click', async e => {
e.preventDefault();
if (loadingStreams) return;
loadingStreams = true;
showOnly('stream-page');
try {
const res = await fetch('/streams');
if (!res.ok) throw new Error(`HTTP error ${res.status}`);
const data = await res.json();
renderStreamList(data.streams || []);
} catch {
const ul = document.getElementById('stream-list');
if (ul) ul.innerHTML = '<li>Error loading stream list</li>';
} finally {
loadingStreams = false;
}
});
}
export function initStreamLinks() {
const ul = document.getElementById('stream-list');
if (!ul) return;
ul.addEventListener('click', e => {
const a = e.target.closest('a.profile-link');
if (!a || !ul.contains(a)) return;
e.preventDefault();
const url = new URL(a.href, window.location.origin);
const profileUid = url.searchParams.get('profile');
if (profileUid && window.location.search !== `?profile=${encodeURIComponent(profileUid)}`) {
window.profileNavigationTriggered = true;
window.history.pushState({}, '', `/?profile=${encodeURIComponent(profileUid)}`);
window.dispatchEvent(new Event('popstate'));
}
});
}
export function initStreamsUI() {
initStreamsLoader();
initStreamLinks();
window.addEventListener('popstate', highlightActiveProfileLink);
}

519
static/style.css Normal file
View File

@ -0,0 +1,519 @@
/* style.css — minimal UI styling for dicta2stream */
.spinner {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
z-index: 10;
display: flex;
flex-direction: column;
align-items: center;
gap: 1.2em;
pointer-events: none; /* allow clicks through except for children */
}
.spinner > * {
pointer-events: auto;
}
.quota-meter {
font-size: 0.9em;
margin-top: 0.5em;
color: #555;
}
.footer-hint {
font-size: 0.9em;
opacity: 0.7;
}
.cancel-upload {
display: none;
margin-top: 0.4em;
font-size: 0.95em;
background: #b22222;
color: white;
border: none;
padding: 0.5em 1.2em;
border-radius: 4px;
cursor: pointer;
box-shadow: 0 2px 6px rgba(0,0,0,.08);
transition: background 0.2s;
}
.cancel-upload:hover {
background: #e74c3c;
}
.delete-account {
margin-top: 1em;
background: #ccc;
color: black;
padding: 0.4em 1em;
border-radius: 5px;
font-size: 0.9em;
border: none;
cursor: pointer;
}
.me-url {
width: 100%;
font-family: monospace;
margin: 0.5em 0;
padding: 0.4em;
border: 1px solid #ccc;
border-radius: 4px;
background: #f9f9f9;
}
button.logout {
display: block;
margin: 1em auto;
padding: 0.4em 1.2em;
background: #eee;
border: 1px solid #ccc;
border-radius: 6px;
font-size: 0.95em;
cursor: pointer;
transition: background 0.2s ease;
}
button.logout:hover {
background: #ddd;
}
audio {
display: block;
margin: 1em auto;
max-width: 100%;
outline: none;
border-radius: 6px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
background: #fff;
}
#me-wrap {
background: #fdfdfd;
padding: 1.5em;
border: 1px solid #ddd;
border-radius: 8px;
margin: 2em auto;
max-width: 600px;
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.03);
transition: opacity 0.6s ease;
opacity: 1;
}
.hidden {
display: none !important;
}
.button:focus {
outline: 2px solid #00aaff;
outline-offset: 2px;
}
#quota-bar[value="100"] {
accent-color: #b22222;
}
#quota-bar[value="100"] + #quota-text::after {
content: " (Full)";
color: #b22222;
font-weight: bold;
}
input[disabled], button[disabled] {
opacity: 0.5;
cursor: not-allowed;
}
.uploading-toast {
color: #1e90ff;
background: #eaf4ff;
border: 1px solid #b3daff;
padding: 0.5em 1em;
border-radius: 6px;
font-size: 0.95em;
animation: fadeIn 0.3s ease;
display: inline-block;
}
.success-toast {
color: #2e8b57;
background: #e7f6ed;
border: 1px solid #c2e3d3;
padding: 0.5em 1em;
border-radius: 6px;
font-size: 0.95em;
animation: fadeIn 0.3s ease;
display: inline-block;
}
body {
font-family: sans-serif;
background: #fafafa;
margin: 0;
padding: 1em;
color: #333;
}
header h1 {
animation: slideDown 0.6s ease-out;
}
header h1::before {
content: "🎙️ ";
animation: pulse 1.2s ease-in-out infinite;
}
header p {
animation: fadeIn 0.8s ease-out 0.3s both;
}
header, footer {
text-align: center;
margin-bottom: 1.5em;
}
footer p {
margin: 0.4em 0;
font-size: 0.9em;
opacity: 0.8;
}
.dropzone::before {
animation: emojiBounce 0.6s ease-out;
content: "📤 ";
font-size: 1.2em;
display: block;
margin-bottom: 0.5em;
}
.dropzone {
transition: background 0.3s ease, border-color 0.3s ease, box-shadow 0.3s ease;
border: 2px dashed #999;
padding: 2em;
text-align: center;
cursor: pointer;
}
.dropzone.dragover {
background: #f0f8ff;
border-color: #00aaff;
box-shadow: 0 0 0.4em rgba(0, 170, 255, 0.4);
}
.dropzone.pulse,
.dropzone.pulse::before {
box-shadow: 0 0 0.6em rgba(0, 170, 255, 0.6);
animation: pulse 0.6s ease-in-out;
}
/* Reusable glowing pulse */
.pulse-glow {
animation: pulse 0.4s ease-in-out;
box-shadow: 0 0 0.6em rgba(0, 170, 255, 0.6);
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(-5px); }
to { opacity: 1; transform: translateY(0); }
}
#file-info {
animation: fadeIn 0.4s ease;
margin-top: 0.8em;
font-size: 0.95em;
text-align: center;
color: #555;
}
.emoji-bounce {
display: inline-block;
animation: emojiBounce 0.6s ease-out;
}
@keyframes emojiBounce {
0% { transform: scale(1); }
30% { transform: scale(1.3); }
60% { transform: scale(0.95); }
100% { transform: scale(1); }
}
#spinner {
border: 3px solid #eee;
border-top: 3px solid #2e8b57;
border-radius: 50%;
width: 24px;
height: 24px;
animation: spin 1s linear infinite;
margin: 0 auto 1em;
display: none;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
#status {
animation: fadeIn 0.4s ease;
margin: 1em auto;
font-weight: bold;
text-align: center;
}
#status:empty {
display: none;
}
#status.success::before {
content: "✅ ";
}
.error-toast {
color: #b22222;
background: #fcebea;
border: 1px solid #f5c6cb;
padding: 0.5em 1em;
border-radius: 6px;
font-size: 0.95em;
animation: fadeIn 0.3s ease;
display: inline-block;
}
#stream-info.fade-out {
animation: fadeOut 0.3s ease forwards;
}
#stream-info {
text-align: center;
opacity: 0;
transition: opacity 0.5s ease;
}
#stream-info.visible {
animation: fadeIn 0.4s ease forwards;
opacity: 1;
}
a.button.pulse-glow { animation: pulse 0.4s ease-in-out; }
a.button::before {
content: "🔗 ";
margin-right: 0.3em;
}
a.button[aria-label] {
position: relative;
}
a.button[aria-label]::after {
content: attr(aria-label);
position: absolute;
left: 50%;
bottom: 100%;
transform: translateX(-50%);
background: #333;
color: #fff;
font-size: 0.75em;
padding: 0.3em 0.6em;
border-radius: 4px;
white-space: nowrap;
opacity: 0;
pointer-events: none;
transition: opacity 0.2s ease;
margin-bottom: 0.4em;
}
a.button[aria-label]:hover::after {
opacity: 1;
}
a.button {
display: inline-block;
background: #2e8b57;
color: white;
padding: 0.4em 1em;
margin-top: 0.5em;
border-radius: 4px;
text-decoration: none;
font-weight: bold;
transition: background 0.2s ease;
}
a.button:hover {
animation: pulse 0.4s ease-in-out;
background: #256b45;
}
section article {
max-width: 600px;
margin: 2em auto;
padding: 1.5em;
background: #fff;
border-radius: 6px;
box-shadow: 0 2px 6px rgba(0,0,0,0.05);
}
ul#stream-list,
ul#me-files {
padding-left: 0;
list-style: none;
text-align: center;
margin-top: 1em;
}
ul#stream-list li a,
ul#me-files li {
display: inline-flex;
align-items: center;
justify-content: center;
margin: 0.3em auto;
padding: 0.4em 0.8em;
border-radius: 6px;
background: #f0f0f0;
font-size: 0.95em;
max-width: 90%;
gap: 1em;
color: #333;
}
ul#stream-list li a:hover,
ul#me-files li:hover {
background: #e5f5ec;
}
section article h2 {
text-align: center;
margin-top: 0;
margin-bottom: 0.6em;
}
section article a[href^="mailto"]::before {
content: "✉️ ";
margin-right: 0.3em;
}
section article a[href^="mailto"] {
display: inline-block;
background: #2e8b57;
color: white;
padding: 0.3em 0.9em;
margin-top: 0.5em;
border-radius: 4px;
text-decoration: none;
font-weight: bold;
transition: background 0.2s ease;
}
section article a[href^="mailto"]:hover {
background: #256b45;
}
code {
background: #eee;
padding: 0.2em 0.4em;
border-radius: 4px;
font-family: monospace;
}
@media (min-width: 960px) {
section#links {
display: flex;
flex-direction: column;
align-items: center;
background: #e0f7ff;
padding: 1em;
margin: 2em auto;
border-radius: 6px;
max-width: 600px;
box-shadow: 0 2px 6px rgba(0, 170, 255, 0.1);
}
section#links p:first-child a,
section#links p:nth-child(2) a {
display: inline-block;
background: #2e8b57;
color: white;
font-weight: bold;
padding: 0.4em 1em;
border-radius: 5px;
text-decoration: none;
transition: background 0.2s ease;
margin-bottom: 0.8em;
}
section#links p:first-child a:hover,
section#links p:nth-child(2) a:hover {
background: #256b45;
}
}
#burger-toggle {
display: none;
}
#burger-label {
display: none;
position: absolute;
top: 1em;
right: 1em;
cursor: pointer;
z-index: 20;
}
#burger-label span {
display: block;
width: 25px;
height: 3px;
margin: 5px;
background-color: #333;
transition: all 0.3s ease;
}
@media (max-width: 959px) {
#burger-label {
display: block;
}
section#links {
display: none;
background: #fff;
position: absolute;
top: 3.2em;
right: 1em;
border: 1px solid #ccc;
border-radius: 6px;
box-shadow: 0 2px 6px rgba(0,0,0,0.1);
padding: 1em;
z-index: 10;
}
#burger-toggle:checked + #burger-label + section#links {
display: block;
}
}
@keyframes slideFadeIn {
0% {
opacity: 0;
transform: translateY(-10px);
}
100% {
opacity: 1;
transform: translateY(0);
}
}
@keyframes slideFadeOut {
0% {
opacity: 1;
transform: translateY(0);
}
100% {
opacity: 0;
transform: translateY(-10px);
}
}

BIN
streams/devuser/stream.opus Normal file

Binary file not shown.

127
upload.py Normal file
View File

@ -0,0 +1,127 @@
# upload.py — FastAPI route for upload + quota check + voice conversion
from fastapi import APIRouter, UploadFile, Form, HTTPException, Request, Depends
from slowapi import Limiter
from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded
from pathlib import Path
from convert_to_opus import convert_to_opus
from database import get_db
from models import UploadLog, UserQuota, User
from sqlalchemy import select
limiter = Limiter(key_func=get_remote_address)
router = APIRouter()
# Not needed for SlowAPI ≥ 0.1.5
DATA_ROOT = Path("./data")
@router.post("/upload")
@limiter.limit("5/minute")
async def upload(request: Request, db = Depends(get_db), uid: str = Form(...), file: UploadFile = Form(...)):
from log import log_violation
try:
user_dir = DATA_ROOT / uid
user_dir.mkdir(parents=True, exist_ok=True)
raw_path = user_dir / ("raw." + file.filename.split(".")[-1])
import uuid
unique_name = str(uuid.uuid4()) + ".opus"
# Save temp upload FIRST
with open(raw_path, "wb") as f:
f.write(await file.read())
# Block music/singing via Ollama prompt
import requests
try:
with open(raw_path, "rb") as f:
audio = f.read()
res = requests.post("http://localhost:11434/api/generate", json={
"model": "whisper",
"prompt": "Does this audio contain music or singing? Answer yes or no only.",
"audio": audio
}, timeout=10)
resp = res.json().get("response", "").lower()
if "yes" in resp:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=403, detail="Upload rejected: music or singing detected")
except HTTPException:
# music/singing was detected above; re-raise instead of swallowing the 403
raise
except Exception:
# fallback: allow the upload if the Ollama check itself fails
pass
processed_path = user_dir / unique_name
# Block unconfirmed users (use ORM)
user = db.exec(select(User).where((User.username == uid) | (User.email == uid))).first()
# If result is a Row or tuple, extract the User object
if user is not None and not isinstance(user, User) and hasattr(user, "__getitem__"):
user = user[0]
log_violation("UPLOAD", request.client.host, uid, f"DEBUG: Incoming uid={uid}, user found={user}, confirmed={getattr(user, 'confirmed', None)}")
log_violation("UPLOAD", request.client.host, uid, f"DEBUG: After unpack, user={user}, type={type(user)}, confirmed={getattr(user, 'confirmed', None)}")
if not user or not hasattr(user, "confirmed") or not user.confirmed:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=403, detail="Account not confirmed")
# DB-based quota check
quota = db.get(UserQuota, uid)
if quota and quota.storage_bytes >= 100 * 1024 * 1024:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=400, detail="Quota exceeded")
try:
convert_to_opus(str(raw_path), str(processed_path))
except Exception as e:
raw_path.unlink(missing_ok=True)
raise HTTPException(status_code=500, detail=str(e))
original_size = raw_path.stat().st_size
raw_path.unlink(missing_ok=True) # cleanup
# Always copy latest upload as stream.opus for redirect compatibility
import shutil
stream_path = user_dir / "stream.opus"
shutil.copy2(processed_path, stream_path)
# Also update ./data/{uid}/stream.opus for public stream listing
streams_dir = Path("data") / uid
streams_dir.mkdir(parents=True, exist_ok=True)
streams_stream_path = streams_dir / "stream.opus"
shutil.copy2(processed_path, streams_stream_path)
db.add(UploadLog(
uid=uid,
ip=request.client.host,
filename=file.filename,
size_bytes=original_size
))
# Store updated quota
size = processed_path.stat().st_size
quota = db.get(UserQuota, uid)
if not quota:
quota = UserQuota(uid=uid)
db.add(quota)
quota.storage_bytes += size
db.commit()
return {
"stream_url": f"http://localhost:8000/streams/{uid}/stream.opus",
"filename": file.filename,
"original_size": round(original_size / 1024, 1),
"quota": {
"used_mb": round(quota.storage_bytes / (1024 * 1024), 2)
}
}
except HTTPException as e:
# Already a JSON response, just re-raise
raise e
except Exception as e:
import traceback
tb = traceback.format_exc()
# Log and return a JSON error
try:
log_violation("UPLOAD", request.client.host, uid, f"Unexpected error: {type(e).__name__}: {str(e)}\n{tb}")
except Exception:
pass
return {"detail": f"Server error: {type(e).__name__}: {str(e)}"}

247
venv/bin/Activate.ps1 Normal file
View File

@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

69
venv/bin/activate Normal file
View File

@ -0,0 +1,69 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV=/home/oib/games/dicta2stream-bubu/venv
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(venv) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(venv) '
export VIRTUAL_ENV_PROMPT
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

26
venv/bin/activate.csh Normal file
View File

@ -0,0 +1,26 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV /home/oib/games/dicta2stream-bubu/venv
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(venv) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(venv) '
endif
alias pydoc python -m pydoc
rehash

69
venv/bin/activate.fish Normal file
View File

@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV /home/oib/games/dicta2stream-bubu/venv
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(venv) '
end

8
venv/bin/dotenv Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

8
venv/bin/fastapi Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from fastapi.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/normalizer Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli.cli_detect())

8
venv/bin/pip Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/pip3 Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/pip3.11 Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

1
venv/bin/python Symbolic link
View File

@ -0,0 +1 @@
python3

1
venv/bin/python3 Symbolic link
View File

@ -0,0 +1 @@
/usr/bin/python3

1
venv/bin/python3.11 Symbolic link
View File

@ -0,0 +1 @@
python3

8
venv/bin/uvicorn Executable file
View File

@ -0,0 +1,8 @@
#!/home/oib/games/dicta2stream-bubu/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from uvicorn.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View File

@ -0,0 +1,164 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif
typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11
#ifndef GREENLET_MODULE
/* This section is used by modules that uses the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2017 Laurent LAPORTE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,195 @@
Metadata-Version: 2.2
Name: Deprecated
Version: 1.2.18
Summary: Python @deprecated decorator to deprecate old python classes, functions or methods.
Home-page: https://github.com/laurent-laporte-pro/deprecated
Author: Laurent LAPORTE
Author-email: laurent.laporte.pro@gmail.com
License: MIT
Project-URL: Documentation, https://deprecated.readthedocs.io/en/latest/
Project-URL: Source, https://github.com/laurent-laporte-pro/deprecated
Project-URL: Bug Tracker, https://github.com/laurent-laporte-pro/deprecated/issues
Keywords: deprecate,deprecated,deprecation,warning,warn,decorator
Platform: any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
Requires-Dist: wrapt<2,>=1.10
Provides-Extra: dev
Requires-Dist: tox; extra == "dev"
Requires-Dist: PyTest; extra == "dev"
Requires-Dist: PyTest-Cov; extra == "dev"
Requires-Dist: bump2version<1; extra == "dev"
Requires-Dist: setuptools; python_version >= "3.12" and extra == "dev"
Dynamic: author
Dynamic: author-email
Dynamic: classifier
Dynamic: description
Dynamic: description-content-type
Dynamic: home-page
Dynamic: keywords
Dynamic: license
Dynamic: platform
Dynamic: project-url
Dynamic: provides-extra
Dynamic: requires-dist
Dynamic: requires-python
Dynamic: summary
Deprecated Library
------------------
Deprecated is Easy to Use
`````````````````````````
If you need to mark a function or a method as deprecated,
you can use the ``@deprecated`` decorator:
Save in a hello.py:
.. code:: python
from deprecated import deprecated
@deprecated(version='1.2.1', reason="You should use another function")
def some_old_function(x, y):
return x + y
class SomeClass(object):
@deprecated(version='1.3.0', reason="This method is deprecated")
def some_old_method(self, x, y):
return x + y
some_old_function(12, 34)
obj = SomeClass()
obj.some_old_method(5, 8)
And Easy to Setup
`````````````````
And run it:
.. code:: bash
$ pip install Deprecated
$ python hello.py
hello.py:15: DeprecationWarning: Call to deprecated function (or staticmethod) some_old_function.
(You should use another function) -- Deprecated since version 1.2.0.
some_old_function(12, 34)
hello.py:17: DeprecationWarning: Call to deprecated method some_old_method.
(This method is deprecated) -- Deprecated since version 1.3.0.
obj.some_old_method(5, 8)
You can document your code
``````````````````````````
Have you ever wondered how to document that some functions, classes, methods, etc. are deprecated?
This is now possible with the integrated Sphinx directives:
For instance, in hello_sphinx.py:
.. code:: python
from deprecated.sphinx import deprecated
from deprecated.sphinx import versionadded
from deprecated.sphinx import versionchanged
@versionadded(version='1.0', reason="This function is new")
def function_one():
'''This is the function one'''
@versionchanged(version='1.0', reason="This function is modified")
def function_two():
'''This is the function two'''
@deprecated(version='1.0', reason="This function will be removed soon")
def function_three():
'''This is the function three'''
function_one()
function_two()
function_three() # warns
help(function_one)
help(function_two)
help(function_three)
The result is immediate
```````````````````````
Run it:
.. code:: bash
$ python hello_sphinx.py
hello_sphinx.py:23: DeprecationWarning: Call to deprecated function (or staticmethod) function_three.
(This function will be removed soon) -- Deprecated since version 1.0.
function_three() # warns
Help on function function_one in module __main__:
function_one()
This is the function one
.. versionadded:: 1.0
This function is new
Help on function function_two in module __main__:
function_two()
This is the function two
.. versionchanged:: 1.0
This function is modified
Help on function function_three in module __main__:
function_three()
This is the function three
.. deprecated:: 1.0
This function will be removed soon
Links
`````
* `Python package index (PyPi) <https://pypi.org/project/Deprecated/>`_
* `GitHub website <https://github.com/laurent-laporte-pro/deprecated>`_
* `Read The Docs <https://readthedocs.org/projects/deprecated>`_
* `EBook on Lulu.com <http://www.lulu.com/commerce/index.php?fBuyContent=21305117>`_
* `StackOverFlow Q&A <https://stackoverflow.com/a/40301488/1513933>`_
* `Development version
<https://github.com/laurent-laporte-pro/deprecated/zipball/master#egg=Deprecated-dev>`_

View File

@ -0,0 +1,12 @@
Deprecated-1.2.18.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Deprecated-1.2.18.dist-info/LICENSE.rst,sha256=HoPt0VvkGbXVveNy4yXlJ_9PmRX1SOfHUxS0H2aZ6Dw,1081
Deprecated-1.2.18.dist-info/METADATA,sha256=4CrUw5Bl8_NsBuZYe0Nw-mIwQnVpT1CnmBYU9BqOuq8,5725
Deprecated-1.2.18.dist-info/RECORD,,
Deprecated-1.2.18.dist-info/WHEEL,sha256=9Hm2OB-j1QcCUq9Jguht7ayGIIZBRTdOXD1qg9cCgPM,109
Deprecated-1.2.18.dist-info/top_level.txt,sha256=nHbOYawKPQQE5lQl-toUB1JBRJjUyn_m_Mb8RVJ0RjA,11
deprecated/__init__.py,sha256=yZNbmDKXF4PLtp_Ikdb_9ObJLkHuFSUHvqidFTKKGFM,351
deprecated/__pycache__/__init__.cpython-311.pyc,,
deprecated/__pycache__/classic.cpython-311.pyc,,
deprecated/__pycache__/sphinx.cpython-311.pyc,,
deprecated/classic.py,sha256=7WXOt4Vf1NhrUznm8ypjS50CMyAdZwrGT58Lhb8fW14,10609
deprecated/sphinx.py,sha256=cOKnXbDyFAwDr5O7HBEpgQrx-J-qfp57sfdK_LabDxs,11109

View File

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: setuptools (75.8.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View File

@ -0,0 +1 @@
deprecated

View File

@ -0,0 +1,222 @@
# don't import any costly modules
import sys
import os
is_pypy = '__pypy__' in sys.builtin_module_names
def warn_distutils_present():
if 'distutils' not in sys.modules:
return
if is_pypy and sys.version_info < (3, 7):
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
import warnings
warnings.warn(
"Distutils was imported before Setuptools, but importing Setuptools "
"also replaces the `distutils` module in `sys.modules`. This may lead "
"to undesirable behaviors or errors. To avoid these issues, avoid "
"using distutils directly, ensure that setuptools is installed in the "
"traditional way (e.g. not an editable install), and/or make sure "
"that setuptools is always imported before distutils."
)
def clear_distutils():
if 'distutils' not in sys.modules:
return
import warnings
warnings.warn("Setuptools is replacing distutils.")
mods = [
name
for name in sys.modules
if name == "distutils" or name.startswith("distutils.")
]
for name in mods:
del sys.modules[name]
def enabled():
"""
Allow selection of distutils by environment variable.
"""
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
return which == 'local'
def ensure_local_distutils():
import importlib
clear_distutils()
# With the DistutilsMetaFinder in place,
# perform an import to cause distutils to be
# loaded from setuptools._distutils. Ref #2906.
with shim():
importlib.import_module('distutils')
# check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
assert 'setuptools._distutils.log' not in sys.modules
def do_override():
"""
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
if enabled():
warn_distutils_present()
ensure_local_distutils()
class _TrivialRe:
def __init__(self, *patterns):
self._patterns = patterns
def match(self, string):
return all(pat in string for pat in self._patterns)
class DistutilsMetaFinder:
def find_spec(self, fullname, path, target=None):
# optimization: only consider top level modules and those
# found in the CPython test suite.
if path is not None and not fullname.startswith('test.'):
return
method_name = 'spec_for_{fullname}'.format(**locals())
method = getattr(self, method_name, lambda: None)
return method()
def spec_for_distutils(self):
if self.is_cpython():
return
import importlib
import importlib.abc
import importlib.util
try:
mod = importlib.import_module('setuptools._distutils')
except Exception:
# There are a couple of cases where setuptools._distutils
# may not be present:
# - An older Setuptools without a local distutils is
# taking precedence. Ref #2957.
# - Path manipulation during sitecustomize removes
# setuptools from the path but only after the hook
# has been loaded. Ref #2980.
# In either case, fall back to stdlib behavior.
return
class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
mod.__name__ = 'distutils'
return mod
def exec_module(self, module):
pass
return importlib.util.spec_from_loader(
'distutils', DistutilsLoader(), origin=mod.__file__
)
@staticmethod
def is_cpython():
"""
Suppress supplying distutils for CPython (build and tests).
Ref #2965 and #3007.
"""
return os.path.isfile('pybuilddir.txt')
def spec_for_pip(self):
"""
Ensure stdlib distutils when running under pip.
See pypa/pip#8761 for rationale.
"""
if self.pip_imported_during_build():
return
clear_distutils()
self.spec_for_distutils = lambda: None
@classmethod
def pip_imported_during_build(cls):
"""
Detect if pip is being imported in a build script. Ref #2355.
"""
import traceback
return any(
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
)
@staticmethod
def frame_file_is_setup(frame):
"""
Return True if the indicated frame suggests a setup.py file.
"""
# some frames may not have __file__ (#2940)
return frame.f_globals.get('__file__', '').endswith('setup.py')
def spec_for_sensitive_tests(self):
"""
Ensure stdlib distutils when running select tests under CPython.
python/cpython#91169
"""
clear_distutils()
self.spec_for_distutils = lambda: None
sensitive_tests = (
[
'test.test_distutils',
'test.test_peg_generator',
'test.test_importlib',
]
if sys.version_info < (3, 10)
else [
'test.test_distutils',
]
)
for name in DistutilsMetaFinder.sensitive_tests:
setattr(
DistutilsMetaFinder,
f'spec_for_{name}',
DistutilsMetaFinder.spec_for_sensitive_tests,
)
DISTUTILS_FINDER = DistutilsMetaFinder()
def add_shim():
DISTUTILS_FINDER in sys.meta_path or insert_shim()
class shim:
def __enter__(self):
insert_shim()
def __exit__(self, exc, value, tb):
remove_shim()
def insert_shim():
sys.meta_path.insert(0, DISTUTILS_FINDER)
def remove_shim():
try:
sys.meta_path.remove(DISTUTILS_FINDER)
except ValueError:
pass

View File

@ -0,0 +1 @@
__import__('_distutils_hack').do_override()

View File

@ -0,0 +1,295 @@
Metadata-Version: 2.3
Name: annotated-types
Version: 0.7.0
Summary: Reusable constraint types to use with typing.Annotated
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
Project-URL: Source, https://github.com/annotated-types/annotated-types
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Environment :: MacOS X
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Typing :: Typed
Requires-Python: >=3.8
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
Description-Content-Type: text/markdown
# annotated-types
[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
adding context-specific metadata to existing types, and specifies that
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
logic for `x`.
This package provides metadata objects which can be used to represent common
constraints such as upper and lower bounds on scalar values and collection sizes,
a `Predicate` marker for runtime checks, and
descriptions of how we intend these metadata to be interpreted. In some cases,
we also note alternative representations which do not require this package.
## Install
```bash
pip install annotated-types
```
## Examples
```python
from typing import Annotated
from annotated_types import Gt, Len, Predicate
class MyClass:
age: Annotated[int, Gt(18)] # Valid: 19, 20, ...
# Invalid: 17, 18, "19", 19.0, ...
factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ...
# Invalid: 4, 8, -2, 5.0, "prime", ...
my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50]
# Invalid: (1, 2), ["abc"], [0] * 20
```
## Documentation
_While `annotated-types` avoids runtime checks for performance, users should not
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
Downstream implementors may choose to raise an error, emit a warning, silently ignore
a metadata item, etc., if the metadata objects described below are used with an
incompatible type - or for any other reason!_
### Gt, Ge, Lt, Le
Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
dates, times, strings, sets, etc. Note that the boundary value need not be of the
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)`
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
the `annotated-types` package.
To be explicit, these types have the following meanings:
* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
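A minimal sketch of how a consumer might enforce such a bound at runtime (assumption: each constraint exposes its boundary under the matching lowercase attribute, e.g. `Gt(18).gt == 18`):
```python
from typing import Annotated, get_args, get_origin
from annotated_types import Gt

def check_gt(tp, value):
    # Walk the Annotated metadata and apply any Gt bound found.
    if get_origin(tp) is Annotated:
        for arg in get_args(tp)[1:]:
            if isinstance(arg, Gt) and not value > arg.gt:
                raise ValueError(f"expected a value greater than {arg.gt}, got {value!r}")
    return value

Age = Annotated[int, Gt(18)]
check_gt(Age, 19)  # ok
check_gt(Age, 17)  # raises ValueError
```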
### Interval
`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
metadata object. `None` attributes should be ignored, and non-`None` attributes
treated as per the single bounds above.
### MultipleOf
`MultipleOf(multiple_of=x)` might be interpreted in two ways:
1. Python semantics, implying `value % multiple_of == 0`, or
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
where `int(value / multiple_of) == value / multiple_of`.
We encourage users to be aware of these two common interpretations and their
distinct behaviours, especially since very large or non-integer numbers make
it easy to cause silent data corruption due to floating-point imprecision.
We encourage libraries to carefully document which interpretation they implement.
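For a concrete sense of the difference (and of the floating-point pitfall mentioned above), a small standalone sketch:
```python
def is_multiple_py(value, multiple_of):
    # Python semantics: value % multiple_of == 0
    return value % multiple_of == 0

def is_multiple_jsonschema(value, multiple_of):
    # JSON Schema semantics: int(value / multiple_of) == value / multiple_of
    return int(value / multiple_of) == value / multiple_of

print(is_multiple_py(10, 5), is_multiple_jsonschema(10, 5))        # True True: integers behave as expected
print(is_multiple_py(0.3, 0.1), is_multiple_jsonschema(0.3, 0.1))  # False False: 0.3 / 0.1 is not exactly 3.0 in binary floating point
```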
### MinLen, MaxLen, Len
`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
As well as `Len()` which can optionally include upper and lower bounds, we also
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
and `Len(max_length=y)` respectively.
`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
Examples of usage:
* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
#### Changed in v0.4.0
* `min_inclusive` has been renamed to `min_length`, no change in meaning
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
meaning of the upper bound in slices vs. `Len`
See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
### Timezone
`Timezone` can be used with a `datetime` or a `time` to express which timezones
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
expresses that any timezone-aware datetime is allowed. You may also pass a specific
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
allow a specific timezone, though we note that this is often a symptom of fragile design.
#### Changed in v0.x.x
* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
### Unit
`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
would be a float representing a velocity in meters per second.
Please note that `annotated_types` itself makes no attempt to parse or validate
the unit string in any way. That is left entirely to downstream libraries,
such as [`pint`](https://pint.readthedocs.io) or
[`astropy.units`](https://docs.astropy.org/en/stable/units/).
An example of how a library might use this metadata:
```python
from annotated_types import Unit
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
# given a type annotated with a unit:
Meters = Annotated[float, Unit("m")]
# you can cast the annotation to a specific unit type with any
# callable that accepts a string and returns the desired type
T = TypeVar("T")
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
if get_origin(tp) is Annotated:
for arg in get_args(tp):
if isinstance(arg, Unit):
return unit_cls(arg.unit)
return None
# using `pint`
import pint
pint_unit = cast_unit(Meters, pint.Unit)
# using `astropy.units`
import astropy.units as u
astropy_unit = cast_unit(Meters, u.Unit)
```
### Predicate
`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
Users should prefer the statically inspectable metadata above, but if you need
the full power and flexibility of arbitrary runtime predicates... here it is.
For some common constraints, we provide generic types:
* `IsLower = Annotated[T, Predicate(str.islower)]`
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
* `IsNan = Annotated[T, Predicate(math.isnan)]`
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
Some libraries might have special logic to handle known or understandable predicates,
for example by checking for `str.isdigit` and using its presence to both call custom
logic to enforce digit-only strings, and customise some generated external schema.
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
To enable basic negation of commonly used predicates like `math.isnan` without introducing indirection that would prevent implementers from introspecting the predicate, we provide a `Not` wrapper that simply negates the wrapped predicate in an introspectable manner. Several of the predicates listed above are created in this way.
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
exception. We encourage libraries to document the behaviour they choose.
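A sketch of how a consumer might evaluate `Predicate` metadata, including predicates wrapped in `Not` (the `holds` helper is hypothetical):
```python
import math
from typing import Annotated, get_args

from annotated_types import Not, Predicate

FiniteFloat = Annotated[float, Predicate(math.isfinite)]
NotNanFloat = Annotated[float, Predicate(Not(math.isnan))]

def holds(tp, value) -> bool:
    # hypothetical helper: a value is accepted when every Predicate is truthy
    return all(
        meta.func(value) for meta in get_args(tp)[1:] if isinstance(meta, Predicate)
    )

assert holds(FiniteFloat, 1.5)
assert not holds(FiniteFloat, math.inf)
assert holds(NotNanFloat, math.inf)   # inf is not NaN
assert not holds(NotNanFloat, math.nan)
```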
### Doc
`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
It returns a `DocInfo` instance with a single attribute, `documentation`, containing the value passed to `doc()`.
This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
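A minimal sketch of attaching `doc()` metadata and reading it back at runtime (the extraction loop is illustrative, not a prescribed API):
```python
from typing import Annotated, get_args, get_type_hints

from annotated_types import DocInfo, doc

def greet(name: Annotated[str, doc("The name to greet.")]) -> None:
    print(f"Hello, {name}!")

hints = get_type_hints(greet, include_extras=True)
for meta in get_args(hints["name"])[1:]:
    if isinstance(meta, DocInfo):
        print(meta.documentation)  # -> The name to greet.
```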
### Integrating downstream types with `GroupedMetadata`
Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
This can help reduce verbosity and cognitive overhead for users.
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
```python
from __future__ import annotations

from dataclasses import dataclass
from typing import Iterator

from annotated_types import GroupedMetadata, Ge

@dataclass
class Description:
    # a downstream metadata type of your own; not provided by annotated-types
    text: str

@dataclass
class Field(GroupedMetadata):
    ge: int | None = None
    description: str | None = None

    def __iter__(self) -> Iterator[object]:
        # Iterating over a GroupedMetadata object should yield annotated-types
        # constraint metadata objects which describe it as fully as possible,
        # and may include other unknown objects too.
        if self.ge is not None:
            yield Ge(self.ge)
        if self.description is not None:
            yield Description(self.description)
```
Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
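A sketch of that flattening step (the `iter_constraints` helper is hypothetical):
```python
from typing import Annotated, Iterator, get_args, get_origin

from annotated_types import GroupedMetadata, Interval

def iter_constraints(tp) -> Iterator[object]:
    # hypothetical helper: yield each metadata object, unpacking GroupedMetadata
    if get_origin(tp) is Annotated:
        for meta in get_args(tp)[1:]:
            if isinstance(meta, GroupedMetadata):
                yield from meta
            else:
                yield meta

Score = Annotated[int, Interval(ge=0, le=100)]
print(list(iter_constraints(Score)))  # [Ge(ge=0), Le(le=100)]
```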
### Consuming metadata
We do not intend to be prescriptive about _how_ the metadata and constraints are used; as an example of how one might parse constraints from type annotations, see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
It is up to the implementer to determine how this metadata is used.
You could use the metadata for runtime type checking, for generating schemas, or to generate example data, amongst other use cases.
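As a rough sketch of that kind of consumption, a hypothetical `is_valid` helper might enforce just the four comparison bounds:
```python
from typing import Annotated, get_args

from annotated_types import Ge, Gt, Le, Lt

def is_valid(tp, value) -> bool:
    # hypothetical helper: enforce only Gt/Ge/Lt/Le metadata
    for meta in get_args(tp)[1:]:
        if isinstance(meta, Gt) and not value > meta.gt:
            return False
        if isinstance(meta, Ge) and not value >= meta.ge:
            return False
        if isinstance(meta, Lt) and not value < meta.lt:
            return False
        if isinstance(meta, Le) and not value <= meta.le:
            return False
    return True

Percentage = Annotated[float, Ge(0), Le(100)]
assert is_valid(Percentage, 99.5)
assert not is_valid(Percentage, 120)
```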
## Design & History
This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
and Hypothesis, with the goal of making it as easy as possible for end-users to
provide more informative annotations for use by runtime libraries.
It is deliberately minimal and, following PEP 593, leaves considerable discretion to downstream
libraries in what (if anything!) they choose to support. Nonetheless, we expect
that staying simple and covering _only_ the most common use-cases will give users
and maintainers the best experience we can. If you'd like more constraints for your
types, follow our lead: define and document them downstream!

View File

@ -0,0 +1,10 @@
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
annotated_types-0.7.0.dist-info/RECORD,,
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
annotated_types/__pycache__/__init__.cpython-311.pyc,,
annotated_types/__pycache__/test_cases.cpython-311.pyc,,
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421

View File

@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.24.2
Root-Is-Purelib: true
Tag: py3-none-any

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2022 the contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,432 @@
import math
import sys
import types
from dataclasses import dataclass
from datetime import tzinfo
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
if sys.version_info < (3, 8):
from typing_extensions import Protocol, runtime_checkable
else:
from typing import Protocol, runtime_checkable
if sys.version_info < (3, 9):
from typing_extensions import Annotated, Literal
else:
from typing import Annotated, Literal
if sys.version_info < (3, 10):
EllipsisType = type(Ellipsis)
KW_ONLY = {}
SLOTS = {}
else:
from types import EllipsisType
KW_ONLY = {"kw_only": True}
SLOTS = {"slots": True}
__all__ = (
'BaseMetadata',
'GroupedMetadata',
'Gt',
'Ge',
'Lt',
'Le',
'Interval',
'MultipleOf',
'MinLen',
'MaxLen',
'Len',
'Timezone',
'Predicate',
'LowerCase',
'UpperCase',
'IsDigits',
'IsFinite',
'IsNotFinite',
'IsNan',
'IsNotNan',
'IsInfinite',
'IsNotInfinite',
'doc',
'DocInfo',
'__version__',
)
__version__ = '0.7.0'
T = TypeVar('T')
# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments
class SupportsGt(Protocol):
def __gt__(self: T, __other: T) -> bool:
...
class SupportsGe(Protocol):
def __ge__(self: T, __other: T) -> bool:
...
class SupportsLt(Protocol):
def __lt__(self: T, __other: T) -> bool:
...
class SupportsLe(Protocol):
def __le__(self: T, __other: T) -> bool:
...
class SupportsMod(Protocol):
def __mod__(self: T, __other: T) -> T:
...
class SupportsDiv(Protocol):
def __div__(self: T, __other: T) -> T:
...
class BaseMetadata:
"""Base class for all metadata.
This exists mainly so that implementers
can do `isinstance(..., BaseMetadata)` while traversing field annotations.
"""
__slots__ = ()
@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
"""Gt(gt=x) implies that the value must be greater than x.
It can be used with any type that supports the ``>`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
gt: SupportsGt
@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
"""Ge(ge=x) implies that the value must be greater than or equal to x.
It can be used with any type that supports the ``>=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
ge: SupportsGe
@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
"""Lt(lt=x) implies that the value must be less than x.
It can be used with any type that supports the ``<`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
lt: SupportsLt
@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
"""Le(le=x) implies that the value must be less than or equal to x.
It can be used with any type that supports the ``<=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
le: SupportsLe
@runtime_checkable
class GroupedMetadata(Protocol):
"""A grouping of multiple objects, like typing.Unpack.
`GroupedMetadata` on its own is not metadata and has no meaning.
All of the constraints and metadata should be fully expressable
in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
Concrete implementations should override `GroupedMetadata.__iter__()`
to add their own metadata.
For example:
>>> @dataclass
>>> class Field(GroupedMetadata):
>>> gt: float | None = None
>>> description: str | None = None
...
>>> def __iter__(self) -> Iterable[object]:
>>> if self.gt is not None:
>>> yield Gt(self.gt)
>>> if self.description is not None:
>>> yield Description(self.description)
Also see the implementation of `Interval` below for an example.
Parsers should recognize this and unpack it so that it can be used
both with and without unpacking:
- `Annotated[int, Field(...)]` (parser must unpack Field)
- `Annotated[int, *Field(...)]` (PEP-646)
""" # noqa: trailing-whitespace
@property
def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
return True
def __iter__(self) -> Iterator[object]:
...
if not TYPE_CHECKING:
__slots__ = () # allow subclasses to use slots
def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
# Basic ABC like functionality without the complexity of an ABC
super().__init_subclass__(*args, **kwargs)
if cls.__iter__ is GroupedMetadata.__iter__:
raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
def __iter__(self) -> Iterator[object]: # noqa: F811
raise NotImplementedError # more helpful than "None has no attribute..." type errors
@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
"""Interval can express inclusive or exclusive bounds with a single object.
It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
are interpreted the same way as the single-bound constraints.
"""
gt: Union[SupportsGt, None] = None
ge: Union[SupportsGe, None] = None
lt: Union[SupportsLt, None] = None
le: Union[SupportsLe, None] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack an Interval into zero or more single-bounds."""
if self.gt is not None:
yield Gt(self.gt)
if self.ge is not None:
yield Ge(self.ge)
if self.lt is not None:
yield Lt(self.lt)
if self.le is not None:
yield Le(self.le)
@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
"""MultipleOf(multiple_of=x) might be interpreted in two ways:
1. Python semantics, implying ``value % multiple_of == 0``, or
2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
We encourage users to be aware of these two common interpretations,
and libraries to carefully document which they implement.
"""
multiple_of: Union[SupportsDiv, SupportsMod]
@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
"""
MinLen() implies minimum inclusive length,
e.g. ``len(value) >= min_length``.
"""
min_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
"""
MaxLen() implies maximum inclusive length,
e.g. ``len(value) <= max_length``.
"""
max_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
"""
Len() implies that ``min_length <= len(value) <= max_length``.
Upper bound may be omitted or ``None`` to indicate no upper length bound.
"""
min_length: Annotated[int, Ge(0)] = 0
max_length: Optional[Annotated[int, Ge(0)]] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack a Len into zero or more single-bounds."""
if self.min_length > 0:
yield MinLen(self.min_length)
if self.max_length is not None:
yield MaxLen(self.max_length)
@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
"""Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
tz-aware but any timezone is allowed.
You may also pass a specific timezone string or tzinfo object such as
``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
you only allow a specific timezone, though we note that this is often
a symptom of poor design.
"""
tz: Union[str, tzinfo, EllipsisType, None]
@dataclass(frozen=True, **SLOTS)
class Unit(BaseMetadata):
"""Indicates that the value is a physical quantity with the specified unit.
It is intended for usage with numeric types, where the value represents the
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
or ``speed: Annotated[float, Unit('m/s')]``.
Interpretation of the unit string is left to the discretion of the consumer.
It is suggested to follow conventions established by python libraries that work
with physical quantities, such as
- ``pint`` : <https://pint.readthedocs.io/en/stable/>
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
For indicating a quantity with a certain dimensionality but without a specific unit
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
Note, however, ``annotated_types`` itself makes no use of the unit string.
"""
unit: str
@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
Users should prefer statically inspectable metadata, but if you need the full
power and flexibility of arbitrary runtime predicates... here it is.
We provide a few predefined predicates for common string constraints:
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
Some libraries might have special logic to handle certain predicates, e.g. by
checking for `str.isdigit` and using its presence to both call custom logic to
enforce digit-only strings, and customise some generated external schema.
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting exception.
"""
func: Callable[[Any], bool]
def __repr__(self) -> str:
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
return f"{self.__class__.__name__}({self.func!r})"
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
namespace := getattr(self.func.__self__, "__name__", None)
):
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
if isinstance(self.func, type(str.isascii)): # method descriptor
return f"{self.__class__.__name__}({self.func.__qualname__})"
return f"{self.__class__.__name__}({self.func.__name__})"
@dataclass
class Not:
func: Callable[[Any], bool]
def __call__(self, __v: Any) -> bool:
return not self.func(__v)
_StrType = TypeVar("_StrType", bound=str)
LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.
A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
""" # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.
A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
""" # noqa: E501
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
"""
Return True if the string is a digit string, False otherwise.
A string is a digit string if all characters in the string are digits and there is at least one character in the string.
""" # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.
ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""
_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""
try:
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
except ImportError:
@dataclass(frozen=True, **SLOTS)
class DocInfo: # type: ignore [no-redef]
"""
The return value of doc(), mainly to be used by tools that want to extract the
Annotated documentation at runtime.
"""
documentation: str
"""The documentation string passed to doc()."""
def doc(
documentation: str,
) -> DocInfo:
"""
Add documentation to a type annotation inside of Annotated.
For example:
>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
"""
return DocInfo(documentation)

View File

@ -0,0 +1,151 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
if sys.version_info < (3, 9):
from typing_extensions import Annotated
else:
from typing import Annotated
import annotated_types as at
class Case(NamedTuple):
"""
A test case for `annotated_types`.
"""
annotation: Any
valid_cases: Iterable[Any]
invalid_cases: Iterable[Any]
def cases() -> Iterable[Case]:
# Gt, Ge, Lt, Le
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(date(2000, 1, 1))],
[date(2000, 1, 2), date(2000, 1, 3)],
[date(2000, 1, 1), date(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(Decimal('1.123'))],
[Decimal('1.1231'), Decimal('123')],
[Decimal('1.123'), Decimal('0')],
)
yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
)
yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
# Interval
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
yield Case(
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
)
yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
# lengths
yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
# Timezone
yield Case(
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
)
yield Case(
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
)
yield Case(
Annotated[datetime, at.Timezone(timezone.utc)],
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
yield Case(
Annotated[datetime, at.Timezone('Europe/London')],
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
# Quantity
yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
# predicate types
yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
# check stacked predicates
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
# doc
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
# custom GroupedMetadata
class MyCustomGroupedMetadata(at.GroupedMetadata):
def __iter__(self) -> Iterator[at.Predicate]:
yield at.Predicate(lambda x: float(x).is_integer())
yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])

View File

@ -0,0 +1 @@
pip

View File

@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2018 Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,105 @@
Metadata-Version: 2.2
Name: anyio
Version: 4.9.0
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
Requires-Dist: idna>=2.8
Requires-Dist: sniffio>=1.1
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
Provides-Extra: trio
Requires-Dist: trio>=0.26.1; extra == "trio"
Provides-Extra: test
Requires-Dist: anyio[trio]; extra == "test"
Requires-Dist: blockbuster>=1.5.23; extra == "test"
Requires-Dist: coverage[toml]>=7; extra == "test"
Requires-Dist: exceptiongroup>=1.2.0; extra == "test"
Requires-Dist: hypothesis>=4.0; extra == "test"
Requires-Dist: psutil>=5.9; extra == "test"
Requires-Dist: pytest>=7.0; extra == "test"
Requires-Dist: trustme; extra == "test"
Requires-Dist: truststore>=0.9.1; python_version >= "3.10" and extra == "test"
Requires-Dist: uvloop>=0.21; (platform_python_implementation == "CPython" and platform_system != "Windows" and python_version < "3.14") and extra == "test"
Provides-Extra: doc
Requires-Dist: packaging; extra == "doc"
Requires-Dist: Sphinx~=8.2; extra == "doc"
Requires-Dist: sphinx_rtd_theme; extra == "doc"
Requires-Dist: sphinx-autodoc-typehints>=1.2.0; extra == "doc"
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/anyio?branch=master
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
:alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
:target: https://gitter.im/python-trio/AnyIO
:alt: Gitter chat
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
with the native SC of trio itself.
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
trio_. AnyIO can also be adopted into a library or application incrementally bit by bit, no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.
Documentation
-------------
View full documentation at: https://anyio.readthedocs.io/
Features
--------
AnyIO offers the following functionality:
* Task groups (nurseries_ in trio terminology)
* High-level networking (TCP, UDP and UNIX sockets)
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
3.8)
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
Protocols)
* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
streams)
* Worker threads
* Subprocesses
* Asynchronous file I/O (using worker threads)
* Signal handling
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
It even works with the popular Hypothesis_ library.
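A minimal sketch of the task group API, assuming the default asyncio backend:

.. code-block:: python

   import anyio

   async def say_after(delay: float, message: str) -> None:
       await anyio.sleep(delay)
       print(message)

   async def main() -> None:
       # the task group waits until both child tasks have finished
       async with anyio.create_task_group() as tg:
           tg.start_soon(say_after, 0.1, "hello")
           tg.start_soon(say_after, 0.2, "world")

   anyio.run(main)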
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/

View File

@ -0,0 +1,88 @@
anyio-4.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-4.9.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-4.9.0.dist-info/METADATA,sha256=vvkWPXXTbrpTCFK7zdcYwQcSQhx6Q4qITM9t_PEQCrY,4682
anyio-4.9.0.dist-info/RECORD,,
anyio-4.9.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
anyio-4.9.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-4.9.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=t8bZuNXa5ncwXBaNKbv48BDgZt48RT_zCEtrnPmjNU8,4993
anyio/__pycache__/__init__.cpython-311.pyc,,
anyio/__pycache__/from_thread.cpython-311.pyc,,
anyio/__pycache__/lowlevel.cpython-311.pyc,,
anyio/__pycache__/pytest_plugin.cpython-311.pyc,,
anyio/__pycache__/to_interpreter.cpython-311.pyc,,
anyio/__pycache__/to_process.cpython-311.pyc,,
anyio/__pycache__/to_thread.cpython-311.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-311.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,,
anyio/_backends/__pycache__/_trio.cpython-311.pyc,,
anyio/_backends/_asyncio.py,sha256=AT1oaTfCE-9YFxooMlvld2yDqY5U2A-ANMcBDh9eRfI,93455
anyio/_backends/_trio.py,sha256=HVfDqRGQ7Xj3JfTcYdgzmC7pZEplqU4NOO5kxNNSZnk,40429
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-311.pyc,,
anyio/_core/__pycache__/_asyncio_selector_thread.cpython-311.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-311.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-311.pyc,,
anyio/_core/__pycache__/_fileio.cpython-311.pyc,,
anyio/_core/__pycache__/_resources.cpython-311.pyc,,
anyio/_core/__pycache__/_signals.cpython-311.pyc,,
anyio/_core/__pycache__/_sockets.cpython-311.pyc,,
anyio/_core/__pycache__/_streams.cpython-311.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-311.pyc,,
anyio/_core/__pycache__/_tasks.cpython-311.pyc,,
anyio/_core/__pycache__/_tempfile.cpython-311.pyc,,
anyio/_core/__pycache__/_testing.cpython-311.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-311.pyc,,
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695
anyio/_core/_exceptions.py,sha256=RlPRlwastdmfDPoskdXNO6SI8_l3fclA2wtW6cokU9I,3503
anyio/_core/_fileio.py,sha256=qFZhkLIz0cGXluvih_vcPUTucgq8UFVgsTCtYbijZIg,23340
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905
anyio/_core/_sockets.py,sha256=5Okc_UThGDEN9KCnsIhqWPRHBNuSy6b4NmG1i51TVF4,27150
anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
anyio/_core/_synchronization.py,sha256=DwUh8Tl6cG_UMVC_GyzPoC_U9BpfDfjMl9SINSxcZN4,20320
anyio/_core/_tasks.py,sha256=f3CuWwo06cCZ6jaOv-JHFKWkgpgf2cvaF25Oh4augMA,4757
anyio/_core/_tempfile.py,sha256=s-_ucacXbxBH5Bo5eo65lN0lPwZQd5B8yNN_9nARpCM,19696
anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
anyio/abc/__init__.py,sha256=c2OQbTCS_fQowviMXanLPh8m29ccwkXmpDr7uyNZYOo,2652
anyio/abc/__pycache__/__init__.cpython-311.pyc,,
anyio/abc/__pycache__/_eventloop.cpython-311.pyc,,
anyio/abc/__pycache__/_resources.cpython-311.pyc,,
anyio/abc/__pycache__/_sockets.cpython-311.pyc,,
anyio/abc/__pycache__/_streams.cpython-311.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,,
anyio/abc/__pycache__/_tasks.cpython-311.pyc,,
anyio/abc/__pycache__/_testing.cpython-311.pyc,,
anyio/abc/_eventloop.py,sha256=UmL8DZCvQTgxzmyBZcGm9kWj9VQY8BMWueLh5S8yWN4,9682
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262
anyio/abc/_streams.py,sha256=He_JpkAW2g5veOzcUq0XsRC2nId_i35L-d8cs7Uj1ZQ,6598
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
anyio/abc/_tasks.py,sha256=yJWbMwowvqjlAX4oJ3l9Is1w-zwynr2lX1Z02AWJqsY,3080
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
anyio/from_thread.py,sha256=MbXHZpgM9wgsRkbGhMNMomEGYj7Y_QYq6a5BZ3c5Ev8,17478
anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=qXNwk9Pa7hPQKWocgLl9qijqKGMkGzdH2wJa-jPkGUM,9375
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-311.pyc,,
anyio/streams/__pycache__/buffered.cpython-311.pyc,,
anyio/streams/__pycache__/file.cpython-311.pyc,,
anyio/streams/__pycache__/memory.cpython-311.pyc,,
anyio/streams/__pycache__/stapled.cpython-311.pyc,,
anyio/streams/__pycache__/text.cpython-311.pyc,,
anyio/streams/__pycache__/tls.cpython-311.pyc,,
anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500
anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383
anyio/streams/memory.py,sha256=o1OVVx0OooteTTe2GytJreum93Ucuw5s4cAsr3X0-Ag,10560
anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302
anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094
anyio/streams/tls.py,sha256=HxzpVmUgo8SUSIBass_lvef1pAI1uRSrnysM3iEGzl4,13199
anyio/to_interpreter.py,sha256=UhuNCIucCRN7ZtyJg35Mlamzs1JpgDvK4xnL4TDWrAo,6527
anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595
anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (76.0.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@ -0,0 +1,2 @@
[pytest11]
anyio = anyio.pytest_plugin

View File

@ -0,0 +1 @@
anyio

View File

@ -0,0 +1,85 @@
from __future__ import annotations
from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
from ._core._eventloop import run as run
from ._core._eventloop import sleep as sleep
from ._core._eventloop import sleep_forever as sleep_forever
from ._core._eventloop import sleep_until as sleep_until
from ._core._exceptions import BrokenResourceError as BrokenResourceError
from ._core._exceptions import BrokenWorkerIntepreter as BrokenWorkerIntepreter
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
from ._core._exceptions import BusyResourceError as BusyResourceError
from ._core._exceptions import ClosedResourceError as ClosedResourceError
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
from ._core._exceptions import EndOfStream as EndOfStream
from ._core._exceptions import IncompleteRead as IncompleteRead
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
from ._core._exceptions import WouldBlock as WouldBlock
from ._core._fileio import AsyncFile as AsyncFile
from ._core._fileio import Path as Path
from ._core._fileio import open_file as open_file
from ._core._fileio import wrap_file as wrap_file
from ._core._resources import aclose_forcefully as aclose_forcefully
from ._core._signals import open_signal_receiver as open_signal_receiver
from ._core._sockets import connect_tcp as connect_tcp
from ._core._sockets import connect_unix as connect_unix
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
from ._core._sockets import (
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
)
from ._core._sockets import create_tcp_listener as create_tcp_listener
from ._core._sockets import create_udp_socket as create_udp_socket
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
from ._core._sockets import create_unix_listener as create_unix_listener
from ._core._sockets import getaddrinfo as getaddrinfo
from ._core._sockets import getnameinfo as getnameinfo
from ._core._sockets import wait_readable as wait_readable
from ._core._sockets import wait_socket_readable as wait_socket_readable
from ._core._sockets import wait_socket_writable as wait_socket_writable
from ._core._sockets import wait_writable as wait_writable
from ._core._streams import create_memory_object_stream as create_memory_object_stream
from ._core._subprocesses import open_process as open_process
from ._core._subprocesses import run_process as run_process
from ._core._synchronization import CapacityLimiter as CapacityLimiter
from ._core._synchronization import (
CapacityLimiterStatistics as CapacityLimiterStatistics,
)
from ._core._synchronization import Condition as Condition
from ._core._synchronization import ConditionStatistics as ConditionStatistics
from ._core._synchronization import Event as Event
from ._core._synchronization import EventStatistics as EventStatistics
from ._core._synchronization import Lock as Lock
from ._core._synchronization import LockStatistics as LockStatistics
from ._core._synchronization import ResourceGuard as ResourceGuard
from ._core._synchronization import Semaphore as Semaphore
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
from ._core._tasks import CancelScope as CancelScope
from ._core._tasks import create_task_group as create_task_group
from ._core._tasks import current_effective_deadline as current_effective_deadline
from ._core._tasks import fail_after as fail_after
from ._core._tasks import move_on_after as move_on_after
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
from ._core._tempfile import TemporaryFile as TemporaryFile
from ._core._tempfile import gettempdir as gettempdir
from ._core._tempfile import gettempdirb as gettempdirb
from ._core._tempfile import mkdtemp as mkdtemp
from ._core._tempfile import mkstemp as mkstemp
from ._core._testing import TaskInfo as TaskInfo
from ._core._testing import get_current_task as get_current_task
from ._core._testing import get_running_tasks as get_running_tasks
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
from ._core._typedattr import typed_attribute as typed_attribute
# Re-export imports so they look like they live directly in this package
for __value in list(locals().values()):
if getattr(__value, "__module__", "").startswith("anyio."):
__value.__module__ = __name__
del __value

Some files were not shown because too many files have changed in this diff.