RC1
@@ -9,9 +9,50 @@ def concat_opus_files(user_dir: Path, output_file: Path):
    Concatenate all .opus files in user_dir (except stream.opus) in random order into output_file.
    Overwrites output_file if it exists. Creates it if missing. Duplicate files
    (detected by content hash) are deleted as a side effect.
    """

    # Clean up any existing filelist.txt to prevent issues
    filelist_path = user_dir / 'filelist.txt'
    if filelist_path.exists():
        try:
            filelist_path.unlink()
        except Exception as e:
            print(f"Warning: Could not clean up old filelist.txt: {e}")

    # Get all opus files except stream.opus and remove any duplicates
    import hashlib
    file_hashes = set()
    files = []

    for f in user_dir.glob('*.opus'):
        if f.name == 'stream.opus':
            continue

        try:
            # Calculate file hash for duplicate detection
            hasher = hashlib.md5()
            with open(f, 'rb') as file:
                buf = file.read(65536)  # Read in 64 KiB chunks
                while len(buf) > 0:
                    hasher.update(buf)
                    buf = file.read(65536)
            file_hash = hasher.hexdigest()
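            # Note: on Python 3.11+, hashlib.file_digest(file, 'md5') performs
            # this chunked read internally and could replace the loop above.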

            # Skip if we've seen this exact file before
            if file_hash in file_hashes:
                print(f"Removing duplicate file: {f.name}")
                f.unlink()
                continue

            file_hashes.add(file_hash)
            files.append(f)

        except Exception as e:
            print(f"Error processing {f}: {e}")

    if not files:
        # If no files, create an empty stream.opus
        output_file.write_bytes(b'')
        return output_file

    random.shuffle(files)

    # Create a filelist for ffmpeg concat
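
The hunk is cut off at this point. For context, ffmpeg's concat demuxer takes a filelist with one file '<path>' entry per input; a minimal sketch of how the remainder of the function might build and run it follows. The helper name and the exact ffmpeg flags are assumptions for illustration, not shown in this diff:

# Sketch under assumptions: helper name and flags are illustrative only.
import subprocess

def build_and_run_concat(files, filelist_path, output_file):
    # One entry per line in ffmpeg concat-demuxer syntax: file '<path>'
    with open(filelist_path, 'w') as fl:
        for f in files:
            fl.write(f"file '{f.as_posix()}'\n")
    # -f concat selects the concat demuxer, -safe 0 permits paths outside the
    # working directory, and -c copy joins the opus streams without re-encoding.
    subprocess.run(
        ['ffmpeg', '-y', '-f', 'concat', '-safe', '0',
         '-i', str(filelist_path), '-c', 'copy', str(output_file)],
        check=True,
    )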