uploader-bot/app/api/routes/upload_tus.py

from __future__ import annotations
import base64
import json
import os
from datetime import datetime
from typing import Dict, Any
from base58 import b58encode
from sanic import response
from app.core._secrets import hot_pubkey
from app.core.crypto.aes_gcm_siv_stream import encrypt_file_to_encf
from app.core.crypto.aesgcm_stream import CHUNK_BYTES
from app.core.ipfs_client import add_streamed_file
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, ContentKey, IpfsSync, ContentIndexItem, UploadSession
from app.core.storage import db_session


def _b64(s: bytes) -> str:
    return base64.b64encode(s).decode()


async def s_api_v1_upload_tus_hook(request):
    """
    tusd HTTP hook endpoint. Only post-finish is handled: encrypt the uploaded file,
    add+pin the ciphertext to IPFS, and record the result in the DB. All other hook
    events are acknowledged and ignored.
    """
    try:
        payload: Dict[str, Any] = request.json or {}
    except Exception:
        payload = {}
    event = payload.get("Type") or payload.get("type") or payload.get("Event") or payload.get("event")
    upload = payload.get("Upload") or payload.get("upload") or {}
    if not event:
        return response.json({"ok": False, "error": "NO_EVENT"}, status=400)
    if event not in ("post-finish", "postfinish"):
        # Accept but ignore other events
        return response.json({"ok": True})
    # Extract the storage path from the tusd payload
    storage = upload.get("Storage") or {}
    file_path = storage.get("Path") or storage.get("path")
    if not file_path:
        return response.json({"ok": False, "error": "NO_STORAGE_PATH"}, status=400)
    meta = upload.get("MetaData") or {}
    # Common metadata keys
    title = meta.get("title") or meta.get("Title") or meta.get("name") or "Untitled"
    description = meta.get("description") or meta.get("Description") or ""
    content_type = meta.get("content_type") or meta.get("Content-Type") or "application/octet-stream"
    preview_enabled = content_type.startswith("audio/") or content_type.startswith("video/")
    # Optional preview window overrides from tus metadata
    try:
        start_ms = int(meta.get("preview_start_ms") or 0)
        dur_ms = int(meta.get("preview_duration_ms") or 30000)
    except Exception:
        start_ms, dur_ms = 0, 30000
    # Record/update the upload session
    upload_id = upload.get("ID") or upload.get("Id") or upload.get("id")
    try:
        size = int(upload.get("Size") or 0)
    except Exception:
        size = None
    async with db_session() as session:
        us = (await session.get(UploadSession, upload_id)) if upload_id else None
        if not us and upload_id:
            us = UploadSession(
                id=upload_id,
                filename=os.path.basename(file_path),
                size_bytes=size,
                state='processing',
                encrypted_cid=None,
            )
            session.add(us)
        await session.commit()
    # Read & encrypt by streaming (ENCF v1 / AES-GCM-SIV).
    # Generate a per-content random DEK and salt.
    dek = os.urandom(32)
    salt = os.urandom(16)
    key_fpr = b58encode(hot_pubkey).decode()  # fingerprint doubles as our node id for now
    # Stream-encrypt straight into the IPFS add call
    try:
        with open(file_path, 'rb') as f:
            result = await add_streamed_file(
                encrypt_file_to_encf(f, dek, CHUNK_BYTES, salt),
                filename=os.path.basename(file_path),
                params={},
            )
    except Exception as e:
        make_log("tus-hook", f"Encrypt+add failed: {e}", level="error")
        # Mark the upload session as failed
        async with db_session() as session:
            if upload_id:
                us = await session.get(UploadSession, upload_id)
                if us:
                    us.state = 'failed'
                    us.error = str(e)
            await session.commit()
        return response.json({"ok": False, "error": "ENCRYPT_ADD_FAILED"}, status=500)
    encrypted_cid = result.get("Hash")
    try:
        enc_size = int(result.get("Size") or 0)
    except Exception:
        enc_size = None
    # Persist records
    async with db_session() as session:
        ec = EncryptedContent(
            encrypted_cid=encrypted_cid,
            title=title,
            description=description,
            content_type=content_type,
            enc_size_bytes=enc_size,
            plain_size_bytes=os.path.getsize(file_path),
            preview_enabled=preview_enabled,
            preview_conf=({"duration_ms": dur_ms, "intervals": [[start_ms, start_ms + dur_ms]]} if preview_enabled else {}),
            aead_scheme="AES_GCM_SIV",
            chunk_bytes=CHUNK_BYTES,
            salt_b64=_b64(salt),
        )
        session.add(ec)
        await session.flush()
        ck = ContentKey(
            content_id=ec.id,
            key_ciphertext_b64=_b64(dek),  # NOTE: should be wrapped by a local KEK; simplified for PoC
            key_fingerprint=key_fpr,
            issuer_node_id=key_fpr,
            allow_auto_grant=True,
        )
        session.add(ck)
        sync = IpfsSync(
            content_id=ec.id,
            pin_state='pinned',
            bytes_total=enc_size,
            bytes_fetched=enc_size,
            pinned_at=datetime.utcnow(),
        )
        session.add(sync)
        # Publish a signed index item
        item = {
            "encrypted_cid": encrypted_cid,
            "title": title,
            "description": description,
            "content_type": content_type,
            "size_bytes": enc_size,
            "preview_enabled": preview_enabled,
            "preview_conf": ec.preview_conf,
            "issuer_node_id": key_fpr,
            "salt_b64": _b64(salt),
        }
        try:
            from app.core._crypto.signer import Signer
            from app.core._secrets import hot_seed
            signer = Signer(hot_seed)
            blob = json.dumps(item, sort_keys=True, separators=(",", ":")).encode()
            sig = signer.sign(blob)
        except Exception:
            sig = ""
        session.add(ContentIndexItem(encrypted_cid=encrypted_cid, payload=item, sig=sig))
        await session.commit()
    # Update the upload session with the result and purge staging to avoid duplicates
    async with db_session() as session:
        if upload_id:
            us = await session.get(UploadSession, upload_id)
            if us:
                us.state = 'pinned'
                us.encrypted_cid = encrypted_cid
                # Prefer IPFS for downstream conversion; remove the staging file
                try:
                    if file_path and os.path.exists(file_path):
                        os.remove(file_path)
                except Exception:
                    pass
                us.storage_path = None
        await session.commit()
    make_log("tus-hook", f"Uploaded+encrypted {file_path} -> {encrypted_cid}")
    return response.json({"ok": True, "encrypted_cid": encrypted_cid, "upload_id": upload_id})