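"""Admin API route handlers.

Cookie/token authentication helpers plus read-mostly dashboard endpoints:
overview, storage, uploads, users, licenses, stars, system, blockchain,
node roles, known nodes, and status.
"""
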
from __future__ import annotations

import os
import platform as py_platform
import shutil
from collections import defaultdict
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

from base58 import b58encode
from sanic import response
from sqlalchemy import Integer, String, and_, case, cast, func, or_, select

from app.api.routes._system import get_git_info
from app.core._blockchain.ton.platform import platform
from app.core._config import (
    BACKEND_DATA_DIR_HOST,
    BACKEND_LOGS_DIR_HOST,
    LOG_DIR,
    CLIENT_TELEGRAM_BOT_USERNAME,
    PROJECT_HOST,
    UPLOADS_DIR,
)
from app.core._secrets import hot_pubkey, service_wallet
from app.core.ipfs_client import bitswap_stat, id_info, repo_stat
from app.core.logger import make_log
from app.core.models._config import ServiceConfig, ServiceConfigValue
from app.core.models.content_v3 import (
    ContentDerivative,
    ContentIndexItem,
    EncryptedContent,
    IpfsSync,
    UploadSession,
)
from app.core.models.my_network import KnownNode
from app.core.models.tasks import BlockchainTask
from app.core.models.node_storage import StoredContent
from app.core.models.user import User
from app.core.models.content.user_content import UserContent
from app.core.models.transaction import StarsInvoice
from app.core.models.wallet_connection import WalletConnection
from app.core.models.user_activity import UserActivity
from app.core._utils.share_links import build_content_links
from app.core.content.content_id import ContentId

MIN_ONCHAIN_INDEX = int(os.getenv("MIN_ONCHAIN_INDEX", "8"))

ALLOWED_UPLOAD_FILTERS = {"all", "issues", "processing", "ready", "unindexed"}

ADMIN_COOKIE_NAME = os.getenv('ADMIN_COOKIE_NAME', 'admin_session')
ADMIN_COOKIE_MAX_AGE = int(os.getenv('ADMIN_COOKIE_MAX_AGE', '172800'))  # 48h default
ADMIN_COOKIE_SAMESITE = os.getenv('ADMIN_COOKIE_SAMESITE', 'Lax')
ADMIN_COOKIE_SECURE_MODE = os.getenv('ADMIN_COOKIE_SECURE', 'auto').lower()
ADMIN_HEADER_NAME = os.getenv('ADMIN_HEADER_NAME', 'X-Admin-Token')


def _cookie_secure_flag(request) -> bool:
    if ADMIN_COOKIE_SECURE_MODE == 'true':
        return True
    if ADMIN_COOKIE_SECURE_MODE == 'false':
        return False
    # auto mode: follow request scheme
    return getattr(request, 'scheme', 'http') == 'https'


def _set_admin_cookie(resp, request, value: str, max_age: Optional[int] = None):
    resp.cookies[ADMIN_COOKIE_NAME] = value
    cookie = resp.cookies[ADMIN_COOKIE_NAME]
    cookie['path'] = '/'
    cookie['httponly'] = True
    cookie['samesite'] = ADMIN_COOKIE_SAMESITE
    cookie['secure'] = _cookie_secure_flag(request)
    if max_age is not None:
        cookie['max-age'] = max_age


def _clear_admin_cookie(resp, request):
    _set_admin_cookie(resp, request, '', max_age=0)


def _get_admin_header(request) -> Optional[str]:
    target = ADMIN_HEADER_NAME.lower()
    for key, value in request.headers.items():
        if key.lower() == target:
            return value
    return None


def _auth_ok(request) -> bool:
    token = os.getenv('ADMIN_API_TOKEN')
    if not token:
        return False
    cookie_value = request.cookies.get(ADMIN_COOKIE_NAME)
    if cookie_value == token:
        return True
    header_value = _get_admin_header(request)
    if not header_value:
        return False
    if header_value.startswith('Bearer '):
        header_value = header_value.split(' ', 1)[1].strip()
    return header_value == token


def _unauthorized():
    return response.json({"error": "UNAUTHORIZED"}, status=401)


def _ensure_admin(request):
    if not _auth_ok(request):
        return _unauthorized()
    return None


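# A minimal sketch of the expected client flow, assuming these handlers are
# mounted under /api/v1/admin/* (the route prefix is an assumption; only the
# handler names are defined in this module):
#
#   curl -c jar.txt -X POST "$HOST/api/v1/admin/login" \
#        -H 'Content-Type: application/json' -d '{"secret": "<ADMIN_API_TOKEN>"}'
#   curl -b jar.txt "$HOST/api/v1/admin/overview"             # cookie session
#   curl -H 'X-Admin-Token: Bearer <ADMIN_API_TOKEN>' \
#        "$HOST/api/v1/admin/overview"                        # header, no cookie

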
def _dir_stats(label: str, path: str) -> Dict[str, Any]:
    target = Path(path)
    exists = target.exists()
    size = 0
    files = 0
    if exists:
        if target.is_file():
            try:
                stat = target.stat()
                size = stat.st_size
                files = 1
            except OSError:
                pass
        else:
            for child in target.rglob('*'):
                try:
                    if child.is_file():
                        files += 1
                        size += child.stat().st_size
                except OSError:
                    continue
    return {
        'label': label,
        'path': str(target),
        'exists': exists,
        'file_count': files,
        'size_bytes': size,
    }


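# Example `_dir_stats` result (illustrative values):
#   {'label': 'Backend logs', 'path': '/app/logs', 'exists': True,
#    'file_count': 12, 'size_bytes': 3481921}

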
def _service_states(request) -> List[Dict[str, Any]]:
    now = datetime.utcnow()
    items: List[Dict[str, Any]] = []
    memory = getattr(request.app.ctx, 'memory', None)
    known_states = getattr(memory, 'known_states', {}) if memory else {}
    if isinstance(known_states, dict):
        for name, payload in known_states.items():
            ts: Optional[datetime] = payload.get('timestamp') if isinstance(payload, dict) else None
            delay = (now - ts).total_seconds() if ts else None
            healthy = delay is not None and delay < 120
            items.append({
                'name': name,
                'status': payload.get('status') if healthy else 'not working: timeout',
                'last_reported_seconds': delay,
            })
    items.sort(key=lambda item: item['name'])
    return items


def _format_dt(value: Optional[datetime]) -> Optional[str]:
    return value.isoformat() + 'Z' if isinstance(value, datetime) else None


def _extract_file_hash(local_path: Optional[str]) -> Optional[str]:
    if not local_path:
        return None
    name = Path(local_path).name
    return name or None


def _storage_download_url(file_hash: Optional[str]) -> Optional[str]:
    if not file_hash:
        return None
    return f"{PROJECT_HOST}/api/v1.5/storage/{file_hash}"


def _pick_primary_download(candidates: List[tuple[str, Optional[str], Optional[int]]]) -> Optional[str]:
    priority = (
        'decrypted_high',
        'decrypted_original',
        'decrypted_low',
        'decrypted_preview',
        'high',
        'low',
        'preview',
        'original',
        'stored',
    )
    for target in priority:
        for kind, url, _ in candidates:
            if kind == target and url:
                return url
    for _, url, _ in candidates:
        if url:
            return url
    return None


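# `_pick_primary_download` prefers decrypted derivatives over raw ones and
# falls back to the first candidate with any URL. Illustrative example:
#   _pick_primary_download([('preview', 'u1', 1), ('decrypted_high', 'u2', 2)])
#   -> 'u2'

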
def _parse_bool_arg(value: Optional[str]) -> Optional[bool]:
    if value is None:
        return None
    normalized = value.strip().lower()
    if normalized in {'1', 'true', 'yes', 'y', 'on'}:
        return True
    if normalized in {'0', 'false', 'no', 'n', 'off'}:
        return False
    return None


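# `_parse_bool_arg` examples:
#   _parse_bool_arg('YES') -> True     (case-insensitive)
#   _parse_bool_arg('off') -> False
#   _parse_bool_arg('maybe') -> None   (unrecognized values mean "no filter")

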
async def s_api_v1_admin_login(request):
    token = os.getenv('ADMIN_API_TOKEN')
    if not token:
        make_log('Admin', 'ADMIN_API_TOKEN is not configured', level='error')
        return response.json({"error": "ADMIN_TOKEN_NOT_CONFIGURED"}, status=500)

    payload = request.json or {}
    provided = (payload.get('secret') or '').strip()
    if provided != token:
        resp = response.json({"error": "UNAUTHORIZED"}, status=401)
        _clear_admin_cookie(resp, request)
        return resp

    resp = response.json({
        "ok": True,
        "cookie_name": ADMIN_COOKIE_NAME,
        "header_name": ADMIN_HEADER_NAME,
        "max_age": ADMIN_COOKIE_MAX_AGE,
    })
    _set_admin_cookie(resp, request, token, ADMIN_COOKIE_MAX_AGE)
    return resp


async def s_api_v1_admin_logout(request):
    resp = response.json({"ok": True})
    _clear_admin_cookie(resp, request)
    return resp


async def s_api_v1_admin_overview(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    branch, commit = get_git_info()

    try:
        ipfs_identity = await id_info()
    except Exception as exc:  # pragma: no cover - network failure path
        ipfs_identity = {"error": str(exc)}

    try:
        bitswap = await bitswap_stat()
    except Exception as exc:  # pragma: no cover - network failure path
        bitswap = {"error": str(exc)}

    try:
        repo = await repo_stat()
    except Exception as exc:  # pragma: no cover - network failure path
        repo = {"error": str(exc)}

    # Database counters
    encrypted_total = (await session.execute(select(func.count()).select_from(EncryptedContent))).scalar_one()
    upload_total = (await session.execute(select(func.count()).select_from(UploadSession))).scalar_one()
    derivative_ready = (await session.execute(
        select(func.count()).select_from(ContentDerivative).where(ContentDerivative.status == 'ready')
    )).scalar_one()

    node_id = b58encode(hot_pubkey).decode()

    overview_payload = {
        'project': {
            'host': PROJECT_HOST,
            'name': os.getenv('PROJECT_NAME', 'unknown'),
            'privacy': os.getenv('NODE_PRIVACY', 'public'),
        },
        'codebase': {
            'branch': branch,
            'commit': commit,
        },
        'node': {
            'id': node_id,
            'service_wallet': service_wallet.address.to_string(1, 1, 1),
            'ton_master': platform.address.to_string(1, 1, 1),
        },
        'runtime': {
            'python': py_platform.python_version(),
            'implementation': py_platform.python_implementation(),
            'platform': py_platform.platform(),
            'utc_now': datetime.utcnow().isoformat() + 'Z',
        },
        'ipfs': {
            'identity': ipfs_identity,
            'bitswap': bitswap,
            'repo': repo,
        },
        'content': {
            'encrypted_total': int(encrypted_total or 0),
            'upload_sessions_total': int(upload_total or 0),
            'derivatives_ready': int(derivative_ready or 0),
        },
        'ton': {
            'host': os.getenv('TONCENTER_HOST'),
            'api_key_configured': bool(os.getenv('TONCENTER_API_KEY')),
            'testnet': bool(int(os.getenv('TESTNET', '0'))),
        },
        'services': _service_states(request),
    }

    return response.json(overview_payload)


async def s_api_v1_admin_storage(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    directories: List[Dict[str, Any]] = []
    directories.append(_dir_stats('Encrypted uploads', UPLOADS_DIR))
    directories.append(_dir_stats('Backend logs', LOG_DIR))

    extra_dirs = {
        'Host content mount': BACKEND_DATA_DIR_HOST,
        'Host logs mount': BACKEND_LOGS_DIR_HOST,
        'Tus staging': os.getenv('TUSD_DATA_DIR_HOST', ''),
    }
    for label, path in extra_dirs.items():
        if path:
            directories.append(_dir_stats(label, path))

    disk_snapshot: Optional[Dict[str, Any]] = None
    for entry in directories:
        if entry['exists']:
            try:
                usage = shutil.disk_usage(entry['path'])
            except Exception:
                continue
            disk_snapshot = {
                'path': entry['path'],
                'total_bytes': usage.total,
                'used_bytes': usage.total - usage.free,
                'free_bytes': usage.free,
                'percent_used': round((usage.total - usage.free) / usage.total * 100, 2) if usage.total else None,
            }
            break

    derivatives = (await session.execute(select(ContentDerivative))).scalars().all()
    derivative_stats = {
        'ready': sum(1 for d in derivatives if d.status == 'ready'),
        'processing': sum(1 for d in derivatives if d.status == 'processing'),
        'pending': sum(1 for d in derivatives if d.status == 'pending'),
        'failed': sum(1 for d in derivatives if d.status == 'failed'),
        'total_bytes': sum(int(d.size_bytes or 0) for d in derivatives if d.size_bytes),
    }

    storage_payload = {
        'directories': directories,
        'disk': disk_snapshot,
        'derivatives': derivative_stats,
    }
    return response.json(storage_payload)


async def s_api_v1_admin_uploads(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    raw_filter = (request.args.get('filter') or '').lower()
    raw_filters: List[str] = []
    if raw_filter:
        raw_filters = [item.strip() for item in raw_filter.split(',') if item.strip()]
    effective_filters = [item for item in raw_filters if item in ALLOWED_UPLOAD_FILTERS and item != 'all']

    search_query = (request.args.get('search') or '').strip()
    search_lower = search_query.lower()

    try:
        limit = int(request.args.get('limit') or 50)
    except Exception:
        limit = 50
    limit = max(1, min(limit, 200))

    try:
        scan_limit = int(request.args.get('scan') or 0)
    except Exception:
        scan_limit = 0
    if scan_limit <= 0:
        scan_default = max(limit, 100 if (effective_filters or search_lower) else limit)
        scan_limit = min(max(scan_default, limit), 500)
    else:
        scan_limit = max(limit, min(scan_limit, 500))

    counts_rows = (await session.execute(
        select(UploadSession.state, func.count()).group_by(UploadSession.state)
    )).all()
    counts = {state: int(count) for state, count in counts_rows}
    total = sum(counts.values())

    recent_rows = (await session.execute(
        select(UploadSession).order_by(UploadSession.updated_at.desc()).limit(25)
    )).scalars().all()
    recent = [
        {
            'id': row.id,
            'filename': row.filename,
            'size_bytes': row.size_bytes,
            'state': row.state,
            'encrypted_cid': row.encrypted_cid,
            'error': row.error,
            'updated_at': row.updated_at.isoformat() + 'Z',
            'created_at': row.created_at.isoformat() + 'Z',
        }
        for row in recent_rows
    ]

    content_rows = (await session.execute(
        select(EncryptedContent).order_by(EncryptedContent.created_at.desc()).limit(scan_limit)
    )).scalars().all()

    content_ids = [row.id for row in content_rows]
    encrypted_cids = [row.encrypted_cid for row in content_rows]

    derivatives_map: Dict[int, list[ContentDerivative]] = {cid: [] for cid in content_ids}
    if content_ids:
        derivative_rows = (await session.execute(
            select(ContentDerivative).where(ContentDerivative.content_id.in_(content_ids))
        )).scalars().all()
        for derivative in derivative_rows:
            derivatives_map.setdefault(derivative.content_id, []).append(derivative)

    ipfs_map: Dict[int, Optional[IpfsSync]] = {}
    if content_ids:
        ipfs_rows = (await session.execute(
            select(IpfsSync).where(IpfsSync.content_id.in_(content_ids))
        )).scalars().all()
        for sync in ipfs_rows:
            ipfs_map[sync.content_id] = sync

    uploads_map: Dict[str, List[UploadSession]] = {cid: [] for cid in encrypted_cids}
    if encrypted_cids:
        uploads_for_content = (await session.execute(
            select(UploadSession).where(UploadSession.encrypted_cid.in_(encrypted_cids))
        )).scalars().all()
        for upload in uploads_for_content:
            uploads_map.setdefault(upload.encrypted_cid, []).append(upload)
        for chain in uploads_map.values():
            chain.sort(key=lambda u: (u.updated_at or u.created_at or datetime.min))

    stored_map: Dict[str, StoredContent] = {}
    stored_by_id: Dict[int, StoredContent] = {}
    if encrypted_cids:
        stored_rows = (await session.execute(
            select(StoredContent).where(StoredContent.content_id.in_(encrypted_cids))
        )).scalars().all()
        for stored in stored_rows:
            stored_map[stored.content_id] = stored
            stored_by_id[stored.id] = stored

    user_map: Dict[int, User] = {}
    user_ids = {stored.user_id for stored in stored_map.values() if stored.user_id}
    if user_ids:
        user_rows = (await session.execute(select(User).where(User.id.in_(user_ids)))).scalars().all()
        for user in user_rows:
            user_map[user.id] = user

    license_counts: Dict[int, int] = {}
    stored_ids = list(stored_by_id.keys())
    if stored_ids:
        license_rows = (await session.execute(
            select(UserContent.content_id, func.count())
            .where(UserContent.content_id.in_(stored_ids))
            .group_by(UserContent.content_id)
        )).all()
        for content_id, count in license_rows:
            license_counts[int(content_id)] = int(count)

    contents_payload: List[Dict[str, Any]] = []
    category_totals: Dict[str, int] = {key: 0 for key in ALLOWED_UPLOAD_FILTERS if key != 'all'}
    matched_total = 0
    for content in content_rows:
        derivatives = derivatives_map.get(content.id, [])
        attempts: Dict[str, int] = defaultdict(int)
        derivative_entries: List[Dict[str, Any]] = []
        summary: Dict[str, int] = defaultdict(int)
        download_candidates: List[tuple[str, Optional[str], Optional[int]]] = []

        for derivative in sorted(derivatives, key=lambda item: item.created_at or datetime.min):
            summary[derivative.status] += 1
            attempts[derivative.kind] += 1
            file_hash = _extract_file_hash(derivative.local_path)
            download_url = _storage_download_url(file_hash)
            derivative_entries.append({
                'kind': derivative.kind,
                'status': derivative.status,
                'size_bytes': derivative.size_bytes,
                'error': derivative.error,
                'created_at': _format_dt(derivative.created_at),
                'updated_at': _format_dt(derivative.last_access_at or derivative.created_at),
                'attempts': attempts[derivative.kind],
                'download_url': download_url,
            })
            download_candidates.append((derivative.kind, download_url, derivative.size_bytes))

        conversion_state = None
        if summary.get('ready'):
            conversion_state = 'ready'
        elif summary.get('processing'):
            conversion_state = 'processing'
        elif summary.get('pending'):
            conversion_state = 'pending'
        elif summary.get('failed'):
            conversion_state = 'failed'

        upload_chain = uploads_map.get(content.encrypted_cid, [])
        latest_upload = upload_chain[-1] if upload_chain else None
        upload_history = [
            {
                'state': entry.state,
                'at': _format_dt(entry.updated_at or entry.created_at),
                'error': entry.error,
                'filename': entry.filename,
            }
            for entry in upload_chain
        ]

        ipfs_sync = ipfs_map.get(content.id)

        stored = stored_map.get(content.encrypted_cid)
        metadata_cid = None
        content_hash = None
        stored_payload: Optional[Dict[str, Any]] = None
        blockchain_payload: Optional[Dict[str, Any]] = None
        if stored:
            metadata_cid = (stored.meta or {}).get('metadata_cid')
            content_hash = stored.hash
            download_candidates.append(('stored', stored.web_url, None))
            stored_payload = {
                'stored_id': stored.id,
                'type': stored.type,
                'owner_address': stored.owner_address,
                'user_id': stored.user_id,
                'status': stored.status,
                'content_url': stored.web_url,
                'download_url': stored.web_url,
                'created': _format_dt(stored.created),
                'updated': _format_dt(stored.updated),
            }
            if stored.user_id and stored.user_id in user_map:
                user = user_map[stored.user_id]
                stored_payload['user'] = {
                    'id': user.id,
                    'telegram_id': user.telegram_id,
                    'username': user.username,
                    'first_name': (user.meta or {}).get('first_name') if user.meta else None,
                    'last_name': (user.meta or {}).get('last_name') if user.meta else None,
                }
            blockchain_payload = {
                'onchain_index': stored.onchain_index,
                'item_address': (stored.meta or {}).get('item_address'),
                'indexed': stored.onchain_index is not None and stored.onchain_index >= MIN_ONCHAIN_INDEX,
                'license_count': license_counts.get(stored.id, 0),
            }

        else:
            try:
                cid_obj = ContentId.deserialize(content.encrypted_cid)
                content_hash = cid_obj.content_hash_b58
            except Exception:
                content_hash = None

        share_target = None
        if stored:
            try:
                share_target = stored.cid.serialize_v2()
            except Exception:
                share_target = content.encrypted_cid
        else:
            share_target = content.encrypted_cid

        _, startapp_url, web_view_url = build_content_links(
            share_target,
            None,
            project_host=PROJECT_HOST,
            bot_username=CLIENT_TELEGRAM_BOT_USERNAME,
        )

        primary_download = _pick_primary_download(download_candidates)
        derivative_downloads = [
            {
                'kind': kind,
                'url': url,
                'size_bytes': size_bytes,
            }
            for kind, url, size_bytes in download_candidates
            if url
        ]

        upload_state_norm = (latest_upload.state or '').lower() if latest_upload else ''
        conversion_state_norm = (conversion_state or '').lower() if conversion_state else ''
        ipfs_state_norm = (ipfs_sync.pin_state or '').lower() if (ipfs_sync and ipfs_sync.pin_state) else ''
        derivative_states_norm = [(derivative.status or '').lower() for derivative in derivatives]
        status_values = [upload_state_norm, conversion_state_norm, ipfs_state_norm] + derivative_states_norm

        has_issue = any(
            value and ("fail" in value or "error" in value or "timeout" in value)
            for value in status_values
        )
        if not has_issue and any(event.get('error') for event in upload_history):
            has_issue = True
        if not has_issue and any(derivative.error for derivative in derivatives):
            has_issue = True
        if not has_issue and ipfs_sync and ipfs_sync.pin_error:
            has_issue = True

        is_onchain_indexed = bool(blockchain_payload and blockchain_payload.get('indexed'))
        is_unindexed = not is_onchain_indexed

        conversion_done = (
            summary.get('ready', 0) > 0
            or conversion_state_norm in ('ready', 'converted')
            or any(state in ('ready', 'converted', 'complete') for state in derivative_states_norm)
        )
        ipfs_done = ipfs_state_norm in ('pinned', 'ready')

        is_ready = not has_issue and conversion_done and (ipfs_done or not ipfs_sync) and is_onchain_indexed

        categories = set()
        if has_issue:
            categories.add('issues')
        if is_ready:
            categories.add('ready')
        if is_unindexed:
            categories.add('unindexed')
        # Anything that is neither ready nor failing is still in flight;
        # count it as processing regardless of which status keyword appeared.
        if not is_ready and not has_issue:
            categories.add('processing')

        flags = {
            'issues': 'issues' in categories,
            'processing': 'processing' in categories,
            'ready': 'ready' in categories,
            'unindexed': 'unindexed' in categories,
        }

        search_parts: List[Any] = [
            content.artist,
            content.title,
            content.description,
            content.encrypted_cid,
            metadata_cid,
            content_hash,
        ]
        if blockchain_payload:
            search_parts.append(blockchain_payload.get('item_address') or '')
        if stored_payload:
            search_parts.append(stored_payload.get('owner_address') or '')
            user_info = stored_payload.get('user') or {}
            search_parts.extend(
                [
                    str(user_info.get('id') or ''),
                    str(user_info.get('telegram_id') or ''),
                    user_info.get('username') or '',
                    user_info.get('first_name') or '',
                    user_info.get('last_name') or '',
                ]
            )
        search_blob = ' '.join(str(part) for part in search_parts if part).lower()

        matches_filter = (not effective_filters) or any(cat in categories for cat in effective_filters)
        matches_search = (not search_lower) or (search_lower in search_blob)

        if not matches_filter or not matches_search:
            continue

        matched_total += 1
        for cat in categories:
            if cat in category_totals:
                category_totals[cat] += 1

        if len(contents_payload) >= limit:
            continue

        contents_payload.append({
            'encrypted_cid': content.encrypted_cid,
            'metadata_cid': metadata_cid,
            'content_hash': content_hash,
            'title': content.title,
            'artist': content.artist,
            'description': content.description,
            'content_type': content.content_type,
            'size': {
                'encrypted': content.enc_size_bytes,
                'plain': content.plain_size_bytes,
            },
            'created_at': _format_dt(content.created_at),
            'updated_at': _format_dt(content.updated_at),
            'status': {
                'upload_state': latest_upload.state if latest_upload else None,
                'conversion_state': conversion_state,
                'ipfs_state': ipfs_sync.pin_state if ipfs_sync else None,
                'onchain': blockchain_payload,
            },
            'upload_history': upload_history,
            'derivative_summary': dict(summary),
            'derivatives': derivative_entries,
            'ipfs': (
                {
                    'pin_state': ipfs_sync.pin_state,
                    'pin_error': ipfs_sync.pin_error,
                    'bytes_total': ipfs_sync.bytes_total,
                    'bytes_fetched': ipfs_sync.bytes_fetched,
                    'pinned_at': _format_dt(ipfs_sync.pinned_at),
                    'updated_at': _format_dt(ipfs_sync.updated_at),
                }
                if ipfs_sync else None
            ),
            'stored': stored_payload,
            'links': {
                'web_view': web_view_url,
                'start_app': startapp_url,
                'api_view': f"{PROJECT_HOST}/api/v1/content.view/{share_target}",
                'download_primary': primary_download,
                'download_derivatives': derivative_downloads,
            },
            'flags': flags,
        })

    payload = {
        'total': total,
        'states': counts,
        'recent': recent,
        'contents': contents_payload,
        'matching_total': matched_total,
        'filter': effective_filters or ['all'],
        'search': search_query or None,
        'limit': limit,
        'scan': scan_limit,
        'scanned': len(content_rows),
        'category_totals': category_totals,
    }
    return response.json(payload)


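# Illustrative query (route prefix assumed, parameters as parsed above):
#   GET /api/v1/admin/uploads?filter=issues,unindexed&search=wav&limit=50&scan=300
# `filter` takes a comma-separated subset of ALLOWED_UPLOAD_FILTERS, and `scan`
# bounds how many recent EncryptedContent rows are inspected (capped at 500).

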
async def s_api_v1_admin_users(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    try:
        limit = int(request.args.get('limit') or 50)
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 200))

    try:
        offset = int(request.args.get('offset') or 0)
    except (TypeError, ValueError):
        offset = 0
    offset = max(0, offset)

    search = (request.args.get('search') or '').strip()

    filters = []
    if search:
        search_like = f"%{search}%"
        clauses = [
            cast(User.id, String).ilike(search_like),
            cast(User.telegram_id, String).ilike(search_like),
            User.username.ilike(search_like),
            User.meta['first_name'].astext.ilike(search_like),
            User.meta['last_name'].astext.ilike(search_like),
        ]
        numeric_candidate = search.lstrip('-')
        if numeric_candidate.isdigit():
            try:
                search_int = int(search)
                clauses.append(User.id == search_int)
                clauses.append(User.telegram_id == search_int)
            except ValueError:
                pass
        filters.append(or_(*clauses))

    total_stmt = select(func.count()).select_from(User)
    if filters:
        total_stmt = total_stmt.where(and_(*filters))
    total = (await session.execute(total_stmt)).scalar_one()

    query_stmt = (
        select(User)
        .order_by(User.last_use.desc())
        .offset(offset)
        .limit(limit)
    )
    if filters:
        query_stmt = query_stmt.where(and_(*filters))

    user_rows = (await session.execute(query_stmt)).scalars().all()

    base_payload = {
        'total': int(total or 0),
        'limit': limit,
        'offset': offset,
        'search': search or None,
        'has_more': (offset + limit) < int(total or 0),
    }

    if not user_rows:
        base_payload.update({
            'items': [],
            'summary': {
                'users_returned': 0,
                'wallets_total': 0,
                'wallets_active': 0,
                'licenses_total': 0,
                'licenses_active': 0,
                'stars_total': 0,
                'stars_paid': 0,
                'stars_unpaid': 0,
                'stars_amount_total': 0,
                'stars_amount_paid': 0,
                'stars_amount_unpaid': 0,
                'unique_ips_total': 0,
            },
        })
        return response.json(base_payload)

    user_ids = [user.id for user in user_rows if user.id is not None]

    wallet_map: Dict[int, List[WalletConnection]] = defaultdict(list)
    if user_ids:
        wallet_rows = (await session.execute(
            select(WalletConnection)
            .where(WalletConnection.user_id.in_(user_ids))
            .order_by(WalletConnection.created.desc())
        )).scalars().all()
        for wallet in wallet_rows:
            if wallet.user_id is not None:
                wallet_map[wallet.user_id].append(wallet)

    content_stats_map: Dict[int, Dict[str, int]] = {}
    if user_ids:
        content_rows = (await session.execute(
            select(
                StoredContent.user_id,
                func.count().label('total'),
                func.sum(case((StoredContent.type.like('onchain%'), 1), else_=0)).label('onchain'),
                func.sum(case((StoredContent.disabled.isnot(None), 1), else_=0)).label('disabled'),
            )
            .where(StoredContent.user_id.in_(user_ids))
            .group_by(StoredContent.user_id)
        )).all()
        for user_id, total_cnt, onchain_cnt, disabled_cnt in content_rows:
            if user_id is None:
                continue
            total_value = int(total_cnt or 0)
            onchain_value = int(onchain_cnt or 0)
            disabled_value = int(disabled_cnt or 0)
            content_stats_map[user_id] = {
                'total': total_value,
                'onchain': onchain_value,
                'local': max(total_value - onchain_value, 0),
                'disabled': disabled_value,
            }

    license_type_map: Dict[int, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
    license_status_map: Dict[int, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
    if user_ids:
        license_type_rows = (await session.execute(
            select(UserContent.user_id, UserContent.type, func.count())
            .where(UserContent.user_id.in_(user_ids))
            .group_by(UserContent.user_id, UserContent.type)
        )).all()
        for user_id, ltype, count_value in license_type_rows:
            if user_id is None:
                continue
            license_type_map[user_id][ltype or 'unknown'] += int(count_value or 0)

        license_status_rows = (await session.execute(
            select(UserContent.user_id, UserContent.status, func.count())
            .where(UserContent.user_id.in_(user_ids))
            .group_by(UserContent.user_id, UserContent.status)
        )).all()
        for user_id, status_value, count_value in license_status_rows:
            if user_id is None:
                continue
            license_status_map[user_id][status_value or 'unknown'] += int(count_value or 0)

    stars_stats_map: Dict[int, Dict[str, int]] = {}
    if user_ids:
        stars_rows = (await session.execute(
            select(
                StarsInvoice.user_id,
                func.count().label('total'),
                func.sum(case((StarsInvoice.paid.is_(True), 1), else_=0)).label('paid'),
                func.sum(case((or_(StarsInvoice.paid.is_(False), StarsInvoice.paid.is_(None)), 1), else_=0)).label('unpaid'),
                func.sum(StarsInvoice.amount).label('amount_total'),
                func.sum(case((StarsInvoice.paid.is_(True), StarsInvoice.amount), else_=0)).label('amount_paid'),
            )
            .where(StarsInvoice.user_id.in_(user_ids))
            .group_by(StarsInvoice.user_id)
        )).all()
        for user_id, total_cnt, paid_cnt, unpaid_cnt, amt_total, amt_paid in stars_rows:
            if user_id is None:
                continue
            total_value = int(total_cnt or 0)
            paid_value = int(paid_cnt or 0)
            unpaid_value = int(unpaid_cnt or max(total_value - paid_value, 0))
            amount_total_value = int(amt_total or 0)
            amount_paid_value = int(amt_paid or 0)
            amount_unpaid_value = max(amount_total_value - amount_paid_value, 0)
            stars_stats_map[user_id] = {
                'total': total_value,
                'paid': paid_value,
                'unpaid': unpaid_value,
                'amount_total': amount_total_value,
                'amount_paid': amount_paid_value,
                'amount_unpaid': amount_unpaid_value,
            }

    ip_counts_map: Dict[int, int] = {}
    if user_ids:
        ip_count_rows = (await session.execute(
            select(
                UserActivity.user_id,
                func.count(func.distinct(UserActivity.user_ip)),
            )
            .where(UserActivity.user_id.in_(user_ids), UserActivity.user_ip.isnot(None))
            .group_by(UserActivity.user_id)
        )).all()
        for user_id, ip_count in ip_count_rows:
            if user_id is None:
                continue
            ip_counts_map[user_id] = int(ip_count or 0)

    latest_ip_map: Dict[int, Dict[str, Optional[str]]] = {}
    if user_ids:
        latest_ip_rows = (await session.execute(
            select(
                UserActivity.user_id,
                UserActivity.user_ip,
                UserActivity.type,
                UserActivity.created,
            )
            .where(UserActivity.user_id.in_(user_ids), UserActivity.user_ip.isnot(None))
            .order_by(UserActivity.user_id, UserActivity.created.desc())
            .distinct(UserActivity.user_id)
        )).all()
        for user_id, ip_value, activity_type, created_at in latest_ip_rows:
            if user_id is None:
                continue
            latest_ip_map[user_id] = {
                'ip': ip_value,
                'type': activity_type,
                'seen_at': _format_dt(created_at),
            }

    recent_ip_map: Dict[int, List[Dict[str, Optional[str]]]] = defaultdict(list)
    if user_ids:
        activity_limit = max(200, len(user_ids) * 10)
        recent_ip_rows = (await session.execute(
            select(
                UserActivity.user_id,
                UserActivity.user_ip,
                UserActivity.type,
                UserActivity.created,
            )
            .where(UserActivity.user_id.in_(user_ids), UserActivity.user_ip.isnot(None))
            .order_by(UserActivity.created.desc())
            .limit(activity_limit)
        )).all()
        for user_id, ip_value, activity_type, created_at in recent_ip_rows:
            if user_id is None or not ip_value:
                continue
            bucket = recent_ip_map[user_id]
            if len(bucket) >= 5:
                continue
            bucket.append({
                'ip': ip_value,
                'type': activity_type,
                'seen_at': _format_dt(created_at),
            })

    items: List[Dict[str, Any]] = []
    summary = {
        'users_returned': 0,
        'wallets_total': 0,
        'wallets_active': 0,
        'licenses_total': 0,
        'licenses_active': 0,
        'stars_total': 0,
        'stars_paid': 0,
        'stars_unpaid': 0,
        'stars_amount_total': 0,
        'stars_amount_paid': 0,
        'stars_amount_unpaid': 0,
        'unique_ips_total': 0,
    }

    for user in user_rows:
        summary['users_returned'] += 1
        meta = user.meta or {}

        wallet_list = wallet_map.get(user.id, [])
        active_wallets = [wallet for wallet in wallet_list if not wallet.invalidated]
        primary_wallet = active_wallets[0] if active_wallets else None
        last_connected_wallet = active_wallets[0] if active_wallets else (wallet_list[0] if wallet_list else None)
        wallet_payload = {
            'primary_address': primary_wallet.wallet_address if primary_wallet else None,
            'active_count': len(active_wallets),
            'total_count': len(wallet_list),
            'last_connected_at': _format_dt(last_connected_wallet.created) if last_connected_wallet else None,
            'connections': [
                {
                    'id': wallet.id,
                    'address': wallet.wallet_address,
                    'network': wallet.network,
                    'invalidated': wallet.invalidated,
                    'created_at': _format_dt(wallet.created),
                    'updated_at': _format_dt(wallet.updated),
                }
                for wallet in wallet_list[:3]
            ],
        }
        summary['wallets_total'] += wallet_payload['total_count']
        summary['wallets_active'] += wallet_payload['active_count']

        content_stats = content_stats_map.get(user.id, {'total': 0, 'onchain': 0, 'local': 0, 'disabled': 0})

        license_types = dict(license_type_map.get(user.id, {}))
        license_statuses = dict(license_status_map.get(user.id, {}))
        licenses_total = sum(license_types.values()) if license_types else sum(license_statuses.values())
        licenses_active = license_statuses.get('active', 0)
        summary['licenses_total'] += licenses_total
        summary['licenses_active'] += licenses_active

        stars_stats = stars_stats_map.get(
            user.id,
            {
                'total': 0,
                'paid': 0,
                'unpaid': 0,
                'amount_total': 0,
                'amount_paid': 0,
                'amount_unpaid': 0,
            },
        )
        summary['stars_total'] += stars_stats['total']
        summary['stars_paid'] += stars_stats['paid']
        summary['stars_unpaid'] += stars_stats['unpaid']
        summary['stars_amount_total'] += stars_stats['amount_total']
        summary['stars_amount_paid'] += stars_stats['amount_paid']
        summary['stars_amount_unpaid'] += stars_stats['amount_unpaid']

        unique_ips = ip_counts_map.get(user.id, 0)
        summary['unique_ips_total'] += unique_ips
        recent_ips = recent_ip_map.get(user.id, [])
        latest_ip = latest_ip_map.get(user.id)

        items.append({
            'id': user.id,
            'telegram_id': user.telegram_id,
            'username': user.username,
            'first_name': meta.get('first_name'),
            'last_name': meta.get('last_name'),
            'lang_code': user.lang_code,
            'created_at': _format_dt(user.created),
            'updated_at': _format_dt(user.updated),
            'last_use': _format_dt(user.last_use),
            'meta': {
                'ref_id': meta.get('ref_id'),
                'referrer_id': meta.get('referrer_id'),
            },
            'wallets': wallet_payload,
            'content': content_stats,
            'licenses': {
                'total': licenses_total,
                'active': licenses_active,
                'by_type': license_types,
                'by_status': license_statuses,
            },
            'stars': {
                'total': stars_stats['total'],
                'paid': stars_stats['paid'],
                'unpaid': stars_stats['unpaid'],
                'amount_total': stars_stats['amount_total'],
                'amount_paid': stars_stats['amount_paid'],
                'amount_unpaid': stars_stats['amount_unpaid'],
            },
            'ip_activity': {
                'last': latest_ip or None,
                'unique_ips': unique_ips,
                'recent': recent_ips,
            },
        })

    base_payload.update({
        'items': items,
        'summary': summary,
    })
    return response.json(base_payload)


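# Illustrative query (route prefix assumed):
#   GET /api/v1/admin/users?search=alice&limit=50&offset=0
# `search` matches id/telegram_id/username and the first/last name stored in
# User.meta; purely numeric searches also try exact id / telegram_id equality.

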
async def s_api_v1_admin_licenses(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    try:
        limit = int(request.args.get('limit') or 50)
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 200))

    try:
        offset = int(request.args.get('offset') or 0)
    except (TypeError, ValueError):
        offset = 0
    offset = max(0, offset)

    search = (request.args.get('search') or '').strip()
    type_param = (request.args.get('type') or '').strip()
    status_param = (request.args.get('status') or '').strip()
    license_type_param = (request.args.get('license_type') or '').strip()
    user_id_param = (request.args.get('user_id') or '').strip()
    owner_address_param = (request.args.get('owner') or '').strip()
    onchain_address_param = (request.args.get('address') or '').strip()
    content_hash_param = (request.args.get('content_hash') or '').strip()

    filters = []
    applied_filters: Dict[str, Any] = {}

    if type_param:
        type_values = [value.strip() for value in type_param.split(',') if value.strip()]
        if type_values:
            filters.append(UserContent.type.in_(type_values))
            applied_filters['type'] = type_values

    if status_param:
        status_values = [value.strip() for value in status_param.split(',') if value.strip()]
        if status_values:
            filters.append(UserContent.status.in_(status_values))
            applied_filters['status'] = status_values

    license_type_field = cast(UserContent.meta['license_type'], String)

    if license_type_param:
        lt_values: List[str] = []
        for part in license_type_param.split(','):
            part = (part or '').strip()
            if part:
                lt_values.append(part)
        if lt_values:
            clauses = []
            plain_values = [value for value in lt_values if value.lower() not in {'unknown', 'null'}]
            if plain_values:
                clauses.append(license_type_field.in_(plain_values))
            if any(value.lower() in {'unknown', 'null'} for value in lt_values):
                clauses.append(license_type_field.is_(None))
            if clauses:
                filters.append(or_(*clauses))
                applied_filters['license_type'] = lt_values

    if user_id_param:
        try:
            filters.append(UserContent.user_id == int(user_id_param))
            applied_filters['user_id'] = int(user_id_param)
        except ValueError:
            pass

    if owner_address_param:
        filters.append(UserContent.owner_address.ilike(f"%{owner_address_param}%"))
        applied_filters['owner'] = owner_address_param

    if onchain_address_param:
        filters.append(UserContent.onchain_address.ilike(f"%{onchain_address_param}%"))
        applied_filters['address'] = onchain_address_param

    if content_hash_param:
        stored_target = (await session.execute(
            select(StoredContent.id).where(StoredContent.hash == content_hash_param)
        )).scalars().first()
        if stored_target is None:
            payload = {
                'total': 0,
                'limit': limit,
                'offset': offset,
                'search': search or None,
                'filters': {**applied_filters, 'content_hash': content_hash_param},
                'items': [],
                'counts': {
                    'status': {},
                    'type': {},
                    'license_type': {},
                },
                'has_more': False,
            }
            return response.json(payload)
        filters.append(UserContent.content_id == int(stored_target))
        applied_filters['content_hash'] = content_hash_param

    if search:
        search_like = f"%{search}%"
        clauses = [
            cast(UserContent.id, String).ilike(search_like),
            UserContent.onchain_address.ilike(search_like),
            UserContent.owner_address.ilike(search_like),
        ]
        filters.append(or_(*clauses))

    total_stmt = select(func.count()).select_from(UserContent)
    if filters:
        total_stmt = total_stmt.where(and_(*filters))
    total = (await session.execute(total_stmt)).scalar_one()

    base_payload = {
        'total': int(total or 0),
        'limit': limit,
        'offset': offset,
        'search': search or None,
        'filters': applied_filters,
        'has_more': (offset + limit) < int(total or 0),
    }

    if not total:
        base_payload.update({
            'items': [],
            'counts': {
                'status': {},
                'type': {},
                'license_type': {},
            },
        })
        return response.json(base_payload)

    query_stmt = (
        select(UserContent)
        .order_by(UserContent.created.desc())
        .offset(offset)
        .limit(limit)
    )
    if filters:
        query_stmt = query_stmt.where(and_(*filters))

    license_rows = (await session.execute(query_stmt)).scalars().all()

    user_ids = [row.user_id for row in license_rows if row.user_id is not None]
    content_ids = [row.content_id for row in license_rows if row.content_id is not None]
    wallet_ids = [row.wallet_connection_id for row in license_rows if row.wallet_connection_id is not None]

    users_map: Dict[int, User] = {}
    if user_ids:
        user_records = (await session.execute(
            select(User).where(User.id.in_(user_ids))
        )).scalars().all()
        for user in user_records:
            users_map[user.id] = user

    wallet_map: Dict[int, WalletConnection] = {}
    if wallet_ids:
        wallet_records = (await session.execute(
            select(WalletConnection).where(WalletConnection.id.in_(wallet_ids))
        )).scalars().all()
        for wallet in wallet_records:
            wallet_map[wallet.id] = wallet

    content_map: Dict[int, StoredContent] = {}
    if content_ids:
        content_records = (await session.execute(
            select(StoredContent).where(StoredContent.id.in_(content_ids))
        )).scalars().all()
        for content in content_records:
            content_map[content.id] = content

    status_stmt = select(UserContent.status, func.count()).group_by(UserContent.status)
    if filters:
        status_stmt = status_stmt.where(and_(*filters))
    status_counts_rows = (await session.execute(status_stmt)).all()
    status_counts = {status or 'unknown': int(count or 0) for status, count in status_counts_rows}

    type_stmt = select(UserContent.type, func.count()).group_by(UserContent.type)
    if filters:
        type_stmt = type_stmt.where(and_(*filters))
    type_counts_rows = (await session.execute(type_stmt)).all()
    type_counts = {ctype or 'unknown': int(count or 0) for ctype, count in type_counts_rows}

    license_type_expr = func.coalesce(license_type_field, 'unknown')
    license_type_stmt = select(license_type_expr.label('license_type'), func.count()).group_by(license_type_expr)
    if filters:
        license_type_stmt = license_type_stmt.where(and_(*filters))
    license_type_counts_rows = (await session.execute(license_type_stmt)).all()
    license_type_counts: Dict[str, int] = {}
    for lt_value, count in license_type_counts_rows:
        key = 'unknown' if lt_value in (None, 'null', 'None') else str(lt_value)
        license_type_counts[key] = int(count or 0)

    items: List[Dict[str, Any]] = []
    for license_row in license_rows:
        meta = license_row.meta or {}
        license_type_value = meta.get('license_type')

        owner_user = users_map.get(license_row.user_id)
        wallet_connection = wallet_map.get(license_row.wallet_connection_id)
        stored_content = content_map.get(license_row.content_id)

        owner_payload = None
        if owner_user:
            owner_meta = owner_user.meta or {}
            owner_payload = {
                'id': owner_user.id,
                'telegram_id': owner_user.telegram_id,
                'username': owner_user.username,
                'first_name': owner_meta.get('first_name'),
                'last_name': owner_meta.get('last_name'),
            }

        wallet_payload = None
        if wallet_connection:
            wallet_payload = {
                'id': wallet_connection.id,
                'address': wallet_connection.wallet_address,
                'network': wallet_connection.network,
                'invalidated': wallet_connection.invalidated,
                'created_at': _format_dt(wallet_connection.created),
                'updated_at': _format_dt(wallet_connection.updated),
            }

        content_payload = None
        if stored_content:
            stored_meta = stored_content.meta or {}
            metadata_candidate = stored_meta.get('metadata') if isinstance(stored_meta.get('metadata'), dict) else {}
            title_candidates = [
                stored_meta.get('title'),
                metadata_candidate.get('title') if isinstance(metadata_candidate, dict) else None,
                metadata_candidate.get('name') if isinstance(metadata_candidate, dict) else None,
                stored_meta.get('license', {}).get('title') if isinstance(stored_meta.get('license'), dict) else None,
            ]
            title_value = next((value for value in title_candidates if isinstance(value, str) and value.strip()), None)
            artist_candidates = []
            if isinstance(metadata_candidate, dict):
                artist_candidates.extend([
                    metadata_candidate.get('artist'),
                    (metadata_candidate.get('authors') or [None])[0] if isinstance(metadata_candidate.get('authors'), list) else None,
                ])
            artist_candidates.extend([
                stored_meta.get('artist'),
            ])
            artist_value = next((value for value in artist_candidates if isinstance(value, str) and value.strip()), None)
            try:
                cid_value = stored_content.cid.serialize_v2()
            except Exception:
                cid_value = None
            content_payload = {
                'id': stored_content.id,
                'hash': stored_content.hash,
                'cid': cid_value,
                'title': (title_value or stored_content.hash),
                'artist': artist_value,
                'type': stored_content.type,
                'owner_address': stored_content.owner_address,
                'onchain_index': stored_content.onchain_index,
                'user_id': stored_content.user_id,
                'download_url': stored_content.web_url,
            }

        items.append({
            'id': license_row.id,
            'type': license_row.type,
            'status': license_row.status,
            'license_type': license_type_value,
            'onchain_address': license_row.onchain_address,
            'owner_address': license_row.owner_address,
            'user': owner_payload,
            'wallet_connection': wallet_payload,
            'content': content_payload,
            'created_at': _format_dt(license_row.created),
            'updated_at': _format_dt(license_row.updated),
            'meta': meta,
            'links': {
                'tonviewer': f"https://tonviewer.com/{license_row.onchain_address}" if license_row.onchain_address else None,
                'content_view': f"{PROJECT_HOST}/api/v1/content.view/{license_row.onchain_address}" if license_row.onchain_address else None,
            },
        })

    base_payload.update({
        'items': items,
        'counts': {
            'status': status_counts,
            'type': type_counts,
            'license_type': license_type_counts,
        },
    })
    return response.json(base_payload)


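# Illustrative query (route prefix assumed):
#   GET /api/v1/admin/licenses?status=active&license_type=unknown&content_hash=<b58>
# `license_type=unknown` (or `null`) matches rows whose meta lacks the key;
# `content_hash` resolves a StoredContent row first and filters by its id.

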
async def s_api_v1_admin_stars(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    try:
        limit = int(request.args.get('limit') or 50)
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 200))

    try:
        offset = int(request.args.get('offset') or 0)
    except (TypeError, ValueError):
        offset = 0
    offset = max(0, offset)

    search = (request.args.get('search') or '').strip()
    type_param = (request.args.get('type') or '').strip()
    paid_param = _parse_bool_arg(request.args.get('paid'))
    user_id_param = (request.args.get('user_id') or '').strip()
    content_hash_param = (request.args.get('content_hash') or '').strip()

    filters = []
    applied_filters: Dict[str, Any] = {}

    if paid_param is True:
        filters.append(StarsInvoice.paid.is_(True))
        applied_filters['paid'] = True
    elif paid_param is False:
        filters.append(or_(StarsInvoice.paid.is_(False), StarsInvoice.paid.is_(None)))
        applied_filters['paid'] = False

    if type_param:
        type_values = [value.strip() for value in type_param.split(',') if value.strip()]
        if type_values:
            filters.append(StarsInvoice.type.in_(type_values))
            applied_filters['type'] = type_values

    if user_id_param:
        try:
            filters.append(StarsInvoice.user_id == int(user_id_param))
            applied_filters['user_id'] = int(user_id_param)
        except ValueError:
            pass

    if content_hash_param:
        filters.append(StarsInvoice.content_hash == content_hash_param)
        applied_filters['content_hash'] = content_hash_param

    if search:
        search_like = f"%{search}%"
        clauses = [
            StarsInvoice.external_id.ilike(search_like),
            StarsInvoice.invoice_url.ilike(search_like),
            cast(StarsInvoice.id, String).ilike(search_like),
        ]
        filters.append(or_(*clauses))

    total_stmt = select(func.count()).select_from(StarsInvoice)
    if filters:
        total_stmt = total_stmt.where(and_(*filters))
    total = (await session.execute(total_stmt)).scalar_one()

    base_payload = {
        'total': int(total or 0),
        'limit': limit,
        'offset': offset,
        'search': search or None,
        'filters': applied_filters,
        'has_more': (offset + limit) < int(total or 0),
    }

    if not total:
        base_payload.update({
            'items': [],
            'stats': {
                'total': 0,
                'paid': 0,
                'unpaid': 0,
                'amount_total': 0,
                'amount_paid': 0,
                'amount_unpaid': 0,
                'by_type': {},
            },
        })
        return response.json(base_payload)

    query_stmt = (
        select(StarsInvoice)
        .order_by(StarsInvoice.created.desc())
        .offset(offset)
        .limit(limit)
    )
    if filters:
        query_stmt = query_stmt.where(and_(*filters))

    invoice_rows = (await session.execute(query_stmt)).scalars().all()

    user_ids = [row.user_id for row in invoice_rows if row.user_id is not None]
    content_hashes = [row.content_hash for row in invoice_rows if row.content_hash]

    users_map: Dict[int, User] = {}
    if user_ids:
        user_records = (await session.execute(select(User).where(User.id.in_(user_ids)))).scalars().all()
        for user in user_records:
            users_map[user.id] = user

    content_map: Dict[str, StoredContent] = {}
    if content_hashes:
        content_records = (await session.execute(
            select(StoredContent).where(StoredContent.hash.in_(content_hashes))
        )).scalars().all()
        for content in content_records:
            content_map[content.hash] = content

    stats_stmt = select(
        func.count().label('total'),
        func.sum(case((StarsInvoice.paid.is_(True), 1), else_=0)).label('paid'),
        func.sum(StarsInvoice.amount).label('amount_total'),
        func.sum(case((StarsInvoice.paid.is_(True), StarsInvoice.amount), else_=0)).label('amount_paid'),
    )
    if filters:
        stats_stmt = stats_stmt.where(and_(*filters))
    stats_row = (await session.execute(stats_stmt)).first()
    total_filtered = int((stats_row.total if stats_row else 0) or 0)
    paid_count = int((stats_row.paid if stats_row else 0) or 0)
    amount_total_value = int((stats_row.amount_total if stats_row else 0) or 0)
    amount_paid_value = int((stats_row.amount_paid if stats_row else 0) or 0)
    unpaid_count = max(total_filtered - paid_count, 0)
    amount_unpaid_value = max(amount_total_value - amount_paid_value, 0)

    type_stats_stmt = select(
        StarsInvoice.type,
        func.count().label('total'),
        func.sum(case((StarsInvoice.paid.is_(True), 1), else_=0)).label('paid'),
        func.sum(StarsInvoice.amount).label('amount_total'),
        func.sum(case((StarsInvoice.paid.is_(True), StarsInvoice.amount), else_=0)).label('amount_paid'),
    ).group_by(StarsInvoice.type)
    if filters:
        type_stats_stmt = type_stats_stmt.where(and_(*filters))
    type_stats_rows = (await session.execute(type_stats_stmt)).all()
    type_stats: Dict[str, Dict[str, int]] = {}
    for invoice_type, total_cnt, paid_cnt, amt_total, amt_paid in type_stats_rows:
        total_value = int(total_cnt or 0)
        paid_value = int(paid_cnt or 0)
        amt_total_value = int(amt_total or 0)
        amt_paid_value = int(amt_paid or 0)
        type_stats[invoice_type or 'unknown'] = {
            'total': total_value,
            'paid': paid_value,
            'unpaid': max(total_value - paid_value, 0),
            'amount_total': amt_total_value,
            'amount_paid': amt_paid_value,
            'amount_unpaid': max(amt_total_value - amt_paid_value, 0),
        }

    items: List[Dict[str, Any]] = []
    for invoice in invoice_rows:
        user_payload = None
        if invoice.user_id and invoice.user_id in users_map:
            user = users_map[invoice.user_id]
            user_meta = user.meta or {}
            user_payload = {
                'id': user.id,
                'telegram_id': user.telegram_id,
                'username': user.username,
                'first_name': user_meta.get('first_name'),
                'last_name': user_meta.get('last_name'),
            }

        content_payload = None
        if invoice.content_hash and invoice.content_hash in content_map:
            stored_content = content_map[invoice.content_hash]
            stored_meta = stored_content.meta or {}
            metadata_candidate = stored_meta.get('metadata') if isinstance(stored_meta.get('metadata'), dict) else {}
            title_candidates = [
                stored_meta.get('title'),
                metadata_candidate.get('name') if isinstance(metadata_candidate, dict) else None,
                stored_meta.get('license', {}).get('title') if isinstance(stored_meta.get('license'), dict) else None,
            ]
            title_value = next((value for value in title_candidates if isinstance(value, str) and value.strip()), None)
            try:
                cid_value = stored_content.cid.serialize_v2()
            except Exception:
                cid_value = None
            content_payload = {
                'id': stored_content.id,
                'hash': stored_content.hash,
                'cid': cid_value,
                'title': title_value or stored_content.hash,
                'onchain_index': stored_content.onchain_index,
                'owner_address': stored_content.owner_address,
                'user_id': stored_content.user_id,
                'download_url': stored_content.web_url,
            }

        items.append({
            'id': invoice.id,
            'external_id': invoice.external_id,
            'type': invoice.type,
            'amount': invoice.amount,
            'paid': bool(invoice.paid),
            'invoice_url': invoice.invoice_url,
            'created_at': _format_dt(invoice.created),
            'user': user_payload,
            'content': content_payload,
            'status': 'paid' if invoice.paid else 'pending',
        })

    base_payload.update({
        'items': items,
        'stats': {
            'total': total_filtered,
            'paid': paid_count,
            'unpaid': unpaid_count,
            'amount_total': amount_total_value,
            'amount_paid': amount_paid_value,
            'amount_unpaid': amount_unpaid_value,
            'by_type': type_stats,
        },
    })
    return response.json(base_payload)


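# Illustrative query (route prefix and invoice type value assumed):
#   GET /api/v1/admin/stars?paid=false&type=license&limit=50
# `paid` is parsed with `_parse_bool_arg`, so 1/true/yes/on and 0/false/no/off
# are accepted; any other value leaves the paid filter unset.

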
async def s_api_v1_admin_system(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    config_rows = (await session.execute(select(ServiceConfigValue).order_by(ServiceConfigValue.key))).scalars().all()
    config_payload = []
    for row in config_rows:
        key_lower = (row.key or '').lower()
        masked = ('private' in key_lower and 'key' in key_lower) or ('seed' in key_lower)
        config_payload.append({
            'key': row.key,
            'value': '*** hidden ***' if masked else row.value,
            'raw': None if masked else row.packed_value,
        })

    env_summary = {
        'PROJECT_NAME': os.getenv('PROJECT_NAME'),
        'PROJECT_HOST': PROJECT_HOST,
        'NODE_PRIVACY': os.getenv('NODE_PRIVACY'),
        'SANIC_PORT': os.getenv('SANIC_PORT'),
        'LOG_LEVEL': os.getenv('LOG_LEVEL'),
        'TESTNET': os.getenv('TESTNET'),
    }

    blockchain_counts_rows = (await session.execute(
        select(BlockchainTask.status, func.count()).group_by(BlockchainTask.status)
    )).all()
    blockchain_counts = {status: int(count) for status, count in blockchain_counts_rows}

    latest_index = (await session.execute(
        select(ContentIndexItem).order_by(ContentIndexItem.updated_at.desc()).limit(5)
    )).scalars().all()
    index_entries = [
        {
            'encrypted_cid': item.encrypted_cid,
            'updated_at': item.updated_at.isoformat() + 'Z',
        }
        for item in latest_index
    ]

    payload = {
        'env': env_summary,
        'service_config': config_payload,
        'services': _service_states(request),
        'blockchain_tasks': blockchain_counts,
        'latest_index_items': index_entries,
    }
    return response.json(payload)


async def s_api_v1_admin_blockchain(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session

    counts_rows = (await session.execute(
        select(BlockchainTask.status, func.count()).group_by(BlockchainTask.status)
    )).all()
    counts = {status: int(count) for status, count in counts_rows}

    recent_rows = (await session.execute(
        select(BlockchainTask).order_by(BlockchainTask.updated.desc()).limit(20)
    )).scalars().all()
    recent = [
        {
            'id': task.id,
            'destination': task.destination,
            'amount': task.amount,
            'status': task.status,
            'epoch': task.epoch,
            'seqno': task.seqno,
            'transaction_hash': task.transaction_hash,
            'updated': task.updated.isoformat() + 'Z',
        }
        for task in recent_rows
    ]

    payload = {
        'counts': counts,
        'recent': recent,
    }
    return response.json(payload)


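# Example request bodies for the role-change endpoint below (values are placeholders);
# either "public_key" or "host" is enough to identify the target node:
#
#   {"role": "trusted", "public_key": "<node public key>"}
#   {"role": "deny", "host": "203.0.113.7"}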
async def s_api_v1_admin_node_setrole(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    data = request.json or {}
    role = (data.get('role') or '').strip()
    if role not in ('trusted', 'read-only', 'deny'):
        return response.json({"error": "BAD_ROLE"}, status=400)
    pub = (data.get('public_key') or '').strip()
    host = (data.get('host') or '').strip()
    if not pub and not host:
        return response.json({"error": "MISSING_TARGET"}, status=400)
    session = request.ctx.db_session
    row = None
    if pub:
        row = (await session.execute(select(KnownNode).where(KnownNode.public_key == pub))).scalars().first()
    if not row and host:
        row = (await session.execute(select(KnownNode).where(KnownNode.ip == host))).scalars().first()
    if not row:
        return response.json({"error": "NOT_FOUND"}, status=404)
    # Reassign a copied dict rather than mutating in place, so SQLAlchemy registers
    # the JSON column as dirty and persists the change on commit.
    meta = {**(row.meta or {})}
    meta['role'] = role
    row.meta = meta
    await session.commit()
    return response.json({"ok": True, "node": {"ip": row.ip, "public_key": row.public_key, "role": role}})


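# Each node row below is flattened into a JSON item; "role" falls back to "read-only"
# when the stored meta carries no explicit role. Illustrative item (placeholder values):
#
#   {"ip": "203.0.113.7", "port": 4000, "public_key": "...", "role": "trusted",
#    "version": "1.2.3", "last_seen": "2024-01-01T00:00:00Z", "notes": null}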
async def s_api_v1_admin_nodes(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session
    rows = (await session.execute(select(KnownNode))).scalars().all()
    items = []
    for r in rows:
        meta = r.meta or {}
        items.append({
            "ip": r.ip,
            "port": r.port,
            "public_key": r.public_key,
            "role": meta.get('role') or 'read-only',
            "version": meta.get('version'),
            "last_seen": (r.last_sync.isoformat() + 'Z') if r.last_sync else None,
            "notes": meta.get('notes'),
        })
    return response.json({"items": items})


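# The status endpoint derives its conversion backlog from the derivative kinds each
# content type is expected to have ready (per the branches below):
#
#   audio/*  -> decrypted_low, decrypted_high
#   video/*  -> decrypted_low, decrypted_high, decrypted_preview
#   other    -> decrypted_original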
async def s_api_v1_admin_status(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    session = request.ctx.db_session
    pin_counts: Dict[str, int] = defaultdict(int)
    rows = (await session.execute(select(IpfsSync))).scalars().all()
    for r in rows:
        pin_counts[r.pin_state] += 1

    deriv = (await session.execute(select(ContentDerivative))).scalars().all()
    deriv_counts = {
        'ready': sum(1 for d in deriv if d.status == 'ready'),
        'processing': sum(1 for d in deriv if d.status == 'processing'),
        'pending': sum(1 for d in deriv if d.status == 'pending'),
        'failed': sum(1 for d in deriv if d.status == 'failed'),
    }
    total_deriv_bytes = sum(int(d.size_bytes or 0) for d in deriv)

    # Index the ready derivative kinds once so the backlog check below is O(N + M)
    # instead of rescanning every derivative for each content row.
    ready_kinds: Dict[Any, set] = defaultdict(set)
    for d in deriv:
        if d.status == 'ready':
            ready_kinds[d.content_id].add(d.kind)

    ec = (await session.execute(select(EncryptedContent))).scalars().all()
    backlog = 0
    for e in ec:
        ctype = (e.content_type or '').lower()
        if ctype.startswith('audio/'):
            req = {'decrypted_low', 'decrypted_high'}
        elif ctype.startswith('video/'):
            req = {'decrypted_low', 'decrypted_high', 'decrypted_preview'}
        else:
            req = {'decrypted_original'}
        if not req.issubset(ready_kinds.get(e.id, set())):
            backlog += 1

    try:
        bs = await bitswap_stat()
    except Exception:
        bs = {}
    try:
        rs = await repo_stat()
    except Exception:
        rs = {}

    cfg = ServiceConfig(session)
    max_gb = await cfg.get('DERIVATIVE_CACHE_MAX_GB', os.getenv('DERIVATIVE_CACHE_MAX_GB', '50'))
    ttl_days = await cfg.get('DERIVATIVE_CACHE_TTL_DAYS', os.getenv('DERIVATIVE_CACHE_TTL_DAYS', '0'))
    max_pins = await cfg.get('SYNC_MAX_CONCURRENT_PINS', os.getenv('SYNC_MAX_CONCURRENT_PINS', '4'))
    disk_pct = await cfg.get('SYNC_DISK_LOW_WATERMARK_PCT', os.getenv('SYNC_DISK_LOW_WATERMARK_PCT', '90'))

    return response.json({
        'ipfs': {'bitswap': bs, 'repo': rs},
        'pin_counts': dict(pin_counts),
        'derivatives': {**deriv_counts, 'total_bytes': total_deriv_bytes},
        'convert_backlog': backlog,
        'limits': {
            'DERIVATIVE_CACHE_MAX_GB': float(max_gb),
            'DERIVATIVE_CACHE_TTL_DAYS': int(ttl_days),
            'SYNC_MAX_CONCURRENT_PINS': int(max_pins),
            'SYNC_DISK_LOW_WATERMARK_PCT': int(disk_pct),
        }
    })


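# Example request body for the cache limits endpoint below (placeholder values):
#
#   {"max_gb": 50, "ttl_days": 7}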
async def s_api_v1_admin_cache_setlimits(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    data = request.json or {}
    # Validate up front: float(None) / int(None) would otherwise raise and surface as a 500.
    try:
        max_gb = float(data['max_gb'])
        ttl_days = int(data['ttl_days'])
    except (KeyError, TypeError, ValueError):
        return response.json({"error": "BAD_LIMITS"}, status=400)
    cfg = ServiceConfig(request.ctx.db_session)
    await cfg.set('DERIVATIVE_CACHE_MAX_GB', max_gb)
    await cfg.set('DERIVATIVE_CACHE_TTL_DAYS', ttl_days)
    return response.json({"ok": True})


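# The cleanup endpoint below supports two modes:
#   {"mode": "ttl"}                 - evict ready derivatives not accessed within the
#                                     configured DERIVATIVE_CACHE_TTL_DAYS window
#   {"mode": "fit", "max_gb": 25}   - evict least-recently-accessed derivatives until
#                                     the cache fits under max_gb (placeholder value)
# Evicted rows are reset to status 'pending' so they can be regenerated on demand.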
async def s_api_v1_admin_cache_cleanup(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    data = request.json or {}
    mode = (data.get('mode') or 'fit')
    removed = 0
    from datetime import timedelta

    session = request.ctx.db_session
    if mode == 'ttl':
        cfg = ServiceConfig(session)
        ttl = int(await cfg.get('DERIVATIVE_CACHE_TTL_DAYS', os.getenv('DERIVATIVE_CACHE_TTL_DAYS', '0')))
        if ttl > 0:
            now = datetime.utcnow()
            rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.status == 'ready'))).scalars().all()
            for r in rows:
                la = r.last_access_at or r.created_at
                if la and (now - la) > timedelta(days=ttl):
                    try:
                        if r.local_path and os.path.exists(r.local_path):
                            os.remove(r.local_path)
                    except Exception:
                        pass
                    r.status = 'pending'
                    r.local_path = None
                    r.size_bytes = None
                    r.last_access_at = None
                    removed += 1
            await session.commit()
    else:
        target_gb = float(data.get('max_gb') or 0)
        if target_gb <= 0:
            return response.json({"error": "BAD_MAX_GB"}, status=400)
        limit_bytes = int(target_gb * (1024 ** 3))
        rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.status == 'ready'))).scalars().all()
        # Evict least-recently-accessed derivatives first until the cache fits the limit.
        rows.sort(key=lambda r: (r.last_access_at or r.created_at or datetime.utcfromtimestamp(0)))
        total = sum(int(r.size_bytes or 0) for r in rows)
        for r in rows:
            if total <= limit_bytes:
                break
            try:
                if r.local_path and os.path.exists(r.local_path):
                    os.remove(r.local_path)
            except Exception:
                pass
            total -= int(r.size_bytes or 0)
            r.status = 'pending'
            r.local_path = None
            r.size_bytes = None
            r.last_access_at = None
            removed += 1
        await session.commit()
    return response.json({"ok": True, "removed": removed})


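# Example request body for the sync limits endpoint below (placeholder values):
#
#   {"max_concurrent_pins": 4, "disk_low_watermark_pct": 90}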
async def s_api_v1_admin_sync_setlimits(request):
    if (unauth := _ensure_admin(request)):
        return unauth

    data = request.json or {}
    # Validate up front so missing or non-numeric fields yield a 400 rather than a 500.
    try:
        max_pins = int(data['max_concurrent_pins'])
        disk_pct = int(data['disk_low_watermark_pct'])
    except (KeyError, TypeError, ValueError):
        return response.json({"error": "BAD_LIMITS"}, status=400)
    cfg = ServiceConfig(request.ctx.db_session)
    await cfg.set('SYNC_MAX_CONCURRENT_PINS', max_pins)
    await cfg.set('SYNC_DISK_LOW_WATERMARK_PCT', disk_pct)
    return response.json({"ok": True})