Compare commits

...

88 Commits

Author SHA1 Message Date
Doctor Delpy 9f949b3c09 fix content streaming 2026-01-05 15:59:06 +04:00
Doctor Delpy 6e4893f59d try indexer fix 2025-12-24 14:22:26 +03:00
Doctor Delpy 93adfa6d27 try optimize setup 2025-12-22 18:16:00 +03:00
Doctor Delpy 6b3ed99876 migrate root server 2025-12-22 14:16:43 +03:00
Doctor Delpy dd4ff8b8ff fixes 2025-12-12 01:29:40 +03:00
Doctor Delpy 1def6e3512 creating tables 2025-12-11 19:12:05 +03:00
Doctor Delpy b0055e174f fix db error 2025-12-11 11:17:44 +03:00
root 698d0ca3f7 dht 2025-10-26 11:20:41 +00:00
root c6c6276fe6 update docs 2025-10-22 12:47:38 +00:00
root 2916e49973 sync improve 2025-10-21 23:19:12 +00:00
root 01bb82fa5a updates 2025-10-20 15:31:52 +00:00
root 0405c340a3 Merge branch 'origin/master' 2025-10-16 16:49:11 +00:00
root 1da0b26320 smashed updated 2025-10-16 16:23:36 +00:00
unexpected f140181c45 events & global sync. unstable 2025-10-15 16:57:21 +00:00
root 77921ba6a8 admin improve, uploading bot messages 2025-10-11 21:43:48 +00:00
unexpected 0c1bee31f4 keys stabilize 2025-10-09 18:28:30 +00:00
unexpected da446f5ab0 automatic handshake and connect 2025-10-08 17:23:34 +00:00
root dbc460f0bb some shit 2025-10-06 03:21:08 +00:00
root bb64acab09 update script 2025-10-05 22:49:53 +00:00
root aa91a427ba update node 2025-10-05 21:49:57 +00:00
root 7d81e7aff3 fix startup 2025-10-05 21:18:06 +00:00
root e3b86d6b4e update script 2025-10-05 20:43:31 +00:00
root d1f04b8b5e sh startup 2025-10-05 20:32:09 +00:00
root 721af9bc83 admin nice 2025-10-05 19:07:26 +00:00
root c170ca5433 last fixes, normally work self node 2025-10-05 14:04:44 +00:00
root 8651efd578 last updates (links) 2025-10-04 21:24:02 +00:00
root 3511ebd247 improve work 2025-10-01 13:19:03 +00:00
user 64491e19e1 Merge remote-tracking branch 'origin/master' 2025-09-26 12:22:12 +03:00
user 3c84ec43a2 admin fix 2025-09-26 12:21:58 +03:00
root 4ec91cb657 nice upload 2025-09-25 11:57:45 +00:00
root b7afa284aa headers fix 2025-09-25 07:23:18 +00:00
root e6153c881a fixes 2025-09-23 05:34:15 +00:00
root 38e54f0ab2 nice version 2025-09-20 20:21:49 +00:00
user 360f8110a4 fix config 2025-09-19 18:40:06 +03:00
user ae14782da4 new fixes 2025-09-19 14:54:51 +03:00
user 075a35b441 ipfs admin 2025-09-13 19:41:47 +03:00
user 650059b0d3 relayers new code 2025-09-13 14:48:57 +03:00
user 846e32c5b1 edit platform deployment 2025-08-25 14:36:10 +03:00
user e9e2f25f4d fix lazy loading 2025-08-25 14:35:20 +03:00
user 608881b5d8 fix misprint 2025-08-25 14:01:34 +03:00
user 3747329b1e fix misprint 2025-08-25 13:50:05 +03:00
user 4d5318b5d4 new deploy logic 2025-08-25 13:34:12 +03:00
user 3e6d0b93cb update versions contracts 2025-08-25 13:22:30 +03:00
user 2bd6e30b38 edit platfrom contract deployment 2025-08-25 12:25:08 +03:00
user 45374987e7 secrets fix 2025-08-25 11:52:29 +03:00
user 7d920907cc fix secrets read stuck 2025-08-24 19:39:28 +03:00
user 4401916104 fix db connections 2025-08-24 17:17:58 +03:00
user 3a6f787a78 update platform code contract 2025-08-24 16:48:23 +03:00
user d67135849c fix converter module path 2025-08-24 14:13:14 +03:00
user 4da4cd1526 fix db init 2025-08-24 13:23:05 +03:00
user f562dc8ed7 fix lazy_loading 2025-08-24 13:11:55 +03:00
user 5d41d33c6e fix syntax err 2025-08-23 21:42:14 +03:00
user 61e85baf08 extend logs 2025-08-23 21:12:37 +03:00
user 695969f015 better logs 2025-08-23 18:54:53 +03:00
user 82758fb11a postgres default fix 2025-08-23 13:15:10 +03:00
user b28e561a5f edit platform contract 2025-08-23 12:24:14 +03:00
user cf64ddaaa5 fix misprint 2025-08-22 19:36:20 +03:00
user 79165b49b5 startup errors fix #2 2025-08-22 19:09:38 +03:00
user 4cca40a626 startup errors fix #1 2025-08-22 14:12:45 +03:00
user e51bb86dc0 mariadb -> postgres 2025-08-22 14:04:21 +03:00
user 21964fa986 fix player ui 2025-06-01 12:17:35 +03:00
user 590afd2475 text pre-filtering 2025-06-01 11:58:51 +03:00
user a266c8b710 fix misprint 2025-06-01 08:13:12 +03:00
user b5a9437c05 fix player ui 2025-06-01 00:04:26 +03:00
user 58eca166db edit player ui globally 2025-05-31 23:45:02 +03:00
user 27dc827880 new player version 2025-05-31 01:55:16 +03:00
user 4605a46765 nice text 2025-05-29 21:01:24 +03:00
user 6de350c2a3 add qq command 2025-05-04 19:14:13 +03:00
user 11e2645aa9 edit player ui 2025-05-03 16:00:30 +03:00
user 3d9e7c7966 fix 2025-04-26 13:29:31 +03:00
user 4483c194da fix magic 2025-04-26 13:24:35 +03:00
user d2ff332490 fix convert service 2025-04-26 13:13:47 +03:00
user 2476d3b3b6 fix someth 2025-04-26 13:01:10 +03:00
user 0e4268fb4d python magic for images 2025-04-26 12:52:32 +03:00
user 0586ed9d94 disable response header credentials 2025-03-19 14:29:32 +03:00
user a6750fe35c fix misprint 2025-03-18 14:40:54 +03:00
user 3ab014d358 fix selectWallet 2025-03-17 13:54:36 +03:00
user 4a739e4b1b fix misprint 2025-03-17 13:47:22 +03:00
user 1a1f4301cf downloadble fix 2025-03-17 13:43:31 +03:00
user b02ded982c dowload & selectWallet & fixes & auth me 2025-03-17 13:40:37 +03:00
user ac6d102a3f downloadable, selectWallet 2025-03-14 13:57:30 +03:00
user 66da3541e7 try fix licenses 2025-03-14 10:54:57 +03:00
user 7d962d463b add free upload hint 2025-03-14 00:41:31 +03:00
user 4643d7f202 fix misprint 2025-03-13 21:00:04 +03:00
user 54bc545090 add status for ton_daemon 2025-03-13 20:14:40 +03:00
user b63f663bd2 set newContent amount task 2025-03-13 20:01:48 +03:00
user a35481fa71 fix misprint 2025-03-13 19:47:04 +03:00
user 276a09fbf6 restart policy 2025-03-13 18:41:55 +03:00
178 changed files with 13132 additions and 1317 deletions

2
.gitignore vendored
View File

@@ -4,7 +4,7 @@ venv
 logs
 sqlStorage
 playground
-alembic.ini
 .DS_Store
 messages.pot
 activeConfig
+__pycache__

275
ARCHITECTURE.md Normal file
View File

@@ -0,0 +1,275 @@
# System Architecture Overview

This document is the single, up-to-date source of information on the platform: architecture, protocols, data, configuration, scenarios, and operations. It supersedes the scattered, outdated documents that preceded it.

## Contents

- Components and topology
- Decentralized layer (membership, network size estimation, replication, metrics)
- Content upload and conversion
- Content viewing and purchase (UI/UX requirements)
- API (key endpoints and payloads)
- Keys and data schemas (DHT)
- Configuration and defaults
- Observability and metrics
- Sequence diagrams (Mermaid)
- Build and testing

---

## Components and Topology

- Backend API: a Sanic (Python) service with Telegram bots; PostgreSQL database (SQLAlchemy + Alembic).
- Storage: local FS (uploads/derivatives); IPFS (kubo) for retrieval/pinning; tusd (resumable uploads).
- Converters: ffmpeg workers in containers — `convert_v3`, `convert_process`.
- Frontend: an SPA (Vite + TypeScript) served by an nginx container.
- Decentralized layer: an embedded in-process DHT — membership, replica leases, content metrics.
```mermaid
flowchart LR
Client -- TWA/HTTP --> Frontend
Frontend -- REST --> API[Backend API]
API -- tus hooks --> tusd
API -- SQL --> Postgres
API -- IPC --> Workers[Converters]
API -- IPFS --> IPFS
API -- DHT --> DHT[(In-Process DHT)]
DHT -- CRDT Merge --> DHT
```
---
## Decentralized Layer

### Identifiers and Versions

- NodeID = blake3(Ed25519 public key) — a hex string (256 bits).
- ContentID = blake3(encrypted blob) — an immutable content identifier.
- schema_version = v1 — pinned in every DHT key/record.
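A minimal sketch of deriving these identifiers (assuming the `blake3` and `pynacl` packages; variable names are illustrative):

```python
from blake3 import blake3
from nacl.signing import SigningKey  # Ed25519

# NodeID: blake3 of the Ed25519 public key, hex-encoded (256 bits).
signing_key = SigningKey.generate()
node_id = blake3(bytes(signing_key.verify_key)).hexdigest()

# ContentID: blake3 of the encrypted blob (placeholder bytes here).
encrypted_blob = b"...ciphertext..."
content_id = blake3(encrypted_blob).hexdigest()
```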
### Membership

- Handshake `/api/v1/network.handshake` — the request is signed with Ed25519 and verified on the receiving side. Without a valid signature — 400 BAD_SIGNATURE.
- The payload includes: node info (version, capabilities, IPFS), metrics, an array of known public nodes, and reachability receipts (`reachability_receipts`: issuer, target, ASN, timestamp, signature).
- Membership state is a CRDT LWW-Set (adds/removes) with a TTL (`DHT_MEMBERSHIP_TTL=600` sec), plus a HyperLogLog for cardinality estimation (N_local).
- "Island" filtering: nodes with `reachability_ratio < q` (default `q=0.6`) are excluded when computing N_estimate and selecting replicas.
- The final estimate is `N_estimate = max(valid N_local from peers)`, as sketched below.
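A sketch of the estimation step under these rules (report field names are assumptions):

```python
DHT_MIN_REACHABILITY = 0.6  # q

def estimate_network_size(n_local: int, peer_reports: list[dict]) -> int:
    """N_estimate = max of valid peer N_local values, ignoring 'island' nodes."""
    valid = [
        r["n_local"]
        for r in peer_reports
        if r.get("reachability_ratio", 0.0) >= DHT_MIN_REACHABILITY
    ]
    return max([n_local, *valid])
```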
```mermaid
sequenceDiagram
participant A as Node A
participant B as Node B
A->>B: POST /network.handshake {nonce, ts, node, receipts, signature}
B->>B: verify ts/nonce and signature
B->>B: upsert member; merge(receipts)
B-->>A: {node, known_public_nodes, n_estimate, server_signature}
A->>A: merge; N_estimate = max(N_local, received values)
```
### Replication and Leases

- Prefix selection: `p = max(0, round(log2(N_estimate / R_target)))`, where `R_target ≥ 3` (default 3).
- Responsible nodes: those whose first `p` bits of NodeID match the first `p` bits of ContentID.
- The leader is the minimal NodeID among the responsible nodes.
- The leader issues `replica_leases` (TTL=600 sec), enforcing diversity: at least 3 distinct first IP octets and, where available, 3 distinct ASNs.
- Candidates are ranked by the rendezvous score `blake3(ContentID || NodeID)` (see the sketch after the diagram below).
- Replica holders send a heartbeat every 60 sec; 3 misses → the node is declared down and reassignment happens within ≤180 sec.
- Under- and over-replication are recorded in `conflict_log` and in Prometheus metrics.
```mermaid
stateDiagram-v2
[*] --> Discover
Discover: Handshakes + receipts
Discover --> Active: TTL & ASN quorum
Active --> Leader: Leader election for prefix p
Leader --> Leased: Lease issuance (diversity)
Leased --> Monitoring: Heartbeat 60s
Monitoring --> Reassign: 3 misses
Reassign --> Leased
```
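A minimal sketch of the prefix and rendezvous-ranking rules above (function names are illustrative; it assumes the `blake3` package, hex-encoded IDs, and ascending sort order):

```python
import math
from blake3 import blake3

def prefix_bits(n_estimate: int, r_target: int = 3) -> int:
    """p = max(0, round(log2(N_estimate / R_target)))."""
    if n_estimate <= 0:
        return 0
    return max(0, round(math.log2(n_estimate / r_target)))

def rendezvous_rank(content_id_hex: str, node_ids_hex: list[str]) -> list[str]:
    """Rank candidates by blake3(ContentID || NodeID)."""
    return sorted(
        node_ids_hex,
        key=lambda nid: blake3(bytes.fromhex(content_id_hex) + bytes.fromhex(nid)).digest(),
    )
```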
### Metrics (Windows)

- Each view event produces CRDT deltas:
  - PNCounter — view count;
  - HyperLogLog — unique ViewIDs (ViewID = blake3(ContentID || device_salt));
  - GCounter — watch_time, bytes_out, completion count.
- Windows are hourly (`DHT_METRIC_WINDOW_SEC`), keyed by `MetricKey = blake3(ContentID || WindowID)`.
- Merges are commutative and deterministic; a sketch follows.
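A minimal sketch of the counter merges under these rules (dict-based state is an assumption, not the project's actual classes):

```python
def merge_gcounter(a: dict[str, int], b: dict[str, int]) -> dict[str, int]:
    """G-Counter merge: per-node max; commutative, associative, idempotent."""
    return {node: max(a.get(node, 0), b.get(node, 0)) for node in a.keys() | b.keys()}

def pncounter_value(p: dict[str, int], n: dict[str, int]) -> int:
    """PN-Counter value: total increments minus total decrements."""
    return sum(p.values()) - sum(n.values())
```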
---
## Content Upload and Conversion

1) The client uploads to `tusd` (resumable). The backend receives HTTP hooks at `/api/v1/upload.tus-hook` (a hook-receiver sketch follows the diagram below).
2) A DB record is created for the encrypted content, and workers produce the derivatives:
   - for media — preview/low/high;
   - for binaries — the original (available only with a license).
3) `/api/v1/content.view` returns `display_options` and the aggregated conversion/upload state.
```mermaid
sequenceDiagram
participant C as Client
participant T as tusd
participant B as Backend
participant W as Workers
participant DB as PostgreSQL
C->>T: upload
T->>B: hooks (pre/post-finish)
B->>DB: create content
B->>W: conversion queue
W->>DB: derive/previews
C->>B: GET /content.view
B->>DB: resolve derivatives
B-->>C: display_options + status
```
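A hedged sketch of the hook receiver for step 1 (handler shape and payload fields are assumptions; tusd's exact hook format depends on its version):

```python
from sanic import Sanic, response

app = Sanic("uploader")

@app.post("/api/v1/upload.tus-hook")
async def tus_hook(request):
    # tusd identifies the hook stage via the Hook-Name header.
    event = request.headers.get("Hook-Name")
    upload = (request.json or {}).get("Upload", {})  # field name is an assumption
    if event == "post-finish":
        # Create the encrypted-content DB record and enqueue conversion here.
        pass
    return response.json({"ok": True})
```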
---
## Viewing and Purchase (UI/UX)

- `/api/v1/content.view/<content_address>` determines the available display options:
  - binary content without a preview — the original is served only with a license;
  - audio/video — preview/low for unauthorized users, decrypted_low/high for users with access.
- While conversion is in progress, the frontend shows a "processing" status, with no fake links.
- Cover image:
  - a fixed square slot; the image fits inside without stretching or distortion;
  - empty areas are not filled with black — the background matches the page background.
- The "Buy with TON/Stars" buttons always stay on a single line (no horizontal or vertical content scrolling on small screens).
```mermaid
flowchart LR
View[content.view] --> Resolve[Resolve derivatives]
Resolve --> Ready{Ready?}
Ready -- No --> Info[Status: processing/pending]
Ready -- Yes --> Options
Options -- Binary + no license --> HideOriginal[Hide original]
Options -- Media + no license --> PreviewLow[preview/low]
Options -- Has license --> Decrypted[decrypted low/high|original]
```
---
## API (Key Endpoints)

- `GET /api/system.version` — service freshness check.
- `POST /api/v1/network.handshake` — membership exchange (the request must carry an Ed25519 signature; see the signing sketch after this list). Example request:
```json
{
"version": "3.0.0",
"schema_version": "v1",
"public_key": "<base58 ed25519 pubkey>",
"node_id": "<blake3(pubkey)>",
"public_host": "https://node.example",
"node_type": "public|private",
"metrics": {"uptime_sec": 123, "content_count": 42},
"capabilities": {"accepts_inbound": true, "is_bootstrap": false},
"ipfs": {"multiaddrs": ["/ip4/.../tcp/4001"], "peer_id": "..."},
"known_public_nodes": [],
"reachability_receipts": [],
"timestamp": 1710000000,
"nonce": "<hex>",
"signature": "<base58 ed25519 signature>"
}
```
- `GET /api/v1/content.view/<content_address>` — `display_options`, `status`, `conversion`.
- `GET /api/v1.5/storage/<file_hash>` — file delivery.
- `GET /metrics` — Prometheus metrics exposition (or a fallback counter dump).
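A minimal sketch of producing the request signature (canonical-JSON signing is an assumption; it uses the `pynacl` and `base58` packages):

```python
import json
import time
from base58 import b58encode
from nacl.signing import SigningKey  # Ed25519

def sign_handshake(payload: dict, signing_key: SigningKey) -> dict:
    """Attach a timestamp and a base58 Ed25519 signature over canonical JSON."""
    body = dict(payload, timestamp=int(time.time()))
    message = json.dumps(body, sort_keys=True, separators=(",", ":")).encode()
    body["signature"] = b58encode(signing_key.sign(message).signature).decode()
    return body
```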
---
## DHT Keys and Schemas

- `MetaKey(content_id)` — replication metadata:
  - `replica_leases`: map `{lease_id -> {node_id, issued_at, expires_at, asn, ip_first_octet, heartbeat_at, score}}`;
  - `leader`: the leader's NodeID; `revision`: revision number;
  - `conflict_log`: array of `UNDER/OVER/LEASE_EXPIRED` events, etc.
- `MembershipKey(node_id)` — membership:
  - `members`: LWW-Set; `receipts`: LWW-Set;
  - `hll`: HyperLogLog; `reports`: maps of local N estimates;
  - `logical_counter`: logical counter for LWW domination.
- `MetricKey(content_id, window_id)` — window metrics:
  - `views`: PNCounter; `unique`: HLL; `watch_time`, `bytes_out`, `completions`: GCounters.

All records are signed and merged deterministically: CRDT logic plus LWW domination over (`logical_counter`, `timestamp`, `node_id`).
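A sketch of the domination rule as a lexicographic comparison over those three fields (treating the tuple as a strict ordering is an assumption):

```python
def dominates(a: dict, b: dict) -> bool:
    """True if record `a` wins LWW domination over record `b`."""
    key = lambda r: (r["logical_counter"], r["timestamp"], r["node_id"])
    return key(a) > key(b)
```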
---
## Configuration and Defaults

- Network/handshakes: `NODE_PRIVACY`, `PUBLIC_HOST`, `HANDSHAKE_INTERVAL_SEC`, `NETWORK_TLS_VERIFY`, IPFS peers/bootstrap nodes.
- DHT:
  - `DHT_MIN_RECEIPTS=5`, `DHT_MIN_REACHABILITY=0.6`, `DHT_MEMBERSHIP_TTL=600`;
  - `DHT_REPLICATION_TARGET=3`, `DHT_LEASE_TTL=600`;
  - `DHT_HEARTBEAT_INTERVAL=60`, `DHT_HEARTBEAT_MISS_THRESHOLD=3`;
  - `DHT_MIN_ASN=3`, `DHT_MIN_IP_OCTETS=3`;
  - `DHT_METRIC_WINDOW_SEC=3600`.
- Conversion: `CONVERT_*` quotas, `MAX_CONTENT_SIZE_MB`.

Note: PoW admission and Kademlia k-buckets are not yet enabled in the code — they are part of the design and can be implemented separately.
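A sketch of loading these defaults from the environment (plain `os.getenv` parsing; the project's actual config module may differ):

```python
import os

DHT_REPLICATION_TARGET = int(os.getenv("DHT_REPLICATION_TARGET", "3"))
DHT_LEASE_TTL = int(os.getenv("DHT_LEASE_TTL", "600"))
DHT_HEARTBEAT_INTERVAL = int(os.getenv("DHT_HEARTBEAT_INTERVAL", "60"))
DHT_HEARTBEAT_MISS_THRESHOLD = int(os.getenv("DHT_HEARTBEAT_MISS_THRESHOLD", "3"))
DHT_METRIC_WINDOW_SEC = int(os.getenv("DHT_METRIC_WINDOW_SEC", "3600"))
```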
---
## Observability and Metrics

Prometheus:
- `dht_replication_under_total`, `dht_replication_over_total`, `dht_leader_changes_total`;
- `dht_merge_conflicts_total`;
- `dht_view_count_total`, `dht_unique_view_estimate`, `dht_watch_time_seconds`.

Logs: structured HTTP errors (with ids), the per-replication `conflict_log`, node registration events.
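A sketch of registering a few of these with `prometheus_client` (metric names from the list above; label sets are assumptions):

```python
from prometheus_client import Counter, Gauge

dht_replication_under_total = Counter(
    "dht_replication_under_total", "Under-replication events detected"
)
dht_leader_changes_total = Counter(
    "dht_leader_changes_total", "Prefix leader changes"
)
dht_unique_view_estimate = Gauge(
    "dht_unique_view_estimate", "HLL-estimated unique views", ["content_id"]
)
```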
---
## Sequence Diagrams (Summary)

### Updating N_estimate
```mermaid
sequenceDiagram
participant Peer
participant Membership
participant DHT
Peer->>Membership: handshake(payload, receipts)
Membership->>Membership: merge LWW/receipts
Membership->>Membership: update HLL and N_local
Membership->>DHT: persist MembershipKey
Membership->>Membership: N_estimate = max(valid reports)
```
### Leader Election and Lease Issuance
```mermaid
sequenceDiagram
participant L as Leader
participant R as Responsible
L->>L: p = round(log2(N_est/R))
L->>R: rank by rendezvous(ContentID, NodeID)
L->>L: assign leases (diversity)
R-->>L: heartbeat/60s
L->>L: reassign on 3 misses
```
### Publishing Window Metrics
```mermaid
sequenceDiagram
participant C as Client
participant API as Backend
participant M as Metrics
participant D as DHT
C->>API: GET content.view?watch_time,bytes_out
API->>M: record_view(delta)
M->>D: merge MetricKey(ContentID, window)
API-->>Prom: /metrics
```
---
## Build and Testing

```bash
# Start the environment (example for /home/configs)
docker compose -f /home/configs/docker-compose.yml --env-file /home/configs/.env up -d --build

# DHT-layer tests
cd uploader-bot
python3 -m unittest discover -s tests/dht
```

View File

@@ -12,7 +12,8 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     gnupg \
-    lsb-release && \
+    lsb-release \
+    ffmpeg && \
     install -m 0755 -d /etc/apt/keyrings && \
     curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc && \
     chmod a+r /etc/apt/keyrings/docker.asc && \
@@ -21,7 +22,7 @@ RUN apt-get update && apt-get install -y \
     apt-get update && \
     apt-get install -y docker-ce-cli
 
-RUN apt-get install -y ffmpeg
+RUN apt-get install libmagic1 -y
 
 CMD ["python", "app"]

View File

@@ -48,6 +48,18 @@ TODO: implement file lookup on other nodes
 16. GET /api/v1/content.view
+
+17. GET /api/v1/network.info
+    Returns node information: id/public_key, version, node_type, metrics, capabilities.
+
+18. GET /api/v1/network.nodes
+    Returns the list of known public nodes with compatibility info and metadata.
+
+19. POST /api/v1/network.handshake
+    Handshake between nodes. The request body is signed with the node's private key; the response is signed with the server's private key.
+    Request fields: version, public_key (base58), node_type, metrics, capabilities, timestamp, nonce, signature.
+    The public_host field is required for public nodes and optional/empty for private nodes.
+    Response fields: compatibility, node, known_public_nodes, timestamp, server_public_key, server_signature (plus a warning on a MINOR version mismatch).
+    Private nodes are not persisted on the receiving side (no peer bookkeeping), but they receive the list of public nodes and can synchronize through them.

View File

@@ -1,5 +1,14 @@
 # Sanic Telegram Bot [template]
 
+Full system documentation (architecture, protocols, configuration, diagrams) — see `ARCHITECTURE.md`.
+
+### Running the DHT integration tests
+```shell
+cd uploader-bot
+python3 -m unittest discover -s tests/dht
+```
+
 ---
 
 ## Run
 ```shell

35
alembic.ini Normal file
View File

@@ -0,0 +1,35 @@
[alembic]
script_location = alembic
sqlalchemy.url = ${DATABASE_URL}
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s

View File

@@ -1,3 +1,4 @@
+import os
 from logging.config import fileConfig
 
 from sqlalchemy import engine_from_config
@@ -7,6 +8,10 @@ from alembic import context
 config = context.config
 
+database_url = os.environ.get("DATABASE_URL")
+if database_url:
+    config.set_main_option("sqlalchemy.url", database_url)
+
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)

View File

@@ -0,0 +1,26 @@
"""add artist column to encrypted content
Revision ID: b1f2d3c4a5b6
Revises: a7c1357e8d15
Create Date: 2024-06-05 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b1f2d3c4a5b6'
down_revision: Union[str, None] = 'a7c1357e8d15'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    op.add_column('encrypted_contents', sa.Column('artist', sa.String(length=512), nullable=True))


def downgrade() -> None:
    op.drop_column('encrypted_contents', 'artist')

View File

@@ -0,0 +1,38 @@
"""expand telegram_id precision on stars invoices
Revision ID: c2d4e6f8a1b2
Revises: b1f2d3c4a5b6
Create Date: 2025-10-17 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c2d4e6f8a1b2'
down_revision: Union[str, None] = 'b1f2d3c4a5b6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    op.alter_column(
        'stars_invoices',
        'telegram_id',
        existing_type=sa.Integer(),
        type_=sa.BigInteger(),
        existing_nullable=True,
    )


def downgrade() -> None:
    op.alter_column(
        'stars_invoices',
        'telegram_id',
        existing_type=sa.BigInteger(),
        type_=sa.Integer(),
        existing_nullable=True,
    )

View File

@@ -0,0 +1,70 @@
"""create dht_records and rdap_cache tables
Revision ID: d3e5f7a9c0d1
Revises: c2d4e6f8a1b2
Create Date: 2025-10-22 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'd3e5f7a9c0d1'
down_revision: Union[str, None] = 'c2d4e6f8a1b2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    # dht_records
    if not inspector.has_table('dht_records'):
        op.create_table(
            'dht_records',
            sa.Column('fingerprint', sa.String(length=128), primary_key=True),
            sa.Column('key', sa.String(length=512), nullable=False),
            sa.Column('schema_version', sa.String(length=16), nullable=False, server_default='v1'),
            sa.Column('logical_counter', sa.Integer(), nullable=False, server_default='0'),
            sa.Column('timestamp', sa.Float(), nullable=False, server_default='0'),
            sa.Column('node_id', sa.String(length=128), nullable=False),
            sa.Column('signature', sa.String(length=512), nullable=True),
            sa.Column('value', sa.JSON(), nullable=False, server_default=sa.text("'{}'::jsonb")),
            sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        )
    # ensure index exists (but don't fail if it already exists)
    try:
        existing_indexes = {idx['name'] for idx in inspector.get_indexes('dht_records')}
    except Exception:
        existing_indexes = set()
    if 'ix_dht_records_key' not in existing_indexes:
        op.create_index('ix_dht_records_key', 'dht_records', ['key'])

    # rdap_cache
    if not inspector.has_table('rdap_cache'):
        op.create_table(
            'rdap_cache',
            sa.Column('ip', sa.String(length=64), primary_key=True),
            sa.Column('asn', sa.Integer(), nullable=True),
            sa.Column('source', sa.String(length=64), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        )


def downgrade() -> None:
    try:
        op.drop_table('rdap_cache')
    except Exception:
        pass
    try:
        op.drop_index('ix_dht_records_key', table_name='dht_records')
    except Exception:
        pass
    try:
        op.drop_table('dht_records')
    except Exception:
        pass

View File

@@ -12,17 +12,12 @@ try:
 except BaseException:
     pass
 
-from app.core._utils.create_maria_tables import create_maria_tables
+from app.core._utils.create_maria_tables import create_db_tables
 from app.core.storage import engine
 
-if startup_target == '__main__':
-    create_maria_tables(engine)
-else:
+if startup_target != '__main__':
+    # Background services get a short delay before startup
     time.sleep(7)
 
-from app.api import app
-from app.bot import dp as uploader_bot_dp
-from app.client_bot import dp as client_bot_dp
-from app.core._config import SANIC_PORT, MYSQL_URI, PROJECT_HOST
 from app.core.logger import make_log
 
 if int(os.getenv("SANIC_MAINTENANCE", '0')) == 1:
@@ -30,7 +25,32 @@
     while True:
         time.sleep(1)
 
-from app.core.models import Memory
+
+def init_db_schema_sync() -> None:
+    """Initialise all SQLAlchemy models in the database before services start.
+
+    This ensures that every table defined on AlchemyBase.metadata (including
+    newer ones like DHT and service_config) exists before any component
+    accesses the database.
+    """
+    try:
+        from sqlalchemy import create_engine
+        from app.core.models import AlchemyBase  # imports all models and populates metadata
+
+        db_url = os.environ.get('DATABASE_URL')
+        if not db_url:
+            raise RuntimeError('DATABASE_URL is not set')
+        # Normalise DSN to sync driver for schema creation
+        if '+asyncpg' in db_url:
+            db_url_sync = db_url.replace('+asyncpg', '+psycopg2')
+        else:
+            db_url_sync = db_url
+        sync_engine = create_engine(db_url_sync, pool_pre_ping=True)
+        AlchemyBase.metadata.create_all(sync_engine)
+    except Exception as e:
+        make_log('Startup', f'DB sync init failed: {e}', level='error')
+
 
 async def queue_daemon(app):
@@ -52,7 +72,11 @@ async def execute_queue(app):
     make_log(None, f"Application normally started. HTTP port: {SANIC_PORT}")
     make_log(None, f"Telegram bot: https://t.me/{telegram_bot_username}")
     make_log(None, f"Client Telegram bot: https://t.me/{client_telegram_bot_username}")
-    make_log(None, f"MariaDB host: {MYSQL_URI.split('@')[1].split('/')[0].replace('/', '')}")
+    try:
+        _db_host = DATABASE_URL.split('@')[1].split('/')[0].replace('/', '')
+    except Exception:
+        _db_host = 'postgres://'
+    make_log(None, f"PostgreSQL host: {_db_host}")
     make_log(None, f"API host: {PROJECT_HOST}")
     while True:
         try:
@@ -79,18 +103,51 @@ async def execute_queue(app):
 
 if __name__ == '__main__':
+    # Ensure DB schema is fully initialised for all models
+    init_db_schema_sync()
+
+    from app.core.models import Memory
     main_memory = Memory()
     if startup_target == '__main__':
-        app.ctx.memory = main_memory
-        for _target in [uploader_bot_dp, client_bot_dp]:
-            _target._s_memory = app.ctx.memory
+        # Defer heavy imports to avoid side effects in background services.
+        # Mark this process as the primary node for seeding/config init
+        os.environ.setdefault('NODE_ROLE', 'primary')
+        from app.api import app
+        # Delay aiogram dispatcher creation until the loop is running
+        from app.core._config import SANIC_PORT, PROJECT_HOST, DATABASE_URL
+        from app.core.network.nodes import network_handshake_daemon, bootstrap_once_and_exit_if_failed
+        from app.core.network.maintenance import replication_daemon, heartbeat_daemon, dht_gossip_daemon
+
+        app.ctx.memory = main_memory
         app.ctx.memory._app = app
+        # Ensure DB schema exists using the same event loop as Sanic (idempotent)
+        app.add_task(create_db_tables(engine))
         app.add_task(execute_queue(app))
         app.add_task(queue_daemon(app))
-        app.add_task(uploader_bot_dp.start_polling(app.ctx.memory._telegram_bot))
-        app.add_task(client_bot_dp.start_polling(app.ctx.memory._client_telegram_bot))
+
+        # Start bots after the loop is ready
+        async def _start_bots():
+            try:
+                from app.bot import create_dispatcher as create_uploader_dp
+                from app.client_bot import create_dispatcher as create_client_dp
+                uploader_bot_dp = create_uploader_dp()
+                client_bot_dp = create_client_dp()
+                for _target in [uploader_bot_dp, client_bot_dp]:
+                    _target._s_memory = app.ctx.memory
+                await asyncio.gather(
+                    uploader_bot_dp.start_polling(app.ctx.memory._telegram_bot),
+                    client_bot_dp.start_polling(app.ctx.memory._client_telegram_bot),
+                )
+            except Exception as e:
+                make_log('Bots', f'Failed to start bots: {e}', level='error')
+
+        app.add_task(_start_bots())
+        # Start network handshake daemon and bootstrap step
+        app.add_task(network_handshake_daemon(app))
+        app.add_task(bootstrap_once_and_exit_if_failed())
+        app.add_task(replication_daemon(app))
+        app.add_task(heartbeat_daemon(app))
+        app.add_task(dht_gossip_daemon(app))
 
         app.run(host='0.0.0.0', port=SANIC_PORT)
     else:
@@ -112,6 +169,18 @@
         elif startup_target == 'convert_process':
             from app.core.background.convert_service import main_fn as target_fn
             time.sleep(9)
+        elif startup_target == 'convert_v3':
+            from app.core.background.convert_v3_service import main_fn as target_fn
+            time.sleep(9)
+        elif startup_target == 'index_scout_v3':
+            from app.core.background.index_scout_v3 import main_fn as target_fn
+            time.sleep(7)
+        elif startup_target == 'derivative_janitor':
+            from app.core.background.derivative_cache_janitor import main_fn as target_fn
+            time.sleep(5)
+        elif startup_target == 'events_sync':
+            from app.core.background.event_sync_service import main_fn as target_fn
+            time.sleep(5)
 
         startup_fn = startup_fn or target_fn
         assert startup_fn
@@ -124,8 +193,13 @@
                      level='error')
             sys.exit(1)
 
-    loop = asyncio.get_event_loop()
     try:
+        loop = asyncio.get_event_loop()
+    except RuntimeError:
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+    try:
+        # Background services no longer perform schema initialization
         loop.run_until_complete(wrapped_startup_fn(main_memory))
     except BaseException as e:
         make_log(startup_target[0].upper() + startup_target[1:], f"Error: {e}" + '\n' + str(traceback.format_exc()),

View File

@@ -1,6 +1,8 @@
 import traceback
 
 from sanic import Sanic, response
+from uuid import uuid4
+import traceback as _traceback
 
 from app.core.logger import make_log
@@ -13,16 +15,53 @@ app.register_middleware(close_db_session, "response")
 
 from app.api.routes._index import s_index, s_favicon
 from app.api.routes._system import s_api_v1_node, s_api_system_version, s_api_system_send_status, s_api_v1_node_friendly
-from app.api.routes.auth import s_api_v1_auth_twa
+from app.api.routes.network import (
+    s_api_v1_network_info,
+    s_api_v1_network_nodes,
+    s_api_v1_network_handshake,
+)
+from app.api.routes.network_events import s_api_v1_network_events
+from app.api.routes.auth import s_api_v1_auth_twa, s_api_v1_auth_select_wallet, s_api_v1_auth_me
 from app.api.routes.statics import s_api_tonconnect_manifest, s_api_platform_metadata
 from app.api.routes.node_storage import s_api_v1_storage_post, s_api_v1_storage_get, \
     s_api_v1_storage_decode_cid
-from app.api.routes.progressive_storage import s_api_v1_5_storage_get, s_api_v1_5_storage_post
+from app.api.routes.progressive_storage import s_api_v1_5_storage_get, s_api_v1_5_storage_post, s_api_v1_storage_fetch, s_api_v1_storage_proxy
+from app.api.routes.upload_tus import s_api_v1_upload_tus_hook
 from app.api.routes.account import s_api_v1_account_get
 from app.api.routes._blockchain import s_api_v1_blockchain_send_new_content_message, \
     s_api_v1_blockchain_send_purchase_content_message
 from app.api.routes.content import s_api_v1_content_list, s_api_v1_content_view, s_api_v1_content_friendly_list, s_api_v1_5_content_list
+from app.api.routes.content_index import s_api_v1_content_index, s_api_v1_content_delta
+from app.api.routes.derivatives import s_api_v1_content_derivatives
+from app.api.routes.admin import (
+    s_api_v1_admin_blockchain,
+    s_api_v1_admin_cache_cleanup,
+    s_api_v1_admin_cache_setlimits,
+    s_api_v1_admin_events,
+    s_api_v1_admin_licenses,
+    s_api_v1_admin_login,
+    s_api_v1_admin_logout,
+    s_api_v1_admin_users_setadmin,
+    s_api_v1_admin_node_setrole,
+    s_api_v1_admin_nodes,
+    s_api_v1_admin_overview,
+    s_api_v1_admin_stars,
+    s_api_v1_admin_status,
+    s_api_v1_admin_storage,
+    s_api_v1_admin_sync_setlimits,
+    s_api_v1_admin_system,
+    s_api_v1_admin_uploads,
+    s_api_v1_admin_users,
+    s_api_v1_admin_network,
+    s_api_v1_admin_network_config,
+    s_api_v1_admin_network_config_set,
+)
 from app.api.routes.tonconnect import s_api_v1_tonconnect_new, s_api_v1_tonconnect_logout
+from app.api.routes.keys import s_api_v1_keys_request
+from app.api.routes.sync import s_api_v1_sync_pin, s_api_v1_sync_status
+from app.api.routes.upload_status import s_api_v1_upload_status
+from app.api.routes.metrics import s_api_metrics
+from app.api.routes.dht import s_api_v1_dht_get, s_api_v1_dht_put
 
 
 app.add_route(s_index, "/", methods=["GET", "OPTIONS"])
@@ -32,17 +71,25 @@ app.add_route(s_api_v1_node, "/api/v1/node", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_node_friendly, "/api/v1/nodeFriendly", methods=["GET", "OPTIONS"])
 app.add_route(s_api_system_version, "/api/system.version", methods=["GET", "OPTIONS"])
 app.add_route(s_api_system_send_status, "/api/system.sendStatus", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_network_info, "/api/v1/network.info", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_network_nodes, "/api/v1/network.nodes", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_network_handshake, "/api/v1/network.handshake", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_network_events, "/api/v1/network.events", methods=["GET", "OPTIONS"])
 app.add_route(s_api_tonconnect_manifest, "/api/tonconnect-manifest.json", methods=["GET", "OPTIONS"])
 app.add_route(s_api_platform_metadata, "/api/platform-metadata.json", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_auth_twa, "/api/v1/auth.twa", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_auth_me, "/api/v1/auth.me", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_auth_select_wallet, "/api/v1/auth.selectWallet", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_tonconnect_new, "/api/v1/tonconnect.new", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_tonconnect_logout, "/api/v1/tonconnect.logout", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_5_storage_post, "/api/v1.5/storage", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_5_storage_get, "/api/v1.5/storage/<file_hash>", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_storage_fetch, "/api/v1/storage.fetch/<file_hash>", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_storage_proxy, "/api/v1/storage.proxy/<file_hash>", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_storage_post, "/api/v1/storage", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_storage_get, "/api/v1/storage/<file_hash>", methods=["GET", "OPTIONS"])
@@ -57,22 +104,95 @@ app.add_route(s_api_v1_content_list, "/api/v1/content.list", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_content_view, "/api/v1/content.view/<content_address>", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_content_friendly_list, "/api/v1/content.friendlyList", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_5_content_list, "/api/v1.5/content.list", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_content_index, "/api/v1/content.index", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_content_delta, "/api/v1/content.delta", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_content_derivatives, "/api/v1/content.derivatives", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_login, "/api/v1/admin.login", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_logout, "/api/v1/admin.logout", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_overview, "/api/v1/admin.overview", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_storage, "/api/v1/admin.storage", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_uploads, "/api/v1/admin.uploads", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_users, "/api/v1/admin.users", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_users_setadmin, "/api/v1/admin.users.setAdmin", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_licenses, "/api/v1/admin.licenses", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_stars, "/api/v1/admin.stars", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_events, "/api/v1/admin.events", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_system, "/api/v1/admin.system", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_blockchain, "/api/v1/admin.blockchain", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_node_setrole, "/api/v1/admin.node.setRole", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_nodes, "/api/v1/admin.nodes", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_status, "/api/v1/admin.status", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_cache_setlimits, "/api/v1/admin.cache.setLimits", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_cache_cleanup, "/api/v1/admin.cache.cleanup", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_sync_setlimits, "/api/v1/admin.sync.setLimits", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_admin_network, "/api/v1/admin.network", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_network_config, "/api/v1/admin.network.config", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_admin_network_config_set, "/api/v1/admin.network.config.set", methods=["POST", "OPTIONS"])
+# tusd HTTP hooks
+app.add_route(s_api_v1_upload_tus_hook, "/api/v1/upload.tus-hook", methods=["POST", "OPTIONS"])
+# Keys auto-grant
+app.add_route(s_api_v1_keys_request, "/api/v1/keys.request", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_sync_pin, "/api/v1/sync.pin", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_sync_status, "/api/v1/sync.status", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_upload_status, "/api/v1/upload.status/<upload_id>", methods=["GET", "OPTIONS"])
+app.add_route(s_api_metrics, "/metrics", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_dht_get, "/api/v1/dht.get", methods=["GET", "OPTIONS"])
+app.add_route(s_api_v1_dht_put, "/api/v1/dht.put", methods=["POST", "OPTIONS"])
 
 
 @app.exception(BaseException)
 async def s_handle_exception(request, exception):
-    response_buffer = response.json({"error": "An internal server error occurred"}, status=500)
+    # Correlate error to request
+    session_id = getattr(request.ctx, 'session_id', None) or uuid4().hex[:16]
+    error_id = uuid4().hex[:8]
+    status = 500
+    code = type(exception).__name__
+    message = "Internal HTTP Error"
     try:
         raise exception
     except AssertionError as e:
-        response_buffer = response.json({"error": str(e)}, status=400)
+        status = 400
+        code = 'AssertionError'
+        message = str(e) or 'Bad Request'
     except BaseException as e:
-        make_log("sanic_exception", f"Exception: {e}" + '\n' + str(traceback.format_exc()), level='error')
+        # keep default 500, but expose exception message to aid debugging
+        message = str(e) or message
+
+    # Build structured log with full context and traceback
+    try:
+        tb = _traceback.format_exc()
+        user_id = getattr(getattr(request.ctx, 'user', None), 'id', None)
+        log_ctx = {
+            'sid': session_id,
+            'eid': error_id,
+            'path': request.path,
+            'method': request.method,
+            'query': dict(request.args) if hasattr(request, 'args') else {},
+            'user_id': user_id,
+            'remote': (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip),
+            'code': code,
+            'message': message,
+            'traceback': tb,
+        }
+        make_log('http_exception', 'API exception', level='error', **log_ctx)
+    except BaseException:
+        pass
+
+    # Return enriched error response for the client
+    payload = {
+        'error': True,
+        'code': code,
+        'message': message,
+        'session_id': session_id,
+        'error_id': error_id,
+        'path': request.path,
+        'method': request.method,
+    }
+    response_buffer = response.json(payload, status=status)
 
     response_buffer = await close_db_session(request, response_buffer)
-    response_buffer.headers["Access-Control-Allow-Origin"] = "*"
-    response_buffer.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
-    response_buffer.headers["Access-Control-Allow-Headers"] = "Origin, Content-Type, Accept, Authorization, Referer, User-Agent, Sec-Fetch-Dest, Sec-Fetch-Mode, Sec-Fetch-Site"
-    response_buffer.headers["Access-Control-Allow-Credentials"] = "true"
     return response_buffer

Binary file not shown.

Binary file not shown.

View File

@@ -1,5 +1,7 @@
+import os
 from base58 import b58decode
 from sanic import response as sanic_response
+from uuid import uuid4
 
 from app.core._crypto.signer import Signer
 from app.core._secrets import hot_seed
@@ -8,17 +10,37 @@ from app.core.models.keys import KnownKey
 from app.core.models._telegram.wrapped_bot import Wrapped_CBotChat
 from app.core.models.user_activity import UserActivity
 from app.core.models.user import User
-from app.core.storage import Session
+from sqlalchemy import select
+from app.core.storage import new_session
 from datetime import datetime, timedelta
+from app.core.log_context import (
+    ctx_session_id, ctx_user_id, ctx_method, ctx_path, ctx_remote
+)
 
+ENABLE_INTERNAL_CORS = os.getenv("ENABLE_INTERNAL_CORS", "1").lower() in {"1", "true", "yes"}
 
-def attach_headers(response):
-    response.headers["Access-Control-Allow-Origin"] = "*"
-    response.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
-    response.headers["Access-Control-Allow-Headers"] = "Origin, Content-Type, Accept, Authorization, Referer, User-Agent, Sec-Fetch-Dest, Sec-Fetch-Mode, Sec-Fetch-Site, x-file-name, x-last-chunk, x-chunk-start, x-upload-id"
-    response.headers["Access-Control-Allow-Credentials"] = "true"
+
+def attach_headers(response, request=None):
+    response.headers.pop("Access-Control-Allow-Origin", None)
+    response.headers.pop("Access-Control-Allow-Methods", None)
+    response.headers.pop("Access-Control-Allow-Headers", None)
+    response.headers.pop("Access-Control-Allow-Credentials", None)
+    if not ENABLE_INTERNAL_CORS:
+        return response
+    response.headers["Access-Control-Allow-Origin"] = "*"
+    response.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS, PATCH, HEAD"
+    response.headers["Access-Control-Allow-Headers"] = (
+        "Origin, Content-Type, Accept, Authorization, Referer, User-Agent, Sec-Fetch-Dest, Sec-Fetch-Mode, "
+        "Sec-Fetch-Site, Tus-Resumable, tus-resumable, Upload-Length, upload-length, Upload-Offset, upload-offset, "
+        "Upload-Metadata, upload-metadata, Upload-Defer-Length, upload-defer-length, Upload-Concat, upload-concat, "
+        "x-file-name, x-last-chunk, x-chunk-start, x-upload-id, x-request-id"
+    )
     return response
 
 
 async def try_authorization(request):
     token = request.headers.get("Authorization")
@@ -30,7 +52,8 @@ async def try_authorization(request):
         make_log("auth", "Invalid token length", level="warning")
         return
 
-    known_key = request.ctx.db_session.query(KnownKey).filter(KnownKey.seed == token).first()
+    result = await request.ctx.db_session.execute(select(KnownKey).where(KnownKey.seed == token))
+    known_key = result.scalars().first()
     if not known_key:
         make_log("auth", "Unknown key", level="warning")
         return
@@ -58,7 +81,8 @@ async def try_authorization(request):
         make_log("auth", f"User ID mismatch: {known_key.meta.get('I_user_id', -1)} != {user_id}", level="warning")
         return
 
-    user = request.ctx.db_session.query(User).filter(User.id == known_key.meta['I_user_id']).first()
+    result = await request.ctx.db_session.execute(select(User).where(User.id == known_key.meta['I_user_id']))
+    user = result.scalars().first()
    if not user:
         make_log("auth", "No user from key", level="warning")
         return
@@ -118,7 +142,14 @@ async def save_activity(request):
         pass
 
     try:
-        activity_meta["headers"] = dict(request.headers)
+        # Sanitize sensitive headers
+        headers = dict(request.headers)
+        for hk in list(headers.keys()):
+            if str(hk).lower() in [
+                'authorization', 'cookie', 'x-service-signature', 'x-message-hash'
+            ]:
+                headers[hk] = '<redacted>'
+        activity_meta["headers"] = headers
     except:
         pass
@@ -127,23 +158,51 @@ async def save_activity(request):
         meta=activity_meta,
         user_id=request.ctx.user.id if request.ctx.user else None,
         user_ip=activity_meta.get("ip", "0.0.0.0"),
-        created=datetime.now()
+        created=datetime.utcnow()
     )
     request.ctx.db_session.add(new_user_activity)
-    request.ctx.db_session.commit()
+    await request.ctx.db_session.commit()
 
 
 async def attach_user_to_request(request):
     if request.method == 'OPTIONS':
-        return attach_headers(sanic_response.text("OK"))
+        return attach_headers(sanic_response.text("OK"), request)
 
-    request.ctx.db_session = Session()
+    request.ctx.db_session = new_session()
     request.ctx.verified_hash = None
     request.ctx.user = None
     request.ctx.user_key = None
     request.ctx.user_uploader_wrapper = Wrapped_CBotChat(request.app.ctx.memory._telegram_bot, db_session=request.ctx.db_session)
     request.ctx.user_client_wrapper = Wrapped_CBotChat(request.app.ctx.memory._client_telegram_bot, db_session=request.ctx.db_session)
+    # Correlation/session id for this request: prefer proxy-provided X-Request-ID
+    incoming_req_id = request.headers.get('X-Request-Id') or request.headers.get('X-Request-ID')
+    request.ctx.session_id = (incoming_req_id or uuid4().hex)[:32]
+    # Populate contextvars for automatic logging context
+    try:
+        ctx_session_id.set(request.ctx.session_id)
+        ctx_method.set(request.method)
+        ctx_path.set(request.path)
+        _remote = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip)
+        if _remote and isinstance(_remote, str) and ',' in _remote:
+            _remote = _remote.split(',')[0].strip()
+        ctx_remote.set(_remote)
+    except BaseException:
+        pass
+    try:
+        make_log(
+            "HTTP",
+            f"Request start sid={request.ctx.session_id} {request.method} {request.path}",
+            level='info'
+        )
+    except BaseException:
+        pass
 
     await try_authorization(request)
+    # Update user_id in context after auth
+    try:
+        if request.ctx.user and request.ctx.user.id:
+            ctx_user_id.set(request.ctx.user.id)
+    except BaseException:
+        pass
     await save_activity(request)
     await try_service_authorization(request)
@@ -152,17 +211,34 @@ async def close_request_handler(request, response):
     if request.method == 'OPTIONS':
         response = sanic_response.text("OK")
 
+    response = attach_headers(response, request)
     try:
-        request.ctx.db_session.close()
-    except BaseException as e:
+        await request.ctx.db_session.close()
+    except BaseException:
         pass
 
-    response = attach_headers(response)
+    try:
+        make_log(
+            "HTTP",
+            f"Request end sid={getattr(request.ctx, 'session_id', None)} {request.method} {request.path} status={getattr(response, 'status', None)}",
+            level='info'
+        )
+    except BaseException:
+        pass
     return request, response
 
 
 async def close_db_session(request, response):
     request, response = await close_request_handler(request, response)
-    response = attach_headers(response)
+    # Clear contextvars
+    try:
+        ctx_session_id.set(None)
+        ctx_user_id.set(None)
+        ctx_method.set(None)
+        ctx_path.set(None)
+        ctx_remote.set(None)
+    except BaseException:
+        pass
     return response

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -3,11 +3,10 @@ from datetime import datetime
import traceback import traceback
from sanic import response from sanic import response
from sqlalchemy import and_ from sqlalchemy import and_, select, func
from tonsdk.boc import begin_cell, begin_dict from tonsdk.boc import begin_cell, begin_dict
from tonsdk.utils import Address from tonsdk.utils import Address
from base58 import b58encode
from app.core._blockchain.ton.connect import TonConnect, wallet_obj_by_name from app.core._blockchain.ton.connect import TonConnect, wallet_obj_by_name
from app.core._blockchain.ton.platform import platform from app.core._blockchain.ton.platform import platform
from app.core._config import PROJECT_HOST from app.core._config import PROJECT_HOST
@ -46,7 +45,7 @@ async def s_api_v1_blockchain_send_new_content_message(request):
for field_key, field_value in { for field_key, field_value in {
'title': lambda x: isinstance(x, str), 'title': lambda x: isinstance(x, str),
'authors': lambda x: isinstance(x, list), 'authors': lambda x: isinstance(x, list),
'content': lambda x: isinstance(x, str), 'content': lambda x: isinstance(x, str), # may be plaintext CID (legacy) or encrypted IPFS CID (bafy...)
'image': lambda x: isinstance(x, str), 'image': lambda x: isinstance(x, str),
'description': lambda x: isinstance(x, str), 'description': lambda x: isinstance(x, str),
'price': lambda x: (isinstance(x, str) and x.isdigit()), 'price': lambda x: (isinstance(x, str) and x.isdigit()),
@ -57,42 +56,73 @@ async def s_api_v1_blockchain_send_new_content_message(request):
assert field_key in request.json, f"No {field_key} provided" assert field_key in request.json, f"No {field_key} provided"
assert field_value(request.json[field_key]), f"Invalid {field_key} provided" assert field_value(request.json[field_key]), f"Invalid {field_key} provided"
decrypted_content_cid, err = resolve_content(request.json['content']) artist = request.json.get('artist')
assert not err, f"Invalid content CID" if artist is not None:
assert isinstance(artist, str), "Invalid artist provided"
artist = artist.strip()
if artist == "":
artist = None
else:
artist = None
# Поиск исходного файла загруженного # Support legacy: 'content' as decrypted ContentId; and new: 'content' as encrypted IPFS CID
decrypted_content = request.ctx.db_session.query(StoredContent).filter( source_content_cid, cid_err = resolve_content(request.json['content'])
StoredContent.hash == decrypted_content_cid.content_hash_b58 assert not cid_err, f"Invalid content CID provided: {cid_err}"
).first()
assert decrypted_content, "No content locally found"
assert decrypted_content.type == "local/content_bin", "Invalid content type"
# Создание фиктивного encrypted_content. Не шифруем для производительности, тк зашифрованная нигде дальше не используется encrypted_content_cid = None
decrypted_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == source_content_cid.content_hash_b58)
)).scalars().first()
if decrypted_content and decrypted_content.type == "local/content_bin":
encrypted_content = await create_encrypted_content(request.ctx.db_session, decrypted_content) encrypted_content = await create_encrypted_content(request.ctx.db_session, decrypted_content)
encrypted_content_cid = encrypted_content.cid encrypted_content_cid = encrypted_content.cid
elif source_content_cid.cid_format == 'ipfs':
encrypted_content_cid = source_content_cid
else:
raise AssertionError("Provided content is neither locally available nor a valid encrypted CID")
if request.json['image']: if request.json['image']:
image_content_cid, err = resolve_content(request.json['image']) image_content_cid, err = resolve_content(request.json['image'])
assert not err, f"Invalid image CID" assert not err, f"Invalid image CID"
image_content = request.ctx.db_session.query(StoredContent).filter( image_content = (await request.ctx.db_session.execute(
StoredContent.hash == image_content_cid.content_hash_b58 select(StoredContent).where(StoredContent.hash == image_content_cid.content_hash_b58)
).first() )).scalars().first()
assert image_content, "No image locally found" assert image_content, "No image locally found"
else: else:
image_content_cid = None image_content_cid = None
image_content = None image_content = None
content_title = f"{', '.join(request.json['authors'])} - {request.json['title']}" if request.json['authors'] else request.json['title'] content_title = request.json['title']
if artist:
content_title = f"{artist} {content_title}"
elif request.json['authors']:
content_title = f"{', '.join(request.json['authors'])} {request.json['title']}"
metadata_content = await create_metadata_for_item( metadata_content = await create_metadata_for_item(
request.ctx.db_session, request.ctx.db_session,
title=content_title, title=request.json['title'],
artist=artist,
cover_url=f"{PROJECT_HOST}/api/v1.5/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None, cover_url=f"{PROJECT_HOST}/api/v1.5/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None,
authors=request.json['authors'], authors=request.json['authors'],
hashtags=request.json['hashtags'] hashtags=request.json['hashtags'],
downloadable=request.json['downloadable'] if 'downloadable' in request.json else False,
) )
# Try to update ContentIndexItem with cover_url for this encrypted content
try:
from app.core.models.content_v3 import ContentIndexItem
ecid_str = encrypted_content_cid.serialize_v2()
row = (await request.ctx.db_session.execute(select(ContentIndexItem).where(ContentIndexItem.encrypted_cid == ecid_str))).scalars().first()
if row:
payload = row.payload or {}
payload['cover_url'] = f"{PROJECT_HOST}/api/v1.5/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None
row.payload = payload
await request.ctx.db_session.commit()
except Exception as _e:
make_log("Blockchain", f"index cover update failed: {_e}", level='warning')
royalties_dict = begin_dict(8) royalties_dict = begin_dict(8)
i = 0 i = 0
for royalty_param in request.json['royaltyParams']: for royalty_param in request.json['royaltyParams']:
@ -104,18 +134,22 @@ async def s_api_v1_blockchain_send_new_content_message(request):
)
i += 1
- promo_free_upload_available = (
- 3 - (request.ctx.db_session.query(PromoAction).filter(
- PromoAction.user_internal_id == request.ctx.user.id,
- PromoAction.action_type == 'freeUpload',
- ).count())
- )
- if request.ctx.db_session.query(BlockchainTask).filter(
- and_(
- BlockchainTask.user_id == request.ctx.user.id,
- BlockchainTask.status != 'done',
- )
- ).first():
_cnt = (await request.ctx.db_session.execute(
select(func.count()).select_from(PromoAction).where(
and_(
PromoAction.user_internal_id == request.ctx.user.id,
PromoAction.action_type == 'freeUpload'
)
)
)).scalar()
promo_free_upload_available = 3 - int(_cnt or 0)
has_pending_task = (await request.ctx.db_session.execute(
select(BlockchainTask).where(
and_(BlockchainTask.user_id == request.ctx.user.id, BlockchainTask.status != 'done')
)
)).scalars().first()
if has_pending_task:
make_log("Blockchain", f"User {request.ctx.user.id} already has a pending task", level='warning')
promo_free_upload_available = 0
@@ -126,18 +160,19 @@ async def s_api_v1_blockchain_send_new_content_message(request):
user_id = str(request.ctx.user.id),
user_internal_id=request.ctx.user.id,
action_type='freeUpload',
- action_ref=str(encrypted_content_cid.content_hash),
action_ref=encrypted_content_cid.serialize_v2(),
created=datetime.now()
)
request.ctx.db_session.add(promo_action)
blockchain_task = BlockchainTask(
destination=platform.address.to_string(1, 1, 1),
- amount=str(int(0.03 * 10 ** 9)),
payload=b64encode(
begin_cell()
.store_uint(0x5491d08c, 32)
.store_uint(int.from_bytes(encrypted_content_cid.content_hash, "big", signed=False), 256)
- .store_address(Address(request.ctx.user.wallet_address(request.ctx.db_session)))
.store_address(Address(await request.ctx.user.wallet_address_async(request.ctx.db_session)))
.store_ref(
begin_cell()
.store_ref(
@@ -175,38 +210,40 @@ async def s_api_v1_blockchain_send_new_content_message(request):
user_id = request.ctx.user.id
)
request.ctx.db_session.add(blockchain_task)
- request.ctx.db_session.commit()
await request.ctx.db_session.commit()
await request.ctx.user_uploader_wrapper.send_message(
request.ctx.user.translated('p_uploadContentTxPromo').format(
title=content_title,
free_count=(promo_free_upload_available - 1)
), message_type='hint', message_meta={
- 'encrypted_content_hash': b58encode(encrypted_content_cid.content_hash).decode(),
'encrypted_content_hash': encrypted_content_cid.content_hash_b58,
'hint_type': 'uploadContentTxRequested'
}
)
return response.json({
- 'promoUpload': True,
'address': "free",
'amount': str(int(0.03 * 10 ** 9)),
'payload': ""
})
user_wallet_address = await request.ctx.user.wallet_address_async(request.ctx.db_session)
assert user_wallet_address, "Wallet address is not linked"
await request.ctx.user_uploader_wrapper.send_message(
request.ctx.user.translated('p_uploadContentTxRequested').format(
title=content_title,
), message_type='hint', message_meta={
- 'encrypted_content_hash': b58encode(encrypted_content_cid.content_hash).decode(),
'encrypted_content_hash': encrypted_content_cid.content_hash_b58,
'hint_type': 'uploadContentTxRequested'
}
)
- return response.json({
- 'address': platform.address.to_string(1, 1, 1),
- 'amount': str(int(0.03 * 10 ** 9)),
- 'payload': b64encode(
payload_cell = (
begin_cell()
.store_uint(0x5491d08c, 32)
.store_uint(int.from_bytes(encrypted_content_cid.content_hash, "big", signed=False), 256)
- .store_uint(0, 2)
.store_address(Address(user_wallet_address))
.store_ref(
begin_cell()
.store_ref(
@@ -236,8 +273,13 @@ async def s_api_v1_blockchain_send_new_content_message(request):
)
.end_cell()
)
- .end_cell().to_boc(False)
- ).decode()
.end_cell()
)
return response.json({
'address': platform.address.to_string(1, 1, 1),
'amount': str(int(0.03 * 10 ** 9)),
'payload': b64encode(payload_cell.to_boc(False)).decode()
})
except BaseException as e:
make_log("Blockchain", f"Error while sending new content message: {e}" + '\n' + traceback.format_exc(), level='error')
@@ -254,15 +296,38 @@ async def s_api_v1_blockchain_send_purchase_content_message(request):
assert field_key in request.json, f"No {field_key} provided"
assert field_value(request.json[field_key]), f"Invalid {field_key} provided"
- license_exist = request.ctx.db_session.query(UserContent).filter_by(
- onchain_address=request.json['content_address'],
- ).first()
- if license_exist:
- r_content = StoredContent.from_cid(request.ctx.db_session, license_exist.content.cid.serialize_v2())
- else:
- r_content = StoredContent.from_cid(request.ctx.db_session, request.json['content_address'])
- content = r_content.open_content(request.ctx.db_session)
if not (await request.ctx.user.wallet_address_async(request.ctx.db_session)):
return response.json({"error": "No wallet address provided"}, status=400)
from sqlalchemy import select
license_exist = (await request.ctx.db_session.execute(select(UserContent).where(
UserContent.onchain_address == request.json['content_address']
))).scalars().first()
from app.core.content.content_id import ContentId
if license_exist and license_exist.content_id:
r_content = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.id == license_exist.content_id
))).scalars().first()
else:
requested_cid = ContentId.deserialize(request.json['content_address'])
r_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == requested_cid.content_hash_b58))).scalars().first()
async def open_content_async(session, sc: StoredContent):
if not sc.encrypted:
decrypted = sc
encrypted = (await session.execute(select(StoredContent).where(StoredContent.decrypted_content_id == sc.id))).scalars().first()
else:
encrypted = sc
decrypted = (await session.execute(select(StoredContent).where(StoredContent.id == sc.decrypted_content_id))).scalars().first()
assert decrypted and encrypted, "Can't open content"
ctype = decrypted.json_format().get('content_type', 'application/x-binary')
try:
content_type = ctype.split('/')[0]
except Exception:
content_type = 'application'
return {'encrypted_content': encrypted, 'decrypted_content': decrypted, 'content_type': content_type}
content = await open_content_async(request.ctx.db_session, r_content)
licenses_cost = content['encrypted_content'].json_format()['license']
assert request.json['license_type'] in licenses_cost


@@ -6,6 +6,7 @@ from base58 import b58encode, b58decode
from sanic import response
from app.core.models.node_storage import StoredContent
from sqlalchemy import select
from app.core._blockchain.ton.platform import platform
from app.core._crypto.signer import Signer
from app.core._secrets import hot_pubkey, service_wallet, hot_seed
@@ -19,10 +20,10 @@ def get_git_info():
async def s_api_v1_node(request): # /api/v1/node
- last_known_index = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.onchain_index != None
- ).order_by(StoredContent.onchain_index.desc()).first()
- last_known_index = last_known_index.onchain_index if last_known_index else 0
last_known_index_obj = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.onchain_index != None).order_by(StoredContent.onchain_index.desc())
)).scalars().first()
last_known_index = last_known_index_obj.onchain_index if last_known_index_obj else 0
last_known_index = max(last_known_index, 0)
return response.json({
'id': b58encode(hot_pubkey).decode(),
@@ -39,10 +40,10 @@ async def s_api_v1_node(request): # /api/v1/node
})
async def s_api_v1_node_friendly(request):
- last_known_index = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.onchain_index != None
- ).order_by(StoredContent.onchain_index.desc()).first()
- last_known_index = last_known_index.onchain_index if last_known_index else 0
last_known_index_obj = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.onchain_index != None).order_by(StoredContent.onchain_index.desc())
)).scalars().first()
last_known_index = last_known_index_obj.onchain_index if last_known_index_obj else 0
last_known_index = max(last_known_index, 0)
response_plain_text = f"""
Node address: {service_wallet.address.to_string(1, 1, 1)}
@@ -54,7 +55,7 @@ async def s_api_v1_node_friendly(request):
for service_key, service in request.app.ctx.memory.known_states.items():
response_plain_text += f"""
{service_key}:
- status: {service['status'] if (service['timestamp'] and (datetime.now() - service['timestamp']).total_seconds() < 30) else 'not working: timeout'}
status: {service['status'] if (service['timestamp'] and (datetime.now() - service['timestamp']).total_seconds() < 120) else 'not working: timeout'}
delay: {round((datetime.now() - service['timestamp']).total_seconds(), 3) if service['timestamp'] else -1}
"""
return response.text(response_plain_text, content_type='text/plain')

app/api/routes/admin.py (new file, 2514 lines): diff suppressed because it is too large.


@@ -1,4 +1,5 @@
from datetime import datetime
from uuid import uuid4
from aiogram.utils.web_app import safe_parse_webapp_init_data
from sanic import response
@@ -36,7 +37,9 @@ async def s_api_v1_auth_twa(request):
make_log("auth", "Invalid TWA data", level="warning") make_log("auth", "Invalid TWA data", level="warning")
return response.json({"error": "Invalid TWA data"}, status=401) return response.json({"error": "Invalid TWA data"}, status=401)
known_user = request.ctx.db_session.query(User).filter(User.telegram_id == twa_data.user.id).first() known_user = (await request.ctx.db_session.execute(
select(User).where(User.telegram_id == twa_data.user.id)
)).scalars().first()
if not known_user: if not known_user:
new_user = User( new_user = User(
telegram_id=twa_data.user.id, telegram_id=twa_data.user.id,
@@ -51,11 +54,28 @@ async def s_api_v1_auth_twa(request):
created=datetime.now()
)
request.ctx.db_session.add(new_user)
- request.ctx.db_session.commit()
await request.ctx.db_session.commit()
- known_user = request.ctx.db_session.query(User).filter(User.telegram_id == twa_data.user.id).first()
known_user = (await request.ctx.db_session.execute(
select(User).where(User.telegram_id == twa_data.user.id)
)).scalars().first()
assert known_user, "User not created"
meta_updated = False
if not (known_user.meta or {}).get('ref_id'):
known_user.ensure_ref_id()
meta_updated = True
incoming_ref_id = auth_data.get('ref_id')
stored_ref_id = (known_user.meta or {}).get('ref_id')
if incoming_ref_id and incoming_ref_id != stored_ref_id:
if (known_user.meta or {}).get('referrer_id') != incoming_ref_id:
known_user.meta = {
**(known_user.meta or {}),
'referrer_id': incoming_ref_id
}
meta_updated = True
new_user_key = await known_user.create_api_token_v1(request.ctx.db_session, "USER_API_V1")
if auth_data['ton_proof']:
try:
@@ -64,12 +84,12 @@ async def s_api_v1_auth_twa(request):
wallet_info.account = Account.from_dict(auth_data['ton_proof']['account'])
wallet_info.ton_proof = TonProof.from_dict({'proof': auth_data['ton_proof']['ton_proof']})
connection_payload = auth_data['ton_proof']['ton_proof']['payload']
- known_payload = (request.ctx.db_session.execute(select(KnownKey).where(KnownKey.seed == connection_payload))).scalars().first()
known_payload = (await request.ctx.db_session.execute(select(KnownKey).where(KnownKey.seed == connection_payload))).scalars().first()
assert known_payload, "Unknown payload"
assert known_payload.meta['I_user_id'] == known_user.id, "Invalid user_id"
assert wallet_info.check_proof(connection_payload), "Invalid proof"
- for known_connection in (request.ctx.db_session.execute(select(WalletConnection).where(
for known_connection in (await request.ctx.db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == known_user.id,
WalletConnection.network == 'ton'
@@ -77,7 +97,7 @@ async def s_api_v1_auth_twa(request):
))).scalars().all():
known_connection.invalidated = True
- for other_connection in (request.ctx.db_session.execute(select(WalletConnection).where(
for other_connection in (await request.ctx.db_session.execute(select(WalletConnection).where(
WalletConnection.wallet_address == Address(wallet_info.account.address).to_string(1, 1, 1)
))).scalars().all():
other_connection.invalidated = True
@@ -86,36 +106,113 @@ async def s_api_v1_auth_twa(request):
user_id=known_user.id,
network='ton',
wallet_key='web2-client==1',
# `ton_proof.payload` is expected to be single-use in many wallets (and it is unique per auth call here),
# but client-side retries/replays can happen; keep payload separately and make DB id unique.
- connection_id=connection_payload,
connection_id=f"{connection_payload}.{uuid4().hex}",
wallet_address=Address(wallet_info.account.address).to_string(1, 1, 1),
keys={
- 'ton_proof': auth_data['ton_proof']
'ton_proof': auth_data['ton_proof'],
'ton_proof_payload': connection_payload,
},
- meta={},
meta={
'ton_proof_payload': connection_payload,
},
created=datetime.now(),
updated=datetime.now(),
invalidated=False,
without_pk=False
)
request.ctx.db_session.add(new_connection)
- request.ctx.db_session.commit()
await request.ctx.db_session.commit()
except BaseException as e:
make_log("auth", f"Invalid ton_proof: {e}", level="warning")
return response.json({"error": "Invalid ton_proof"}, status=400)
- ton_connection = (request.ctx.db_session.execute(select(WalletConnection).where(
ton_connection = (await request.ctx.db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == known_user.id,
WalletConnection.network == 'ton',
WalletConnection.invalidated == False
)
- ))).scalars().first()
).order_by(WalletConnection.created.desc()))).scalars().first()
known_user.last_use = datetime.now()
- request.ctx.db_session.commit()
if meta_updated:
known_user.updated = datetime.now()
await request.ctx.db_session.commit()
return response.json({
'user': known_user.json_format(),
'connected_wallet': ton_connection.json_format() if ton_connection else None,
'auth_v1_token': new_user_key['auth_v1_token']
})
async def s_api_v1_auth_me(request):
if not request.ctx.user:
return response.json({"error": "Unauthorized"}, status=401)
ton_connection = (await request.ctx.db_session.execute(
select(WalletConnection).where(
and_(
WalletConnection.user_id == request.ctx.user.id,
WalletConnection.network == 'ton',
WalletConnection.invalidated == False
)
).order_by(WalletConnection.created.desc())
)).scalars().first()
return response.json({
'user': request.ctx.user.json_format(),
'connected_wallet': ton_connection.json_format() if ton_connection else None
})
async def s_api_v1_auth_select_wallet(request):
if not request.ctx.user:
return response.json({"error": "Unauthorized"}, status=401)
try:
data = request.json
except Exception as e:
return response.json({"error": "Invalid JSON"}, status=400)
if "wallet_address" not in data:
return response.json({"error": "wallet_address is required"}, status=400)
# Convert raw wallet address to canonical format using Address from tonsdk.utils
raw_addr = data["wallet_address"]
canonical_address = Address(raw_addr).to_string(1, 1, 1)
db_session = request.ctx.db_session
user = request.ctx.user
# Check if a WalletConnection already exists for this user with the given canonical wallet address
existing_connection = (await db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == user.id,
WalletConnection.wallet_address == canonical_address
)
))).scalars().first()
if not existing_connection:
return response.json({"error": "Wallet connection not found"}, status=404)
saved_values = {
'keys': existing_connection.keys,
'meta': existing_connection.meta,
'wallet_key': existing_connection.wallet_key,
'connection_id': existing_connection.connection_id + uuid4().hex,
'network': existing_connection.network,
}
new_connection = WalletConnection(
**saved_values,
user_id=user.id,
wallet_address=canonical_address,
created=datetime.now(),
updated=datetime.now(),
invalidated=False,
without_pk=False
)
db_session.add(new_connection)
await db_session.commit()
return response.empty(status=200)
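One note on the two wallet flows above: both suffix connection_id with uuid4().hex, so re-selecting the same wallet or a replayed ton_proof payload never collides on the unique column while the original payload stays recoverable from keys/meta. A minimal sketch of the scheme (the payload value is illustrative):

from uuid import uuid4

ton_proof_payload = "example-payload"  # illustrative value from the wallet
connection_id = f"{ton_proof_payload}.{uuid4().hex}"  # unique per stored row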


@@ -1,5 +1,7 @@
from __future__ import annotations
from datetime import datetime, timedelta
from sanic import response
from sqlalchemy import select, and_, func, or_
from aiogram import Bot, types
from sqlalchemy import and_
from app.core.logger import make_log
@@ -8,8 +10,13 @@ from app.core.models.node_storage import StoredContent
from app.core.models.keys import KnownKey
from app.core.models import StarsInvoice
from app.core.models.content.user_content import UserContent
- from app.core._config import CLIENT_TELEGRAM_API_KEY, PROJECT_HOST
from app.core._config import CLIENT_TELEGRAM_API_KEY, CLIENT_TELEGRAM_BOT_USERNAME, PROJECT_HOST
from app.core.models.content_v3 import EncryptedContent as ECv3, ContentDerivative as CDv3, UploadSession
from app.core.content.content_id import ContentId
from app.core.network.dht import MetricsAggregator
import os
import json
import time
import uuid
@@ -22,13 +29,20 @@ async def s_api_v1_content_list(request):
store = request.args.get('store', 'local')
assert store in ('local', 'onchain'), "Invalid store"
- content_list = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.type.like(store + '%'),
- StoredContent.disabled == False
- ).order_by(StoredContent.created.desc()).offset(offset).limit(limit)
- make_log("Content", f"Listed {content_list.count()} contents", level='info')
stmt = (
select(StoredContent)
.where(
StoredContent.type.like(store + '%'),
StoredContent.disabled.is_(None)
)
.order_by(StoredContent.created.desc())
.offset(offset)
.limit(limit)
)
rows = (await request.ctx.db_session.execute(stmt)).scalars().all()
make_log("Content", f"Listed {len(rows)} contents", level='info')
result = {}
- for content in content_list.all():
for content in rows:
content_json = content.json_format()
result[content_json["cid"]] = content_json
@@ -38,23 +52,74 @@ async def s_api_v1_content_list(request):
async def s_api_v1_content_view(request, content_address: str):
# content_address can be CID or TON address
- license_exist = request.ctx.db_session.query(UserContent).filter_by(
- onchain_address=content_address,
- ).first()
license_exist = (await request.ctx.db_session.execute(
select(UserContent).where(UserContent.onchain_address == content_address)
)).scalars().first()
license_address = None
if license_exist:
- content_address = license_exist.content.cid.serialize_v2()
license_address = license_exist.onchain_address
if license_exist.content_id:
linked_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.id == license_exist.content_id)
)).scalars().first()
if linked_content:
content_address = linked_content.cid.serialize_v2()
- r_content = StoredContent.from_cid(request.ctx.db_session, content_address)
- content = r_content.open_content(request.ctx.db_session)
from app.core.content.content_id import ContentId
cid = ContentId.deserialize(content_address)
r_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == cid.content_hash_b58)
)).scalars().first()
async def open_content_async(session, sc: StoredContent):
if not sc.encrypted:
decrypted = sc
encrypted = (await session.execute(select(StoredContent).where(StoredContent.decrypted_content_id == sc.id))).scalars().first()
else:
encrypted = sc
decrypted = (await session.execute(select(StoredContent).where(StoredContent.id == sc.decrypted_content_id))).scalars().first()
assert decrypted and encrypted, "Can't open content"
ctype = decrypted.json_format().get('content_type', 'application/x-binary')
try:
content_type = ctype.split('/')[0]
except Exception:
content_type = 'application'
return {
'encrypted_content': encrypted,
'decrypted_content': decrypted,
'content_type': content_type,
'content_mime': ctype,
}
try:
content = await open_content_async(request.ctx.db_session, r_content)
except AssertionError:
# Fallback: handle plain stored content without encrypted/decrypted pairing
sc = r_content
from mimetypes import guess_type as _guess
_mime, _ = _guess(sc.filename or '')
_mime = _mime or 'application/octet-stream'
try:
_ctype = _mime.split('/')[0]
except Exception:
_ctype = 'application'
content = {
'encrypted_content': sc,
'decrypted_content': sc,
'content_type': _ctype,
'content_mime': _mime,
}
master_address = content['encrypted_content'].meta.get('item_address', '')
opts = {
- 'content_type': content['content_type'], # possibly inaccurate, should be reworked to use ffprobe
'content_mime': content.get('content_mime'),
- 'content_address': content['encrypted_content'].meta.get('item_address', '')
'content_address': license_address or master_address,
'license_address': license_address,
'master_address': master_address,
}
if content['encrypted_content'].key_id:
- known_key = request.ctx.db_session.query(KnownKey).filter(
- KnownKey.id == content['encrypted_content'].key_id
- ).first()
known_key = (await request.ctx.db_session.execute(
select(KnownKey).where(KnownKey.id == content['encrypted_content'].key_id)
)).scalars().first()
if known_key:
opts['key_hash'] = known_key.seed_hash # not needed at all at the moment
@@ -64,38 +129,49 @@ async def s_api_v1_content_view(request, content_address: str):
have_access = False
if request.ctx.user:
- user_wallet_address = request.ctx.user.wallet_address(request.ctx.db_session)
user_wallet_address = await request.ctx.user.wallet_address_async(request.ctx.db_session)
user_telegram_id = getattr(request.ctx.user, 'telegram_id', None)
or_clauses = [StarsInvoice.user_id == request.ctx.user.id]
if user_telegram_id is not None:
or_clauses.append(StarsInvoice.telegram_id == user_telegram_id)
stars_access = False
if or_clauses:
stars_access = bool((await request.ctx.db_session.execute(select(StarsInvoice).where(
and_(
StarsInvoice.content_hash == content['encrypted_content'].hash,
StarsInvoice.paid.is_(True),
or_(*or_clauses)
)
))).scalars().first())
have_access = (
(content['encrypted_content'].owner_address == user_wallet_address)
- or bool(request.ctx.db_session.query(UserContent).filter_by(owner_address=user_wallet_address, status='active',
- content_id=content['encrypted_content'].id).first()) \
- or bool(request.ctx.db_session.query(StarsInvoice).filter(
- and_(
- StarsInvoice.user_id == request.ctx.user.id,
- StarsInvoice.content_hash == content['encrypted_content'].hash,
- StarsInvoice.paid == True
- )
- ).first())
or bool((await request.ctx.db_session.execute(select(UserContent).where(
and_(UserContent.owner_address == user_wallet_address, UserContent.status == 'active', UserContent.content_id == content['encrypted_content'].id)
))).scalars().first())
or stars_access
)
if not have_access:
- current_star_rate = ServiceConfig(request.ctx.db_session).get('live_tonPerStar', [0, 0])[0]
current_star_rate = (await ServiceConfig(request.ctx.db_session).get('live_tonPerStar', [0, 0]))[0]
if current_star_rate < 0:
current_star_rate = 0.00000001
stars_cost = int(int(content['encrypted_content'].meta['license']['resale']['price']) / 1e9 / current_star_rate * 1.2)
- if request.ctx.user.telegram_id in [5587262915, 6861699286]:
if getattr(request.ctx.user, 'is_admin', False):
stars_cost = 2
else:
stars_cost = int(int(content['encrypted_content'].meta['license']['resale']['price']) / 1e9 / current_star_rate * 1.2)
invoice_id = f"access_{uuid.uuid4().hex}"
- exist_invoice = request.ctx.db_session.query(StarsInvoice).filter(
exist_invoice = (await request.ctx.db_session.execute(select(StarsInvoice).where(
and_(
StarsInvoice.user_id == request.ctx.user.id,
StarsInvoice.created > datetime.now() - timedelta(minutes=25),
StarsInvoice.amount == stars_cost,
StarsInvoice.content_hash == content['encrypted_content'].hash,
)
- ).first()
))).scalars().first()
if exist_invoice:
invoice_url = exist_invoice.invoice_url
else:
@@ -116,10 +192,12 @@ async def s_api_v1_content_view(request, content_address: str):
amount=stars_cost,
user_id=request.ctx.user.id,
content_hash=content['encrypted_content'].hash,
- invoice_url=invoice_url
invoice_url=invoice_url,
telegram_id=getattr(request.ctx.user, 'telegram_id', None),
bot_username=CLIENT_TELEGRAM_BOT_USERNAME,
)
)
- request.ctx.db_session.commit()
await request.ctx.db_session.commit()
except BaseException as e:
make_log("Content", f"Can't create invoice link: {e}", level='warning')
@@ -131,34 +209,325 @@ async def s_api_v1_content_view(request, content_address: str):
display_options = {
'content_url': None,
'content_kind': None,
'has_preview': False,
'original_available': False,
'requires_license': False,
}
if have_access:
opts['have_licenses'].append('listen')
- converted_content = content['encrypted_content'].meta.get('converted_content')
- if converted_content:
- user_content_option = 'low_preview'
- if have_access:
- user_content_option = 'low' # TODO: serve 'high' if the user turns out to be an audiophile
- converted_content = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.hash == converted_content[user_content_option]
- ).first()
- if converted_content:
- display_options['content_url'] = converted_content.web_url
- content_meta = content['encrypted_content'].json_format()
- content_metadata = StoredContent.from_cid(request.ctx.db_session, content_meta.get('metadata_cid') or None)
encrypted_json = content['encrypted_content'].json_format()
decrypted_json = content['decrypted_content'].json_format()
enc_cid = encrypted_json.get('content_cid') or encrypted_json.get('encrypted_cid')
ec_v3 = None
derivative_rows = []
if enc_cid:
ec_v3 = (await request.ctx.db_session.execute(select(ECv3).where(ECv3.encrypted_cid == enc_cid))).scalars().first()
if ec_v3:
derivative_rows = (await request.ctx.db_session.execute(select(CDv3).where(CDv3.content_id == ec_v3.id))).scalars().all()
upload_row = None
if enc_cid:
upload_row = (await request.ctx.db_session.execute(select(UploadSession).where(UploadSession.encrypted_cid == enc_cid))).scalars().first()
converted_meta_map = dict(content['encrypted_content'].meta.get('converted_content') or {})
content_mime = (
(ec_v3.content_type if ec_v3 and ec_v3.content_type else None)
or decrypted_json.get('content_type')
or encrypted_json.get('content_type')
or opts.get('content_mime')
or 'application/octet-stream'
)
# Fallback: if stored content reports generic application/*, try guess by filename
try:
if content_mime.startswith('application/'):
from mimetypes import guess_type as _guess
_fn = decrypted_json.get('filename') or encrypted_json.get('filename') or ''
_gm, _ = _guess(_fn)
if _gm:
content_mime = _gm
except Exception:
pass
opts['content_mime'] = content_mime
try:
opts['content_type'] = content_mime.split('/')[0]
except Exception:
opts['content_type'] = opts.get('content_type') or 'application'
content_kind = 'audio'
if content_mime.startswith('video/'):
content_kind = 'video'
elif content_mime.startswith('audio/'):
content_kind = 'audio'
else:
content_kind = 'binary'
display_options['content_kind'] = content_kind
display_options['requires_license'] = (not have_access) and content_kind == 'binary'
derivative_latest = {}
if derivative_rows:
derivative_sorted = sorted(derivative_rows, key=lambda row: row.created_at or datetime.min)
for row in derivative_sorted:
derivative_latest[row.kind] = row
def _row_to_hash_and_url(row):
if not row or not row.local_path:
return None, None
file_hash = row.local_path.split('/')[-1]
return file_hash, f"{PROJECT_HOST}/api/v1/storage.proxy/{file_hash}"
has_preview = bool(derivative_latest.get('decrypted_preview') or converted_meta_map.get('low_preview'))
display_options['has_preview'] = has_preview
display_options['original_available'] = bool(derivative_latest.get('decrypted_original') or converted_meta_map.get('original'))
chosen_row = None
if content_kind == 'binary':
if have_access and 'decrypted_original' in derivative_latest:
chosen_row = derivative_latest['decrypted_original']
elif have_access:
for key in ('decrypted_low', 'decrypted_high'):
if key in derivative_latest:
chosen_row = derivative_latest[key]
break
else:
for key in ('decrypted_preview', 'decrypted_low'):
if key in derivative_latest:
chosen_row = derivative_latest[key]
break
def _make_token_for(hash_value: str, scope: str, user_id: int | None) -> str:
try:
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed, hot_pubkey
from app.core._utils.b58 import b58encode as _b58e
signer = Signer(hot_seed)
# Media URLs are polled very frequently by the web client (e.g. every 5s).
# If we generate a new exp for every request, the signed URL changes every poll,
# forcing the player to reload and breaking continuous streaming.
#
# To keep URLs stable while still expiring tokens, we "bucket" exp time.
# Default behavior keeps tokens stable for ~10 minutes; can be tuned via env.
ttl_sec = int(os.getenv("STORAGE_PROXY_TOKEN_TTL_SEC", "600"))
bucket_sec = int(os.getenv("STORAGE_PROXY_TOKEN_BUCKET_SEC", str(ttl_sec)))
ttl_sec = max(1, ttl_sec)
bucket_sec = max(1, bucket_sec)
now = int(time.time())
exp_base = now + ttl_sec
# Always move to the next bucket boundary so the token doesn't flip immediately
# after a boundary due to rounding edge cases.
exp = ((exp_base // bucket_sec) + 1) * bucket_sec
uid = int(user_id or 0)
payload = {'hash': hash_value, 'scope': scope, 'exp': exp, 'uid': uid}
blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
sig = signer.sign(blob)
pub = _b58e(hot_pubkey).decode()
return f"pub={pub}&exp={exp}&scope={scope}&uid={uid}&sig={sig}"
except Exception:
return ""
if chosen_row:
file_hash, url = _row_to_hash_and_url(chosen_row)
if url:
token = _make_token_for(file_hash or '', 'full' if have_access else 'preview', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{url}?{token}" if token else url
ext_candidate = None
if chosen_row.content_type:
ext_candidate = chosen_row.content_type.split('/')[-1]
elif '/' in content_mime:
ext_candidate = content_mime.split('/')[-1]
if ext_candidate:
opts['content_ext'] = ext_candidate
if content_kind == 'binary':
display_options['original_available'] = True
converted_meta_map.setdefault('original', file_hash)
elif have_access:
converted_meta_map.setdefault('low', file_hash)
else:
converted_meta_map.setdefault('low_preview', file_hash)
if not display_options['content_url'] and converted_meta_map:
if content_kind == 'binary':
preference = ['original'] if have_access else []
else:
preference = ['low', 'high', 'low_preview'] if have_access else ['low_preview', 'low', 'high']
for key in preference:
hash_value = converted_meta_map.get(key)
if not hash_value:
continue
# Try the proxy right away (even if there is no local record)
token = _make_token_for(hash_value, 'full' if have_access else 'preview', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{PROJECT_HOST}/api/v1/storage.proxy/{hash_value}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{hash_value}"
if '/' in content_mime:
opts['content_ext'] = content_mime.split('/')[-1]
if content_kind == 'binary':
display_options['original_available'] = True
break
# Final fallback: no derivatives known — serve stored content directly for AV
if not display_options['content_url'] and content_kind in ('audio', 'video'):
from app.core._utils.b58 import b58encode as _b58e
scid = decrypted_json.get('cid') or encrypted_json.get('cid')
try:
from app.core.content.content_id import ContentId as _CID
if scid:
_cid = _CID.deserialize(scid)
h = _cid.content_hash_b58
else:
h = decrypted_json.get('hash')
except Exception:
h = decrypted_json.get('hash')
if h:
token = _make_token_for(h, 'preview' if not have_access else 'full', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{PROJECT_HOST}/api/v1/storage.proxy/{h}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{h}"
# Metadata fallback
content_meta = encrypted_json
content_metadata_json = None
_mcid = content_meta.get('metadata_cid') or None
if _mcid:
_cid = ContentId.deserialize(_mcid)
content_metadata = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
if content_metadata:
try:
with open(content_metadata.filepath, 'r') as f:
content_metadata_json = json.loads(f.read())
except Exception as exc:
make_log("Content", f"Can't read metadata file: {exc}", level='warning')
if not content_metadata_json:
fallback_name = (ec_v3.title if ec_v3 else None) or content_meta.get('title') or content_meta.get('cid')
fallback_description = (ec_v3.description if ec_v3 else '') or ''
content_metadata_json = {
'name': fallback_name or 'Без названия',
'description': fallback_description,
'downloadable': False,
}
cover_cid = content_meta.get('cover_cid')
if cover_cid:
token = _make_token_for(cover_cid, 'preview', getattr(request.ctx.user, 'id', None))
content_metadata_json.setdefault('image', f"{PROJECT_HOST}/api/v1/storage.proxy/{cover_cid}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{cover_cid}")
display_options['metadata'] = content_metadata_json
opts['downloadable'] = content_metadata_json.get('downloadable', False)
if opts['downloadable'] and 'listen' not in opts['have_licenses']:
opts['downloadable'] = False
# Conversion status summary
conversion_summary = {}
conversion_details = []
derivative_summary_map = {}
for row in derivative_latest.values():
conversion_summary[row.status] = conversion_summary.get(row.status, 0) + 1
derivative_summary_map[row.kind] = row
conversion_details.append({
'kind': row.kind,
'status': row.status,
'size_bytes': row.size_bytes,
'content_type': row.content_type,
'error': row.error,
'updated_at': (row.last_access_at or row.created_at).isoformat() + 'Z' if (row.last_access_at or row.created_at) else None,
})
required_kinds = set()
if content_kind == 'binary':
if derivative_latest.get('decrypted_original') or converted_meta_map.get('original'):
required_kinds.add('decrypted_original')
else:
required_kinds = {'decrypted_low', 'decrypted_high'}
if ec_v3 and ec_v3.content_type and ec_v3.content_type.startswith('video/'):
required_kinds.add('decrypted_preview')
statuses_by_kind = {kind: row.status for kind, row in derivative_summary_map.items() if kind in required_kinds}
conversion_state = 'pending'
if required_kinds and all(statuses_by_kind.get(kind) == 'ready' for kind in required_kinds):
conversion_state = 'ready'
elif any(statuses_by_kind.get(kind) == 'failed' for kind in required_kinds):
conversion_state = 'failed'
elif any(statuses_by_kind.get(kind) in ('processing', 'pending') for kind in required_kinds):
conversion_state = 'processing'
elif statuses_by_kind:
conversion_state = 'partial'
if display_options['content_url']:
conversion_state = 'ready'
upload_info = None
if upload_row:
upload_info = {
'id': upload_row.id,
'state': upload_row.state,
'error': upload_row.error,
'created_at': upload_row.created_at.isoformat() + 'Z' if upload_row.created_at else None,
'updated_at': upload_row.updated_at.isoformat() + 'Z' if upload_row.updated_at else None,
}
upload_state = upload_row.state if upload_row else None
if conversion_state == 'failed' or upload_state in ('failed', 'conversion_failed'):
final_state = 'failed'
elif conversion_state == 'ready':
final_state = 'ready'
elif conversion_state in ('processing', 'partial') or upload_state in ('processing', 'pinned'):
final_state = 'processing'
else:
final_state = 'uploaded'
conversion_info = {
'state': conversion_state,
'summary': conversion_summary,
'details': conversion_details,
'required_kinds': list(required_kinds),
}
opts['conversion'] = conversion_info
opts['upload'] = upload_info
opts['status'] = {
'state': final_state,
'conversion_state': conversion_state,
'upload_state': upload_info['state'] if upload_info else None,
'has_access': have_access,
}
if not opts.get('content_ext') and '/' in content_mime:
opts['content_ext'] = content_mime.split('/')[-1]
metrics_mgr: MetricsAggregator | None = getattr(request.app.ctx.memory, "metrics", None)
if metrics_mgr:
viewer_salt_raw = request.headers.get("X-View-Salt")
if viewer_salt_raw:
try:
viewer_salt = bytes.fromhex(viewer_salt_raw)
except ValueError:
viewer_salt = viewer_salt_raw.encode()
elif request.ctx.user:
viewer_salt = f"user:{request.ctx.user.id}".encode()
else:
viewer_salt = (request.remote_addr or request.ip or "anonymous").encode()
try:
watch_time_param = int(request.args.get("watch_time", 0))
except (TypeError, ValueError):
watch_time_param = 0
try:
bytes_out_param = int(request.args.get("bytes_out", 0))
except (TypeError, ValueError):
bytes_out_param = 0
completed_param = request.args.get("completed", "0") in ("1", "true", "True")
metrics_mgr.record_view(
content_id=content['encrypted_content'].hash,
viewer_salt=viewer_salt,
watch_time=watch_time_param,
bytes_out=bytes_out_param,
completed=completed_param,
)
return response.json({
**opts,
'encrypted': content['encrypted_content'].json_format(),
- 'display_options': display_options
'display_options': display_options,
})
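The bucketing logic in _make_token_for above is worth spelling out: the expiry is rounded up to the next bucket boundary, so every poll inside the same window signs an identical exp and the media URL stays byte-for-byte stable. A minimal standalone sketch of just that arithmetic, assuming the handler's default TTL and bucket of 600 seconds:

def bucketed_exp(now: int, ttl_sec: int = 600, bucket_sec: int = 600) -> int:
    # Round now+ttl up to the next bucket boundary; every request that
    # falls into the same bucket window produces the same exp value.
    return (((now + ttl_sec) // bucket_sec) + 1) * bucket_sec

# Two polls five seconds apart sign the same exp, so the URL does not change:
assert bucketed_exp(1_700_000_000) == bucketed_exp(1_700_000_005)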
@@ -182,14 +551,17 @@ async def s_api_v1_content_friendly_list(request):
</tr>
</thead>
"""
- for content in request.ctx.db_session.query(StoredContent).filter(
- StoredContent.type == 'onchain/content'
- ).all():
contents = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.type == 'onchain/content'
))).scalars().all()
for content in contents:
if not content.meta.get('metadata_cid'):
make_log("Content", f"Content {content.cid.serialize_v2()} has no metadata", level='warning')
continue
- metadata_content = StoredContent.from_cid(request.ctx.db_session, content.meta.get('metadata_cid'))
from app.core.content.content_id import ContentId
_cid = ContentId.deserialize(content.meta.get('metadata_cid'))
metadata_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
with open(metadata_content.filepath, 'r') as f:
metadata = json.loads(f.read())
@@ -223,10 +595,12 @@ async def s_api_v1_5_content_list(request):
return response.json({'error': 'Invalid limit'}, status=400)
# Query onchain contents which are not disabled
- contents = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.type == 'onchain/content',
- StoredContent.disabled == False
- ).order_by(StoredContent.created.desc()).offset(offset).limit(limit).all()
contents = (await request.ctx.db_session.execute(
select(StoredContent)
.where(StoredContent.type == 'onchain/content', StoredContent.disabled == False)
.order_by(StoredContent.created.desc())
.offset(offset).limit(limit)
)).scalars().all()
result = []
for content in contents:
@@ -235,7 +609,9 @@ async def s_api_v1_5_content_list(request):
if not metadata_cid:
continue # Skip if no metadata_cid is found
- metadata_content = StoredContent.from_cid(request.ctx.db_session, metadata_cid)
from app.core.content.content_id import ContentId
_cid = ContentId.deserialize(metadata_cid)
metadata_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
try:
with open(metadata_content.filepath, 'r') as f:
metadata = json.load(f)
@@ -251,9 +627,9 @@ async def s_api_v1_5_content_list(request):
preview_link = None
converted_content = content.meta.get('converted_content')
if converted_content:
- converted_content = request.ctx.db_session.query(StoredContent).filter(
- StoredContent.hash == converted_content['low_preview']
- ).first()
converted_content = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.hash == converted_content['low_preview']
))).scalars().first()
preview_link = converted_content.web_url
if converted_content.filename.split('.')[-1] in ('mp4', 'mov'):
media_type = 'video'


@@ -0,0 +1,53 @@
from __future__ import annotations
from sanic import response
from sqlalchemy import select
from datetime import datetime
from app.core.models.content_v3 import ContentIndexItem
from app.core.logger import make_log
async def s_api_v1_content_index(request):
rows = (await request.ctx.db_session.execute(select(ContentIndexItem))).scalars().all()
items = [{**r.payload, "encrypted_cid": r.encrypted_cid, "sig": r.sig, "_updated_at": (r.updated_at.isoformat() + 'Z') if r.updated_at else None} for r in rows]
# ETag by max updated_at + count
max_ts = max((it.get("_updated_at") for it in items if it.get("_updated_at")), default="1970-01-01T00:00:00Z")
etag = f'W/"{max_ts}.{len(items)}"'
inm = request.headers.get('If-None-Match')
if inm and inm == etag:
resp = response.empty(status=304)
resp.headers['ETag'] = etag
return resp
for it in items:
it.pop("_updated_at", None)
make_log("content.index", f"items={len(items)} etag={etag}")
resp = response.json({"items": items, "schema": "my-network/index@1"})
resp.headers['ETag'] = etag
return resp
async def s_api_v1_content_delta(request):
since = request.args.get('since')
if not since:
# No since provided → act as full index
return await s_api_v1_content_index(request)
try:
# basic parse
_ = datetime.fromisoformat(since.replace('Z', '+00:00'))
except Exception:
return response.json({"error": "BAD_SINCE"}, status=400)
rows = (await request.ctx.db_session.execute(select(ContentIndexItem))).scalars().all()
out = []
max_ts = since
for r in rows:
upd = (r.updated_at.isoformat() + 'Z') if r.updated_at else None
if upd and upd > since:
out.append({**r.payload, "encrypted_cid": r.encrypted_cid, "sig": r.sig})
if upd > max_ts:
max_ts = upd
resp = response.json({"items": out, "next_since": max_ts, "schema": "my-network/index@1"})
# Weak ETag for delta response
resp.headers['ETag'] = f'W/"{max_ts}.{len(out)}"'
return resp
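Since both endpoints emit a weak ETag derived from the max updated_at plus the item count, a polling client can skip unchanged bodies with a conditional GET. A hedged client-side sketch; the /api/v1/content.index path is assumed from the handler naming here, not confirmed by the diff:

import requests

def fetch_index(base_url: str, cached_etag: str | None = None):
    headers = {'If-None-Match': cached_etag} if cached_etag else {}
    r = requests.get(f"{base_url}/api/v1/content.index", headers=headers, timeout=10)
    if r.status_code == 304:
        return None, cached_etag  # index unchanged, reuse the cached items
    return r.json()['items'], r.headers.get('ETag')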


@@ -0,0 +1,33 @@
from __future__ import annotations
from sanic import response
from sqlalchemy import select
from app.core.models.content_v3 import EncryptedContent, ContentDerivative
from app.core._config import PROJECT_HOST
async def s_api_v1_content_derivatives(request):
cid = request.args.get('cid')
if not cid:
return response.json({"error": "BAD_REQUEST"}, status=400)
session = request.ctx.db_session
ec = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
if not ec:
return response.json({"error": "NOT_FOUND"}, status=404)
rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id))).scalars().all()
out = []
for r in rows:
# Derive /api/v1.5/storage/<hash> from local_path if possible
path_hash = (r.local_path or '').split('/')[-1]
storage_url = f"{PROJECT_HOST}/api/v1.5/storage/{path_hash}" if path_hash else None
out.append({
'kind': r.kind,
'interval': [r.interval_start_ms, r.interval_end_ms] if r.interval_start_ms is not None else None,
'content_type': r.content_type,
'size_bytes': r.size_bytes,
'status': r.status,
'url': storage_url,
})
return response.json({'cid': cid, 'derivatives': out})
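For orientation, a response from this endpoint would look roughly like the sketch below; all values are illustrative, and the url is derived from the derivative's local_path basename as shown above:

sample_response = {
    'cid': '<encrypted_cid>',
    'derivatives': [
        {
            'kind': 'decrypted_preview',
            'interval': [0, 30000],          # milliseconds, or None
            'content_type': 'video/mp4',
            'size_bytes': 1048576,
            'status': 'ready',
            'url': 'https://node.example/api/v1.5/storage/<derivative-file-hash>',
        }
    ],
}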

app/api/routes/dht.py (new file, 125 lines)

@@ -0,0 +1,125 @@
from __future__ import annotations
import json
from typing import Any, Dict, List
from sanic import response
from app.core.logger import make_log
from app.core._utils.b58 import b58decode
from app.core.network.dht.records import DHTRecord
from app.core.network.dht.store import DHTStore
from app.core.network.dht.crypto import compute_node_id
from app.core.network.dht.keys import MetaKey, MembershipKey, MetricKey
from sqlalchemy import select
from app.core.models.my_network import KnownNode
def _merge_strategy_for(key: str):
# Pick the right merge strategy based on the key prefix
from app.core.network.dht.replication import ReplicationState
from app.core.network.dht.membership import MembershipState
from app.core.network.dht.metrics import ContentMetricsState
if key.startswith('meta:'):
return lambda a, b: ReplicationState.from_dict(a).merge_with(ReplicationState.from_dict(b)).to_dict()
if key.startswith('membership:'):
# membership needs a node_id, but only for local state; a CRDT merge is enough here
return lambda a, b: MembershipState.from_dict('remote', None, a).merge(MembershipState.from_dict('remote', None, b)).to_dict()
if key.startswith('metric:'):
return lambda a, b: ContentMetricsState.from_dict('remote', a).merge(ContentMetricsState.from_dict('remote', b)).to_dict()
return lambda a, b: b
async def s_api_v1_dht_get(request):
"""Возвращает запись DHT по fingerprint или key."""
store: DHTStore = request.app.ctx.memory.dht_store
fp = request.args.get('fingerprint')
key = request.args.get('key')
if fp:
rec = store.get(fp)
if not rec:
return response.json({'error': 'NOT_FOUND'}, status=404)
return response.json({**rec.to_payload(), 'signature': rec.signature})
if key:
snap = store.snapshot()
for _fp, payload in snap.items():
if payload.get('key') == key:
return response.json(payload)
return response.json({'error': 'NOT_FOUND'}, status=404)
return response.json({'error': 'BAD_REQUEST'}, status=400)
def _verify_publisher(node_id: str, public_key_b58: str) -> bool:
try:
derived = compute_node_id(b58decode(public_key_b58))
return derived == node_id
except Exception:
return False
async def s_api_v1_dht_put(request):
"""Принимает запись(и) DHT, проверяет подпись и выполняет merge/persist.
Поддерживает одиночную запись (record: {...}) и пакет (records: [{...}]).
Требует поле public_key отправителя и соответствие node_id.
"""
mem = request.app.ctx.memory
store: DHTStore = mem.dht_store
data = request.json or {}
public_key = data.get('public_key')
if not public_key:
return response.json({'error': 'MISSING_PUBLIC_KEY'}, status=400)
# Determine publisher role (trusted/read-only/deny)
role = None
try:
session = request.ctx.db_session
kn = (await session.execute(select(KnownNode).where(KnownNode.public_key == public_key))).scalars().first()
role = (kn.meta or {}).get('role') if kn and kn.meta else None
except Exception:
role = None
def _process_one(payload: Dict[str, Any]) -> Dict[str, Any]:
try:
rec = DHTRecord.create(
key=payload['key'],
fingerprint=payload['fingerprint'],
value=payload['value'],
node_id=payload['node_id'],
logical_counter=int(payload['logical_counter']),
signature=payload.get('signature'),
timestamp=float(payload.get('timestamp') or 0),
)
except Exception as e:
return {'error': f'BAD_RECORD: {e}'}
if not _verify_publisher(rec.node_id, public_key):
return {'error': 'NODE_ID_MISMATCH'}
# Verify the record's signature
if not rec.verify(public_key):
return {'error': 'BAD_SIGNATURE'}
# Enforce ACL: untrusted nodes may not mutate meta/metric records
if role != 'trusted':
if rec.key.startswith('meta:') or rec.key.startswith('metric:'):
return {'error': 'FORBIDDEN_NOT_TRUSTED'}
merge_fn = _merge_strategy_for(rec.key)
try:
merged = store.merge_record(rec, merge_fn)
return {'ok': True, 'fingerprint': merged.fingerprint}
except Exception as e:
make_log('DHT.put', f'merge failed: {e}', level='warning')
return {'error': 'MERGE_FAILED'}
if 'record' in data:
result = _process_one(data['record'])
status = 200 if 'ok' in result else 400
return response.json(result, status=status)
elif 'records' in data and isinstance(data['records'], list):
results: List[Dict[str, Any]] = []
ok = True
for item in data['records']:
res = _process_one(item)
if 'error' in res:
ok = False
results.append(res)
return response.json({'ok': ok, 'results': results}, status=200 if ok else 207)
return response.json({'error': 'BAD_REQUEST'}, status=400)
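A hedged sketch of what a batch body for this endpoint could look like, matching the fields _process_one reads; all values are illustrative, and signature and node_id must come from the sender's real ed25519 key:

put_body = {
    "public_key": "<b58 ed25519 public key>",
    "records": [
        {
            "key": "membership:cluster-main",
            "fingerprint": "<fingerprint of the key>",
            "value": {"nodes": {"node-a": {"alive": True}}},
            "node_id": "<derived via compute_node_id(public_key)>",
            "logical_counter": 42,
            "timestamp": 1700000000.0,
            "signature": "<signature over the record>",
        }
    ],
}
# Per the handler: meta:/metric: keys are rejected unless the sender's
# KnownNode role is 'trusted'; membership: records only need a valid signature.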

app/api/routes/keys.py (new file, 117 lines)

@@ -0,0 +1,117 @@
from __future__ import annotations
import base64
import json
import os
from datetime import datetime
from typing import Dict, Any
from base58 import b58encode
from sanic import response
from sqlalchemy import select
from app.core._secrets import hot_pubkey
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, ContentKey, KeyGrant
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit
from app.core.models.my_network import KnownNode
from app.core.crypto.keywrap import unwrap_dek, KeyWrapError


def _b64(b: bytes) -> str:
    return base64.b64encode(b).decode()


async def s_api_v1_keys_request(request):
    # Rate limit per remote IP (reuse the handshake limiter)
    remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
    if not check_rate_limit(request.app.ctx.memory, remote_ip):
        return response.json({"error": "RATE_LIMIT"}, status=429)
    # Verify NodeSig
    ok, hdr_node, reason = verify_request(request, request.app.ctx.memory)
    if not ok:
        return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
    data: Dict[str, Any] = request.json or {}
    cid = data.get("encrypted_cid")
    requester_node = data.get("requestor_node_id")
    recipient_box_pub_b64 = data.get("recipient_box_pub")
    if not cid or not requester_node or not recipient_box_pub_b64:
        return response.json({"error": "BAD_REQUEST"}, status=400)
    if requester_node != hdr_node:
        return response.json({"error": "NODE_ID_MISMATCH"}, status=401)
    session = request.ctx.db_session
    row = (await session.execute(
        select(EncryptedContent, ContentKey)
        .join(ContentKey, ContentKey.content_id == EncryptedContent.id)
        .where(EncryptedContent.encrypted_cid == cid)
    )).first()
    if not row:
        return response.json({"error": "NOT_FOUND"}, status=404)
    ec: EncryptedContent = row[0]
    ck: ContentKey = row[1]
    # Allow only trusted nodes unless explicitly disabled via env
    TRUSTED_ONLY = (os.getenv('KEY_AUTO_GRANT_TRUSTED_ONLY', '1') == '1')
    if TRUSTED_ONLY:
        kn = (await session.execute(select(KnownNode).where(KnownNode.public_key == requester_node))).scalars().first()
        role = (kn.meta or {}).get('role') if kn else None
        if role != 'trusted':
            return response.json({"error": "DENIED_NOT_TRUSTED"}, status=403)
    if not ck.allow_auto_grant:
        return response.json({"error": "DENIED"}, status=403)
    # Seal the DEK for the recipient using a libsodium sealed box
    try:
        dek_plain = unwrap_dek(ck.key_ciphertext_b64)
        import nacl.public
        pk = nacl.public.PublicKey(base64.b64decode(recipient_box_pub_b64))
        box = nacl.public.SealedBox(pk)
        sealed = box.encrypt(dek_plain)
        sealed_b64 = _b64(sealed)
    except KeyWrapError as e:
        make_log("keys", f"unwrap failed: {e}", level="error")
        return response.json({"error": "KEY_UNWRAP_FAILED"}, status=500)
    except Exception as e:
        make_log("keys", f"seal failed: {e}", level="error")
        return response.json({"error": "SEAL_FAILED"}, status=500)
    issuer = b58encode(hot_pubkey).decode()
    purpose = (data.get('purpose') or 'full')
    ttl_sec = int(os.getenv('KEY_GRANT_PREVIEW_TTL_SEC', '0')) if purpose == 'preview' else 0
    grant_body = {
        "encrypted_cid": cid,
        "to_node_id": requester_node,
        "sealed_key_b64": sealed_b64,
        "aead_scheme": ec.aead_scheme,
        "chunk_bytes": ec.chunk_bytes,
        "constraints": {"ttl_sec": ttl_sec, "scope": purpose},
        "issued_at": datetime.utcnow().isoformat(),
        "issuer_node_id": issuer,
    }
    try:
        from app.core._crypto.signer import Signer
        from app.core._secrets import hot_seed
        signer = Signer(hot_seed)
        blob = json.dumps(grant_body, sort_keys=True, separators=(",", ":")).encode()
        sig = signer.sign(blob)
    except Exception:
        sig = ""
    grant = KeyGrant(
        encrypted_cid=cid,
        issuer_node_id=issuer,
        to_node_id=requester_node,
        sealed_key_b64=sealed_b64,
        aead_scheme=ec.aead_scheme,
        chunk_bytes=ec.chunk_bytes,
        # Keep the persisted constraints identical to the signed grant_body
        constraints={"ttl_sec": ttl_sec, "scope": purpose},
        sig=sig,
    )
    session.add(grant)
    await session.commit()
    grant_row = {
        **grant_body,
        "sig": sig,
        "grant_id": grant.id,
    }
    return response.json(grant_row)
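
On the requesting side, the sealed_key_b64 in the grant can be opened with the Curve25519 secret key matching the recipient_box_pub that was sent in the request. A minimal sketch with PyNaCl (the function name is an illustration, not part of this codebase):

# Hypothetical requester: open the sealed DEK returned in the grant.
import base64
import nacl.public

def open_sealed_dek(sealed_key_b64: str, box_secret: nacl.public.PrivateKey) -> bytes:
    sealed = base64.b64decode(sealed_key_b64)
    # SealedBox.decrypt needs the keypair whose public half was sent as recipient_box_pub.
    return nacl.public.SealedBox(box_secret).decrypt(sealed)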

app/api/routes/metrics.py (new file, +39 lines)

@@ -0,0 +1,39 @@
from __future__ import annotations
from sanic import response


async def s_api_metrics(request):
    try:
        from prometheus_client import generate_latest, CONTENT_TYPE_LATEST  # type: ignore
        data = generate_latest()
        return response.raw(data, content_type=CONTENT_TYPE_LATEST)
    except Exception:
        # Fallback: export minimal in-process counters from the DHT module, if available
        try:
            from app.core.network.dht import prometheus as dprom

            def dump(metric_obj, metric_name):
                lines = []
                values = getattr(metric_obj, "_values", {})
                for labels, value in values.items():
                    label_str = ",".join(f'{k}="{v}"' for k, v in labels)
                    if label_str:
                        lines.append(f"{metric_name}{{{label_str}}} {value}")
                    else:
                        lines.append(f"{metric_name} {value}")
                return lines

            parts = []
            parts += dump(dprom.replication_under, "dht_replication_under_total")
            parts += dump(dprom.replication_over, "dht_replication_over_total")
            parts += dump(dprom.leader_changes, "dht_leader_changes_total")
            parts += dump(dprom.merge_conflicts, "dht_merge_conflicts_total")
            parts += dump(dprom.view_count_total, "dht_view_count_total")
            parts += dump(dprom.unique_estimate, "dht_unique_view_estimate")
            parts += dump(dprom.watch_time_seconds, "dht_watch_time_seconds")
            body = "\n".join(parts) + ("\n" if parts else "")
            return response.text(body, content_type="text/plain; version=0.0.4")
        except Exception:
            return response.text("")

app/api/routes/network.py (new file, +312 lines)

@@ -0,0 +1,312 @@
from __future__ import annotations
import json
from datetime import datetime
from typing import Dict, Any
from app.core._utils.b58 import b58decode
from sanic import response
from urllib.parse import urlparse
from app.core.logger import make_log
from app.core.network.constants import CURRENT_PROTOCOL_VERSION, NODE_TYPE_PRIVATE
from app.core.network.config import NODE_PRIVACY
from app.core.network.handshake import build_handshake_payload, compute_node_info, sign_response
from app.core.network.nodes import upsert_known_node, list_known_public_nodes
from app.core.network.semver import compatibility
from app.core.network.guard import check_rate_limit, check_timestamp_fresh, check_and_remember_nonce
from app.core.network.config import HANDSHAKE_TS_TOLERANCE_SEC
from app.core.ipfs_client import swarm_connect
from app.core._config import PROJECT_HOST
from app.core.events.service import record_event
from app.core.network.asn import resolver as asn_resolver
from app.core.network.dht import compute_node_id, dht_config, ReachabilityReceipt


def _port_from_public_host(public_host: str) -> int:
    """Return an integer port extracted from a public_host URL or host:port string."""
    if not public_host:
        return 80
    parsed = urlparse(public_host)
    if parsed.scheme:
        if parsed.port:
            return parsed.port
        return 443 if parsed.scheme == "https" else 80
    host_port = public_host.strip()
    if ":" in host_port:
        candidate = host_port.rsplit(":", 1)[-1]
        try:
            return int(candidate)
        except (TypeError, ValueError):
            pass
    return 80
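
# Illustrative examples of the branches above:
#   _port_from_public_host("https://node.example.org")      -> 443
#   _port_from_public_host("http://node.example.org:8080")  -> 8080
#   _port_from_public_host("203.0.113.5:8080")              -> 8080
#   _port_from_public_host("node.example.org")              -> 80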
def _extract_ipfs_meta(payload: Dict[str, Any]) -> Dict[str, Any]:
    ipfs = payload or {}
    multiaddrs = ipfs.get("multiaddrs") or []
    if not isinstance(multiaddrs, list):
        multiaddrs = [multiaddrs]
    normalized_multiaddrs = [str(m) for m in multiaddrs if m]
    meta: Dict[str, Any] = {}
    if normalized_multiaddrs:
        meta["multiaddrs"] = normalized_multiaddrs
    peer_id = ipfs.get("peer_id")
    if peer_id:
        meta["peer_id"] = str(peer_id)
    agent = ipfs.get("agent_version") or ipfs.get("agentVersion")
    if agent:
        meta["agent_version"] = str(agent)
    return meta


async def _connect_ipfs_multiaddrs(addrs):
    for addr in addrs or []:
        try:
            await swarm_connect(addr)
        except Exception:
            pass


async def s_api_v1_network_info(request):
    async with request.app.ctx.memory.transaction("network.info"):
        node = await compute_node_info(request.ctx.db_session)
        make_log("Network", "info served")
        return response.json({"node": node})


async def s_api_v1_network_nodes(request):
    rows = await list_known_public_nodes(request.ctx.db_session)
    make_log("Network", f"nodes list count={len(rows)}")
    return response.json({
        "count": len(rows),
        "nodes": rows,
    })


async def s_api_v1_network_handshake(request):
    # Handshake accepted regardless of our privacy; private nodes typically have no external endpoint.
    # Rate limit per remote IP
    remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
    if not check_rate_limit(request.app.ctx.memory, remote_ip):
        return response.json({"error": "RATE_LIMIT"}, status=429)
    data = request.json or {}
    required = ["version", "schema_version", "public_key", "node_id", "node_type", "metrics", "timestamp", "signature"]
    for f in required:
        if f not in data:
            return response.json({"error": f"Missing field {f}"}, status=400)
    # public_host is required for public nodes only
    if data.get("node_type") != "private" and not data.get("public_host"):
        return response.json({"error": "Missing field public_host"}, status=400)
    # Timestamp freshness
    if not check_timestamp_fresh(data.get("timestamp")):
        return response.json({"error": "STALE_TIMESTAMP", "tolerance_sec": HANDSHAKE_TS_TOLERANCE_SEC}, status=400)
    # Nonce replay protection (best-effort)
    if not data.get("nonce") or not check_and_remember_nonce(request.app.ctx.memory, data.get("public_key"), data.get("nonce")):
        return response.json({"error": "NONCE_REPLAY"}, status=400)
    # Base schema and identity checks
    if data.get("schema_version") != dht_config.schema_version:
        return response.json({"error": "UNSUPPORTED_SCHEMA_VERSION"}, status=400)
    try:
        expected_node_id = compute_node_id(b58decode(data["public_key"]))
    except Exception:
        return response.json({"error": "BAD_PUBLIC_KEY"}, status=400)
    if data.get("node_id") != expected_node_id:
        return response.json({"error": "NODE_ID_MISMATCH"}, status=400)
    peer_version = str(data.get("version"))
    ipfs_meta = _extract_ipfs_meta(data.get("ipfs") or {})
    comp = compatibility(peer_version, CURRENT_PROTOCOL_VERSION)
    if comp == "blocked":
        # We still store the node but respond with 409
        try:
            await upsert_known_node(
                request.ctx.db_session,
                host=data.get("public_host"),
                port=_port_from_public_host(data.get("public_host")),
                public_key=str(data.get("public_key")),
                meta={
                    "version": peer_version,
                    "compatibility": comp,
                    "is_public": data.get("node_type", "public") != "private",
                    "public_host": data.get("public_host"),
                    "unsupported_last_checked_at": datetime.utcnow().isoformat(),
                    "ipfs": ipfs_meta,
                }
            )
        except Exception:
            pass
        make_log("Handshake", f"Reject incompatible peer {data.get('public_host')} peer={peer_version} current={CURRENT_PROTOCOL_VERSION}")
        return response.json({
            "error": "INCOMPATIBLE_VERSION",
            "compatibility": comp,
            "current": CURRENT_PROTOCOL_VERSION,
            "peer": peer_version,
        }, status=409)
    # Verify the Ed25519 signature over the canonical JSON of all fields except "signature".
    signed_fields = {k: v for (k, v) in data.items() if k != "signature"}
    blob = json.dumps(signed_fields, sort_keys=True, separators=(",", ":")).encode()
    try:
        import nacl.signing, nacl.encoding  # type: ignore
        vk = nacl.signing.VerifyKey(b58decode(data.get("public_key", "")))
        sig = b58decode(data.get("signature", ""))
        vk.verify(blob, sig)
        ok = True
    except Exception:
        ok = False
    if not ok:
        make_log("Handshake", f"Signature verification failed from {data.get('public_host')}", level='warning')
        return response.json({"error": "BAD_SIGNATURE"}, status=400)
    # Update membership / reachability information
    try:
        membership_mgr = getattr(request.app.ctx.memory, "membership", None)
        if membership_mgr:
            remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip() or None
            # Determine caller ASN using the advertised value or the resolver
            remote_asn = data.get("asn")
            if remote_asn is None:
                remote_asn = await asn_resolver.resolve_async(remote_ip, request.ctx.db_session)
            else:
                if remote_ip:
                    asn_resolver.learn(remote_ip, int(remote_asn))
            membership_mgr.update_member(
                node_id=data["node_id"],
                public_key=data["public_key"],
                ip=remote_ip,
                asn=int(remote_asn) if remote_asn is not None else None,
                metadata={
                    "capabilities": data.get("capabilities", {}),
                    "metrics": data.get("metrics", {}),
                    "public_host": data.get("public_host"),
                },
            )
            for receipt in data.get("reachability_receipts") or []:
                if not receipt.get("target_id") or not receipt.get("issuer_id"):
                    continue
                try:
                    # Only accept receipts issued by the caller
                    issuer_id = str(receipt.get("issuer_id"))
                    if issuer_id != data["node_id"]:
                        continue
                    # Canonical message for receipt verification;
                    # schema_version is embedded to avoid replay across versions
                    rec_asn = receipt.get("asn")
                    if rec_asn is None:
                        rec_asn = remote_asn
                    payload = {
                        "schema_version": dht_config.schema_version,
                        "target_id": str(receipt.get("target_id")),
                        "issuer_id": issuer_id,
                        "asn": int(rec_asn) if rec_asn is not None else None,
                        "timestamp": float(receipt.get("timestamp", data.get("timestamp"))),
                    }
                    blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
                    try:
                        import nacl.signing  # type: ignore
                        from app.core._utils.b58 import b58decode as _b58d
                        vk = nacl.signing.VerifyKey(_b58d(data["public_key"]))
                        sig_b = _b58d(str(receipt.get("signature", "")))
                        vk.verify(blob, sig_b)
                        # Accept and persist
                        membership_mgr.record_receipt(
                            ReachabilityReceipt(
                                target_id=payload["target_id"],
                                issuer_id=payload["issuer_id"],
                                asn=payload["asn"],
                                timestamp=payload["timestamp"],
                                signature=str(receipt.get("signature", "")),
                            )
                        )
                    except Exception:
                        # Ignore invalid receipts
                        continue
                except Exception:
                    continue
    except Exception as exc:
        make_log("Handshake", f"Membership ingest failed: {exc}", level='warning')
    # Upsert the node and respond with our info + known public nodes.
    # Do not persist private peers (ephemeral)
    if data.get("node_type") != "private" and data.get("public_host"):
        try:
            await upsert_known_node(
                request.ctx.db_session,
                host=data.get("public_host"),
                port=_port_from_public_host(data.get("public_host")),
                public_key=str(data.get("public_key")),
                meta={
                    "version": peer_version,
                    "compatibility": comp,
                    "is_public": True,
                    "public_host": data.get("public_host"),
                    "last_metrics": data.get("metrics", {}),
                    "capabilities": data.get("capabilities", {}),
                    "ipfs": ipfs_meta,
                }
            )
            await _connect_ipfs_multiaddrs(ipfs_meta.get("multiaddrs"))
            try:
                await record_event(
                    request.ctx.db_session,
                    'node_registered',
                    {
                        'public_key': str(data.get("public_key")),
                        'public_host': data.get("public_host"),
                        'node_type': data.get("node_type"),
                        'version': peer_version,
                        'capabilities': data.get("capabilities", {}),
                    },
                    origin_host=PROJECT_HOST,
                )
            except Exception as ev_exc:
                make_log("Events", f"Failed to record node_registered event: {ev_exc}", level="warning")
        except Exception as e:
            make_log("Handshake", f"Upsert peer failed: {e}", level='warning')
    # Merge advertised peers from the caller (optional field)
    for n in data.get("known_public_nodes", []) or []:
        known_ipfs_meta = _extract_ipfs_meta(n.get("ipfs") or {})
        try:
            await upsert_known_node(
                request.ctx.db_session,
                host=n.get("public_host") or n.get("host"),
                port=int(n.get("port") or 80),
                public_key=n.get("public_key") or "",
                meta={
                    "version": n.get("version") or "0.0.0",
                    "compatibility": compatibility(n.get("version") or "0.0.0", CURRENT_PROTOCOL_VERSION),
                    "is_public": True,
                    "public_host": n.get("public_host") or n.get("host"),
                    "capabilities": n.get("capabilities") or {},
                    "ipfs": known_ipfs_meta,
                }
            )
            await _connect_ipfs_multiaddrs(known_ipfs_meta.get("multiaddrs"))
        except Exception:
            pass
    node = await compute_node_info(request.ctx.db_session)
    known = await list_known_public_nodes(request.ctx.db_session)
    membership_mgr = getattr(request.app.ctx.memory, "membership", None)
    n_estimate = membership_mgr.n_estimate() if membership_mgr else 0
    resp = sign_response({
        "compatibility": comp,
        "node": node,
        "known_public_nodes": known,
        "n_estimate": n_estimate,
    })
    make_log("Handshake", f"OK with {data.get('public_host')} compat={comp}")
    # Minor version drift is still accepted with 200, but flagged for the caller.
    if comp == "warning":
        resp["warning"] = "MINOR version differs; proceed with caution"
    return response.json(resp, status=200)
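
A minimal sketch of a peer initiating this handshake (the route path and the exact payload fields beyond those listed in `required` are assumptions; the canonical-JSON signing mirrors the verification above):

# Hypothetical peer: sign every field except "signature" over canonical JSON.
import json, time, uuid
import httpx
import nacl.signing
from base58 import b58encode

async def do_handshake(base_url: str, signing_key: nacl.signing.SigningKey, payload: dict) -> dict:
    payload = dict(payload, timestamp=time.time(), nonce=uuid.uuid4().hex)
    blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
    payload["signature"] = b58encode(signing_key.sign(blob).signature).decode()
    async with httpx.AsyncClient() as client:
        r = await client.post(f"{base_url}/api/v1/network.handshake", json=payload)
        r.raise_for_status()
        return r.json()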


@@ -0,0 +1,77 @@
from __future__ import annotations
from typing import Dict, Any
from sanic import response
from sqlalchemy import select
from app.core.logger import make_log
from app.core.models import NodeEvent, KnownNode
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit
from app.core._config import PROJECT_HOST
from app.core.events.service import LOCAL_PUBLIC_KEY


def _origin_host() -> str | None:
    return PROJECT_HOST.rstrip('/') if PROJECT_HOST else None


async def s_api_v1_network_events(request):
    remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
    if not check_rate_limit(request.app.ctx.memory, remote_ip):
        return response.json({"error": "RATE_LIMIT"}, status=429)
    ok, node_id, reason = verify_request(request, request.app.ctx.memory)
    if not ok:
        return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
    session = request.ctx.db_session
    trusted = (await session.execute(
        select(KnownNode).where(KnownNode.public_key == node_id)
    )).scalar_one_or_none()
    role = (trusted.meta or {}).get('role') if trusted and trusted.meta else None
    if role != 'trusted':
        make_log("Events", f"Rejected events fetch from non-trusted node {node_id}", level="warning")
        return response.json({"error": "FORBIDDEN"}, status=403)
    try:
        since = int(request.args.get('since') or 0)
    except (TypeError, ValueError):
        since = 0
    since = max(since, 0)
    try:
        limit = int(request.args.get('limit') or 100)
    except (TypeError, ValueError):
        limit = 100
    limit = max(1, min(limit, 200))
    result = await session.execute(
        select(NodeEvent)
        .where(NodeEvent.origin_public_key == LOCAL_PUBLIC_KEY, NodeEvent.seq > since)
        .order_by(NodeEvent.seq.asc())
        .limit(limit)
    )
    rows = result.scalars().all()
    events: list[Dict[str, Any]] = []
    next_since = since
    for row in rows:
        next_since = max(next_since, int(row.seq))
        events.append({
            "origin_public_key": row.origin_public_key,
            "origin_host": row.origin_host or _origin_host(),
            "seq": int(row.seq),
            "uid": row.uid,
            "event_type": row.event_type,
            "payload": row.payload,
            "signature": row.signature,
            "created_at": (row.created_at.isoformat() + 'Z') if row.created_at else None,
        })
    payload = {
        "events": events,
        "next_since": next_since,
    }
    return response.json(payload)
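
Because the handler returns next_since as a cursor, a trusted peer can page through the event log with a simple loop (route path assumed; `client` is expected to be an httpx.AsyncClient already carrying valid NodeSig headers):

# Hypothetical poller: advance the `since` cursor until a page comes back empty.
async def poll_events(client, base_url: str, since: int = 0):
    # Yields events in ascending seq order.
    while True:
        r = await client.get(f"{base_url}/api/v1/network.events",
                             params={"since": since, "limit": 200})
        page = r.json()
        if not page.get("events"):
            return
        for ev in page["events"]:
            yield ev
        since = page["next_since"]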


@@ -11,6 +11,7 @@ from sanic import response
 import json
 from app.core._config import UPLOADS_DIR
+from sqlalchemy import select
 from app.core._utils.resolve_content import resolve_content
 from app.core.logger import make_log
 from app.core.models.node_storage import StoredContent
@@ -52,7 +53,9 @@ async def s_api_v1_storage_post(request):
     try:
         file_hash_bin = hashlib.sha256(file_content).digest()
         file_hash = b58encode(file_hash_bin).decode()
-        stored_content = request.ctx.db_session.query(StoredContent).filter(StoredContent.hash == file_hash).first()
+        stored_content = (await request.ctx.db_session.execute(
+            select(StoredContent).where(StoredContent.hash == file_hash)
+        )).scalars().first()
         if stored_content:
             stored_cid = stored_content.cid.serialize_v1()
             stored_cid_v2 = stored_content.cid.serialize_v2()
@@ -80,7 +83,7 @@ async def s_api_v1_storage_post(request):
             key_id=None,
         )
         request.ctx.db_session.add(new_content)
-        request.ctx.db_session.commit()
+        await request.ctx.db_session.commit()

         file_path = os.path.join(UPLOADS_DIR, file_hash)
         async with aiofiles.open(file_path, "wb") as file:
@@ -97,7 +100,7 @@ async def s_api_v1_storage_post(request):
             "content_url": f"dmy://storage?cid={new_cid}",
         })
     except BaseException as e:
-        make_log("Storage", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
+        make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error: {e}" + '\n' + traceback.format_exc(), level="error")
         return response.json({"error": f"Error: {e}"}, status=500)
@@ -112,14 +115,16 @@ async def s_api_v1_storage_get(request, file_hash=None):
         return response.json({"error": errmsg}, status=400)

     content_sha256 = b58encode(cid.content_hash).decode()
-    content = request.ctx.db_session.query(StoredContent).filter(StoredContent.hash == content_sha256).first()
+    content = (await request.ctx.db_session.execute(
+        select(StoredContent).where(StoredContent.hash == content_sha256)
+    )).scalars().first()
     if not content:
         return response.json({"error": "File not found"}, status=404)
-    make_log("Storage", f"File {content_sha256} requested by {request.ctx.user}")
+    make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} File {content_sha256} requested by user={getattr(getattr(request.ctx, 'user', None), 'id', None)}")
     file_path = os.path.join(UPLOADS_DIR, content_sha256)
     if not os.path.exists(file_path):
-        make_log("Storage", f"File {content_sha256} not found locally", level="error")
+        make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} File {content_sha256} not found locally", level="error")
         return response.json({"error": "File not found"}, status=404)

     async with aiofiles.open(file_path, "rb") as file:
@@ -139,7 +144,16 @@ async def s_api_v1_storage_get(request, file_hash=None):
             tempfile_path += "_mpeg" + (f"_{seconds_limit}" if seconds_limit else "")
             if not os.path.exists(tempfile_path):
                 try:
-                    cover_content = StoredContent.from_cid(content.meta.get('cover_cid'))
+                    # Resolve cover content by CID (async)
+                    from app.core.content.content_id import ContentId
+                    try:
+                        _cid = ContentId.deserialize(content.meta.get('cover_cid'))
+                        _cover_hash = _cid.content_hash_b58
+                        cover_content = (await request.ctx.db_session.execute(
+                            select(StoredContent).where(StoredContent.hash == _cover_hash)
+                        )).scalars().first()
+                    except Exception:
+                        cover_content = None
                     cover_tempfile_path = os.path.join(UPLOADS_DIR, f"tmp_{cover_content.hash}_jpeg")
                     if not os.path.exists(cover_tempfile_path):
                         cover_image = Image.open(cover_content.filepath)
@@ -173,25 +187,25 @@ async def s_api_v1_storage_get(request, file_hash=None):
                     try:
                         audio = AudioSegment.from_file(file_path)
                     except BaseException as e:
-                        make_log("Storage", f"Error loading audio from file: {e}", level="debug")
+                        make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error loading audio from file: {e}", level="debug")
                     if not audio:
                         try:
                             audio = AudioSegment(content_file_bin)
                         except BaseException as e:
-                            make_log("Storage", f"Error loading audio from binary: {e}", level="debug")
+                            make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error loading audio from binary: {e}", level="debug")
                     audio = audio[:seconds_limit * 1000] if seconds_limit else audio
                     audio.export(tempfile_path, format="mp3", cover=cover_tempfile_path)
                 except BaseException as e:
-                    make_log("Storage", f"Error converting audio: {e}" + '\n' + traceback.format_exc(), level="error")
+                    make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error converting audio: {e}" + '\n' + traceback.format_exc(), level="error")
                 if os.path.exists(tempfile_path):
                     async with aiofiles.open(tempfile_path, "rb") as file:
                         content_file_bin = await file.read()
                     accept_type = 'audio/mpeg'
-                    make_log("Storage", f"Audio {content_sha256} converted successfully")
+                    make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Audio {content_sha256} converted successfully", level='debug')
                 else:
                     tempfile_path = tempfile_path[:-5]
@@ -208,13 +222,13 @@ async def s_api_v1_storage_get(request, file_hash=None):
                             break
                         quality -= 5
                 except BaseException as e:
-                    make_log("Storage", f"Error converting image: {e}" + '\n' + traceback.format_exc(), level="error")
+                    make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error converting image: {e}" + '\n' + traceback.format_exc(), level="error")
                 if os.path.exists(tempfile_path):
                     async with aiofiles.open(tempfile_path, "rb") as file:
                         content_file_bin = await file.read()
-                    make_log("Storage", f"Image {content_sha256} converted successfully")
+                    make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Image {content_sha256} converted successfully", level='debug')
                     accept_type = 'image/jpeg'
                 else:
                     tempfile_path = tempfile_path[:-5]


@@ -11,36 +11,46 @@ from base58 import b58encode
 from sanic import response
 from app.core.logger import make_log
+from sqlalchemy import select
 from app.core.models.node_storage import StoredContent
 from app.core._config import UPLOADS_DIR
+from app.core.models.content_v3 import ContentDerivative
 from app.core._utils.resolve_content import resolve_content
+from app.core.network.nodesig import verify_request
+from app.core.models.my_network import KnownNode
+from sqlalchemy import select as sa_select
+import httpx
+from app.core._crypto.signer import Signer
+from app.core._secrets import hot_seed
+from app.core._utils.b58 import b58encode as _b58e, b58decode as _b58d
+import json, time

 # POST /api/v1.5/storage
 async def s_api_v1_5_storage_post(request):
     # Log the receipt of a chunk upload request
-    make_log("uploader_v1.5", "Received chunk upload request", level="INFO")
+    make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Received chunk upload request", level="INFO")
     # Get the provided file name from header and decode it from base64
     provided_filename_b64 = request.headers.get("X-File-Name")
     if not provided_filename_b64:
-        make_log("uploader_v1.5", "Missing X-File-Name header", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Missing X-File-Name header", level="ERROR")
         return response.json({"error": "Missing X-File-Name header"}, status=400)
     try:
         provided_filename = b64decode(provided_filename_b64).decode("utf-8")
     except Exception as e:
-        make_log("uploader_v1.5", f"Invalid X-File-Name header: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Invalid X-File-Name header: {e}", level="ERROR")
         return response.json({"error": "Invalid X-File-Name header"}, status=400)
     # Get X-Chunk-Start header (must be provided) and parse it as integer
     chunk_start_header = request.headers.get("X-Chunk-Start")
     if chunk_start_header is None:
-        make_log("uploader_v1.5", "Missing X-Chunk-Start header", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Missing X-Chunk-Start header", level="ERROR")
         return response.json({"error": "Missing X-Chunk-Start header"}, status=400)
     try:
         chunk_start = int(chunk_start_header)
     except Exception as e:
-        make_log("uploader_v1.5", f"Invalid X-Chunk-Start header: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Invalid X-Chunk-Start header: {e}", level="ERROR")
         return response.json({"error": "Invalid X-Chunk-Start header"}, status=400)
     # Enforce maximum chunk size (80 MB) using Content-Length header if provided
@@ -50,7 +60,7 @@ async def s_api_v1_5_storage_post(request):
     try:
         content_length = int(content_length)
         if content_length > max_chunk_size:
-            make_log("uploader_v1.5", f"Chunk size {content_length} exceeds maximum allowed", level="ERROR")
+            make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk size {content_length} exceeds maximum allowed", level="ERROR")
             return response.json({"error": "Chunk size exceeds maximum allowed (80 MB)"}, status=400)
     except:
         pass
@@ -62,9 +72,9 @@ async def s_api_v1_5_storage_post(request):
         # New upload session: generate a new uuid
         upload_id = str(uuid4())
         is_new_upload = True
-        make_log("uploader_v1.5", f"Starting new upload session with ID: {upload_id}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Start new upload session id={upload_id}", level="INFO")
     else:
-        make_log("uploader_v1.5", f"Resuming upload session with ID: {upload_id}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Resume upload session id={upload_id}", level="DEBUG")

     # Determine the temporary file path based on upload_id
     temp_path = os.path.join(UPLOADS_DIR, f"v1.5_upload_{upload_id}")
@@ -76,10 +86,10 @@ async def s_api_v1_5_storage_post(request):
     # If the provided chunk_start is less than current_size, the chunk is already received
     if chunk_start < current_size:
-        make_log("uploader_v1.5", f"Chunk starting at {chunk_start} already received, current size: {current_size}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk at {chunk_start} already received; size={current_size}", level="DEBUG")
         return response.json({"upload_id": upload_id, "current_size": current_size})
     elif chunk_start > current_size:
-        make_log("uploader_v1.5", f"Chunk start {chunk_start} does not match current file size {current_size}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk start {chunk_start} != current size {current_size}", level="ERROR")
         return response.json({"error": "Chunk start does not match current file size"}, status=400)

     # Append the received chunk to the temporary file
@@ -93,9 +103,9 @@ async def s_api_v1_5_storage_post(request):
             async for chunk in request.stream:
                 await out_file.write(chunk)
         new_size = os.path.getsize(temp_path)
-        make_log("uploader_v1.5", f"Appended chunk. New file size: {new_size}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Appended chunk. size={new_size}", level="DEBUG")
     except Exception as e:
-        make_log("uploader_v1.5", f"Error saving chunk: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error saving chunk: {e}", level="ERROR")
         return response.json({"error": "Failed to save chunk"}, status=500)

     # If computed hash matches the provided one, the final chunk has been received
@@ -111,28 +121,28 @@ async def s_api_v1_5_storage_post(request):
         stdout, stderr = await proc.communicate()
         if proc.returncode != 0:
             error_msg = stderr.decode().strip()
-            make_log("uploader_v1.5", f"sha256sum error: {error_msg}", level="ERROR")
+            make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} sha256sum error: {error_msg}", level="ERROR")
             return response.json({"error": "Failed to compute file hash"}, status=500)
         computed_hash_hex = stdout.decode().split()[0].strip()
         computed_hash_bytes = bytes.fromhex(computed_hash_hex)
         computed_hash_b58 = b58encode(computed_hash_bytes).decode()
-        make_log("uploader_v1.5", f"Computed hash (base58): {computed_hash_b58}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Computed hash (base58): {computed_hash_b58}", level="INFO")
     except Exception as e:
-        make_log("uploader_v1.5", f"Error computing file hash: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error computing file hash: {e}", level="ERROR")
         return response.json({"error": "Error computing file hash"}, status=500)

     final_path = os.path.join(UPLOADS_DIR, f"{computed_hash_b58}")
     try:
         os.rename(temp_path, final_path)
-        make_log("uploader_v1.5", f"Final chunk received. File renamed to: {final_path}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Final chunk received. Renamed to: {final_path}", level="INFO")
     except Exception as e:
-        make_log("uploader_v1.5", f"Error renaming file: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error renaming file: {e}", level="ERROR")
         return response.json({"error": "Failed to finalize file storage"}, status=500)

     db_session = request.ctx.db_session
-    existing = db_session.query(StoredContent).filter_by(hash=computed_hash_b58).first()
+    existing = (await db_session.execute(select(StoredContent).where(StoredContent.hash == computed_hash_b58))).scalars().first()
     if existing:
-        make_log("uploader_v1.5", f"File with hash {computed_hash_b58} already exists in DB", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} File already exists in DB: {computed_hash_b58}", level="INFO")
         serialized_v2 = existing.cid.serialize_v2()
         serialized_v1 = existing.cid.serialize_v1()
         return response.json({
@@ -156,10 +166,10 @@ async def s_api_v1_5_storage_post(request):
             created=datetime.utcnow()
         )
         db_session.add(new_content)
-        db_session.commit()
-        make_log("uploader_v1.5", f"New file stored and indexed for user {user_id} with hash {computed_hash_b58}", level="INFO")
+        await db_session.commit()
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Stored new file user={user_id} hash={computed_hash_b58}", level="INFO")
     except Exception as e:
-        make_log("uploader_v1.5", f"Database error: {e}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Database error: {e}", level="ERROR")
         return response.json({"error": "Database error"}, status=500)

     serialized_v2 = new_content.cid.serialize_v2()
@@ -178,7 +188,7 @@ async def s_api_v1_5_storage_post(request):

 # GET /api/v1.5/storage/<file_hash>
 async def s_api_v1_5_storage_get(request, file_hash):
-    make_log("uploader_v1.5", f"Received file retrieval request for hash: {file_hash}", level="INFO")
+    make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Retrieve file hash={file_hash}", level="INFO")
     try:
         file_hash = b58encode(resolve_content(file_hash)[0].content_hash).decode()
@@ -187,11 +197,11 @@ async def s_api_v1_5_storage_get(request, file_hash):
     final_path = os.path.join(UPLOADS_DIR, f"{file_hash}")
     if not os.path.exists(final_path):
-        make_log("uploader_v1.5", f"File not found: {final_path}", level="ERROR")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} File not found: {final_path}", level="ERROR")
         return response.json({"error": "File not found"}, status=404)

     db_session = request.ctx.db_session
-    stored = db_session.query(StoredContent).filter_by(hash=file_hash).first()
+    stored = (await db_session.execute(select(StoredContent).where(StoredContent.hash == file_hash))).scalars().first()
     if stored and stored.filename:
         filename_for_mime = stored.filename
     else:
@@ -204,8 +214,17 @@ async def s_api_v1_5_storage_get(request, file_hash):
     file_size = os.path.getsize(final_path)
     range_header = request.headers.get("Range")
+    # touch derivative last_access_at if it exists
+    try:
+        cd = (await request.ctx.db_session.execute(select(ContentDerivative).where(ContentDerivative.local_path.like(f"%/{file_hash}")))).scalars().first()
+        if cd:
+            cd.last_access_at = datetime.utcnow()
+            await request.ctx.db_session.commit()
+    except Exception:
+        pass
     if range_header:
-        make_log("uploader_v1.5", f"Processing Range header: {range_header}", level="INFO")
+        make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Processing Range: {range_header}", level="DEBUG")
         range_spec = range_header.strip().lower()
         if not range_spec.startswith("bytes="):
             make_log("uploader_v1.5", f"Invalid Range header: {range_header}", level="ERROR")
@@ -294,3 +313,125 @@ async def s_api_v1_5_storage_get(request, file_hash):
     else:
         make_log("uploader_v1.5", f"Returning full file for video/audio: {final_path}", level="INFO")
         return await response.file(final_path, mime_type=mime_type)
# GET /api/v1/storage.fetch/<file_hash>
# Internal endpoint for node-to-node requests (NodeSig). Returns the file if it is available locally.
async def s_api_v1_storage_fetch(request, file_hash):
    ok, node_id, reason = verify_request(request, request.app.ctx.memory)
    if not ok:
        return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
    # Trusted nodes only
    try:
        session = request.ctx.db_session
        row = (await session.execute(sa_select(KnownNode).where(KnownNode.public_key == node_id))).scalars().first()
        role = (row.meta or {}).get('role') if row and row.meta else None
        if role != 'trusted':
            return response.json({"error": "DENIED_NOT_TRUSTED"}, status=403)
    except Exception:
        pass
    # Reuse the v1.5 implementation
    return await s_api_v1_5_storage_get(request, file_hash)


# GET /api/v1/storage.proxy/<file_hash>
# Proxying for the web client: if the file is not available locally, try to fetch it from trusted nodes via NodeSig
async def s_api_v1_storage_proxy(request, file_hash):
    # Require either a valid NodeSig (unlikely for public clients) or a signed access token.
    # Token fields: pub, exp, scope, uid, sig over json {hash, scope, exp, uid}
    def _verify_access_token() -> bool:
        try:
            pub = (request.args.get('pub') or '').strip()
            exp = int(request.args.get('exp') or '0')
            scope = (request.args.get('scope') or '').strip()
            uid = int(request.args.get('uid') or '0')
            sig = (request.args.get('sig') or '').strip()
            if not pub or not exp or not scope or not sig:
                return False
            if exp < int(time.time()):
                return False
            payload = {
                'hash': file_hash,
                'scope': scope,
                'exp': exp,
                'uid': uid,
            }
            blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
            import nacl.signing
            vk = nacl.signing.VerifyKey(_b58d(pub))
            vk.verify(blob, _b58d(sig))
            # Note: we do not require a session-bound user for media fetches;
            # the short-lived signature itself is sufficient.
            return True
        except Exception:
            return False

    ok_nodesig, _nid, _reason = verify_request(request, request.app.ctx.memory)
    if not ok_nodesig and not _verify_access_token():
        return response.json({'error': 'UNAUTHORIZED'}, status=401)
    # Try locally first, without returning an early 404
    try:
        try:
            # Accept both a raw hash and a CID
            from app.core._utils.resolve_content import resolve_content as _res
            cid, _ = _res(file_hash)
            file_hash = _b58e(cid.content_hash).decode()
        except Exception:
            pass
        final_path = os.path.join(UPLOADS_DIR, f"{file_hash}")
        if os.path.exists(final_path):
            return await s_api_v1_5_storage_get(request, file_hash)
    except Exception:
        pass
    # Not available locally; try trusted nodes
    try:
        async with request.app.ctx.memory.transaction("storage.proxy"):
            # Build the list of trusted nodes
            session = request.ctx.db_session
            nodes = (await session.execute(sa_select(KnownNode))).scalars().all()
            candidates = []
            for n in nodes:
                role = (n.meta or {}).get('role') if n.meta else None
                if role != 'trusted':
                    continue
                host = (n.meta or {}).get('public_host') or (n.ip or '')
                if not host:
                    continue
                base = host.rstrip('/')
                if not base.startswith('http'):
                    base = f"http://{base}:{n.port or 80}"
                candidates.append(base)
            # Proxy the request, forwarding Range and streaming the response to the client
            range_header = request.headers.get("Range")
            timeout = httpx.Timeout(10.0, read=60.0)
            for base in candidates:
                url = f"{base}/api/v1/storage.fetch/{file_hash}"
                try:
                    # Sign the outgoing request with NodeSig
                    from app.core._secrets import hot_seed, hot_pubkey
                    from app.core.network.nodesig import sign_headers
                    pk_b58 = _b58e(hot_pubkey).decode()
                    headers = sign_headers('GET', f"/api/v1/storage.fetch/{file_hash}", b"", hot_seed, pk_b58)
                    if range_header:
                        headers['Range'] = range_header
                    async with httpx.AsyncClient(timeout=timeout) as client:
                        r = await client.get(url, headers=headers)
                        if r.status_code == 404:
                            continue
                        if r.status_code not in (200, 206):
                            continue
                        # Forward the content headers
                        resp = await request.respond(status=r.status_code, headers={
                            k: v for k, v in r.headers.items() if k.lower() in ("content-type", "content-length", "content-range", "accept-ranges")
                        })
                        async for chunk in r.aiter_bytes(chunk_size=1024 * 1024):
                            await resp.send(chunk)
                        await resp.eof()
                        return resp
                except Exception:
                    continue
    except Exception:
        pass
    return response.json({"error": "File not found"}, status=404)

app/api/routes/sync.py (new file, +70 lines)

@@ -0,0 +1,70 @@
from __future__ import annotations
from datetime import datetime
from sanic import response
from sqlalchemy import select
from app.core.ipfs_client import pin_add, pin_ls
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, IpfsSync
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit


async def s_api_v1_sync_pin(request):
    # Rate limit per IP and require NodeSig for POST
    remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
    if not check_rate_limit(request.app.ctx.memory, remote_ip):
        return response.json({"error": "RATE_LIMIT"}, status=429)
    ok, node_id, reason = verify_request(request, request.app.ctx.memory)
    if not ok:
        return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
    data = request.json or {}
    cid = data.get("encrypted_cid")
    if not cid:
        return response.json({"error": "BAD_REQUEST"}, status=400)
    session = request.ctx.db_session
    row = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
    if not row:
        # create a record with minimal info (unknown meta)
        row = EncryptedContent(
            encrypted_cid=cid,
            title=cid,
            description="",
            content_type="application/octet-stream",
            preview_enabled=False,
        )
        session.add(row)
        await session.flush()
    sync = (await session.execute(select(IpfsSync).where(IpfsSync.content_id == row.id))).scalars().first()
    if not sync:
        sync = IpfsSync(content_id=row.id, pin_state='queued')
        session.add(sync)
        await session.flush()
    try:
        await pin_add(cid, recursive=True)
        sync.pin_state = 'pinned'
        sync.pinned_at = datetime.utcnow()
    except Exception as e:
        make_log("sync", f"pin failed: {e}", level="error")
        sync.pin_state = 'failed'
        sync.pin_error = str(e)
    await session.commit()
    return response.json({"ok": True, "state": sync.pin_state})


async def s_api_v1_sync_status(request):
    cid = request.args.get("cid")
    if not cid:
        return response.json({"error": "BAD_REQUEST"}, status=400)
    try:
        info = await pin_ls(cid)
        state = 'pinned' if info else 'not_pinned'
    except Exception:
        state = 'not_pinned'
        info = {}
    return response.json({"cid": cid, "state": state, "info": info})


@@ -4,6 +4,7 @@ from aiogram.utils.web_app import safe_parse_webapp_init_data
 from sanic import response
 from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info, WalletConnection
+from sqlalchemy import select, and_
 from app.core._config import TELEGRAM_API_KEY
 from app.core.models.user import User
 from app.core.logger import make_log
@@ -23,8 +24,19 @@ async def s_api_v1_tonconnect_new(request):
     db_session = request.ctx.db_session
     user = request.ctx.user
     memory = request.ctx.memory
-    ton_connect, ton_connection = TonConnect.by_user(db_session, user)
+    # Try to restore the last connection from the DB
+    ton_connection = (await db_session.execute(select(WalletConnection).where(
+        and_(
+            WalletConnection.user_id == user.id,
+            WalletConnection.invalidated == False,
+            WalletConnection.network == 'ton'
+        )
+    ).order_by(WalletConnection.created.desc()))).scalars().first()
+    if ton_connection:
+        ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"])
         await ton_connect.restore_connection()
+    else:
+        ton_connect = TonConnect()
     make_log("TonConnect_API", f"SDK connected?: {ton_connect.connected}", level='info')
     if ton_connect.connected:
         return response.json({"error": "Already connected"}, status=400)
@@ -47,13 +59,11 @@ async def s_api_v1_tonconnect_logout(request):
     user = request.ctx.user
     memory = request.ctx.memory
-    wallet_connections = db_session.query(WalletConnection).filter(
-        WalletConnection.user_id == user.id,
-        WalletConnection.invalidated == False
-    ).all()
+    result = await db_session.execute(select(WalletConnection).where(
+        and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+    ))
+    wallet_connections = result.scalars().all()
     for wallet_connection in wallet_connections:
         wallet_connection.invalidated = True
-    db_session.commit()
+    await db_session.commit()
     return response.json({"success": True})


@@ -0,0 +1,57 @@
from sanic import response
from sqlalchemy import select
from app.core.models.content_v3 import UploadSession, EncryptedContent, ContentDerivative
from app.core._utils.resolve_content import resolve_content


async def s_api_v1_upload_status(request, upload_id: str):
    session = request.ctx.db_session
    row = await session.get(UploadSession, upload_id)
    if not row:
        return response.json({"error": "NOT_FOUND"}, status=404)
    encrypted_hash = None
    conversion = {"state": "not_started", "details": []}
    if row.encrypted_cid:
        cid_obj, err = resolve_content(row.encrypted_cid)
        if not err:
            encrypted_hash = cid_obj.content_hash_b58
        ec = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == row.encrypted_cid))).scalars().first()
        if ec:
            derivative_rows = (await session.execute(
                select(ContentDerivative.kind, ContentDerivative.status).where(ContentDerivative.content_id == ec.id)
            )).all()
            details = [
                {"kind": kind, "status": status}
                for kind, status in derivative_rows
            ]
            if ec.content_type and ec.content_type.startswith("audio/"):
                required = {"decrypted_high", "decrypted_low"}
            elif ec.content_type and ec.content_type.startswith("video/"):
                required = {"decrypted_high", "decrypted_low", "decrypted_preview"}
            else:
                required = {"decrypted_original"}
            statuses = {kind: status for kind, status in derivative_rows}
            if required and all(statuses.get(k) == "ready" for k in required):
                conv_state = "ready"
            elif any(statuses.get(k) == "failed" for k in required):
                conv_state = "failed"
            elif any(statuses.get(k) in ("processing", "pending") for k in required):
                conv_state = "processing"
            elif required:
                conv_state = "pending"
            else:
                conv_state = "not_started"
            conversion = {"state": conv_state, "details": details}
    return response.json({
        "id": row.id,
        "state": row.state,
        "encrypted_cid": row.encrypted_cid,
        "encrypted_hash": encrypted_hash,
        "size_bytes": row.size_bytes,
        "error": row.error,
        "conversion": conversion,
    })
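
For an audio upload whose low-bitrate derivative is still converting, the response would look roughly like this (all values illustrative):

{
    "id": "3f2a…",
    "state": "pinned",
    "encrypted_cid": "Qm…",
    "encrypted_hash": "8Yk…",
    "size_bytes": 7340032,
    "error": null,
    "conversion": {
        "state": "processing",
        "details": [
            {"kind": "decrypted_high", "status": "ready"},
            {"kind": "decrypted_low", "status": "processing"}
        ]
    }
}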


@@ -0,0 +1,328 @@
from __future__ import annotations
import base64
import json
import os
from datetime import datetime
from typing import Dict, Any
import aiofiles
from base58 import b58encode
from sanic import response
import magic  # type: ignore
from app.core._config import UPLOADS_DIR, PROJECT_HOST
from app.core._secrets import hot_pubkey
from app.core.crypto.aes_gcm_stream import encrypt_file_to_encf, CHUNK_BYTES
from app.core.crypto.keywrap import wrap_dek, KeyWrapError
from app.core.ipfs_client import add_streamed_file
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, ContentKey, IpfsSync, ContentIndexItem, UploadSession
from app.core.models.node_storage import StoredContent
from app.core.storage import db_session
from app.core._utils.resolve_content import resolve_content
from app.core.events.service import record_event
from sqlalchemy import select


def _b64(s: bytes) -> str:
    return base64.b64encode(s).decode()
async def s_api_v1_upload_tus_hook(request):
"""
tusd HTTP hook endpoint. We mainly handle post-finish to: encrypt -> IPFS add+pin -> record DB.
"""
try:
payload: Dict[str, Any] = request.json
except Exception:
payload = None
if payload is None:
raw_body = request.body or b''
try:
payload = json.loads(raw_body) if raw_body else {}
except Exception:
payload = {}
event = (payload.get("Type") or payload.get("type") or
payload.get("Event") or payload.get("event") or
payload.get("Hook") or payload.get("hook") or
payload.get("HookName") or payload.get("hook_name") or
request.headers.get("Hook-Name") or request.headers.get("hook-name"))
upload = payload.get("Upload") or payload.get("upload") or {}
if not event:
hook_name = (payload.get("HookName") or payload.get("hook") or
payload.get("hook_name") or request.headers.get("Hook-Name"))
raw = request.body or b''
preview = raw[:512]
make_log("tus-hook", f"Missing event type in hook payload; ignoring (hook={hook_name}, keys={list(payload.keys())}, raw={preview!r})", level="warning")
return response.json({"ok": True, "skipped": True})
if event not in ("post-finish", "postfinish"):
# accept but ignore other events
return response.json({"ok": True})
# Extract storage path from tusd payload
storage = upload.get("Storage") or {}
file_path = storage.get("Path") or storage.get("path")
if not file_path:
return response.json({"ok": False, "error": "NO_STORAGE_PATH"}, status=400)
meta = upload.get("MetaData") or {}
# Common metadata keys
title = meta.get("title") or meta.get("Title") or meta.get("name") or "Untitled"
artist = (meta.get("artist") or meta.get("Artist") or "").strip()
description = meta.get("description") or meta.get("Description") or ""
content_type = meta.get("content_type") or meta.get("Content-Type") or "application/octet-stream"
detected_content_type = None
try:
raw_detected = magic.from_file(file_path, mime=True)
if raw_detected:
detected_content_type = raw_detected.split(";")[0].strip()
except Exception as e:
make_log("tus-hook", f"magic MIME detection failed for {file_path}: {e}", level="warning")
def _is_av(mime: str | None) -> bool:
if not mime:
return False
return mime.startswith("audio/") or mime.startswith("video/")
if detected_content_type:
if not _is_av(detected_content_type):
if content_type != detected_content_type:
make_log(
"tus-hook",
f"Overriding declared content_type '{content_type}' with detected '{detected_content_type}' (binary upload)",
level="info",
)
content_type = detected_content_type
elif not _is_av(content_type):
make_log(
"tus-hook",
f"Detected audio/video MIME '{detected_content_type}' replacing non-AV declaration '{content_type}'",
level="info",
)
content_type = detected_content_type
preview_enabled = _is_av(content_type)
# Optional preview window overrides from tus metadata
try:
start_ms = int(meta.get("preview_start_ms") or 0)
dur_ms = int(meta.get("preview_duration_ms") or 30000)
except Exception:
start_ms, dur_ms = 0, 30000
# Record/Update upload session
upload_id = upload.get("ID") or upload.get("Id") or upload.get("id")
try:
size = int(upload.get("Size") or 0)
except Exception:
size = None
async with db_session() as session:
us = (await session.get(UploadSession, upload_id)) if upload_id else None
if not us and upload_id:
us = UploadSession(
id=upload_id,
filename=os.path.basename(file_path),
size_bytes=size,
state='processing',
encrypted_cid=None,
)
session.add(us)
await session.commit()
# Read & encrypt by streaming (ENCF v1 / AES-GCM)
# Generate per-content random DEK and salt
dek = os.urandom(32)
salt = os.urandom(16)
key_fpr = b58encode(hot_pubkey).decode() # fingerprint as our node id for now
# Stream encrypt into IPFS add
try:
wrapped_dek = wrap_dek(dek)
except KeyWrapError as e:
make_log("tus-hook", f"Key wrap failed: {e}", level="error")
async with db_session() as session:
if upload_id:
us = await session.get(UploadSession, upload_id)
if us:
us.state = 'failed'
us.error = str(e)
await session.commit()
return response.json({"ok": False, "error": "KEY_WRAP_FAILED"}, status=500)
try:
with open(file_path, 'rb') as f:
result = await add_streamed_file(
encrypt_file_to_encf(f, dek, CHUNK_BYTES, salt),
filename=os.path.basename(file_path),
params={},
)
except Exception as e:
make_log("tus-hook", f"Encrypt+add failed: {e}", level="error")
# mark failed
async with db_session() as session:
if upload_id:
us = await session.get(UploadSession, upload_id)
if us:
us.state = 'failed'
us.error = str(e)
await session.commit()
return response.json({"ok": False, "error": "ENCRYPT_ADD_FAILED"}, status=500)
encrypted_cid = result.get("Hash")
try:
enc_size = int(result.get("Size") or 0)
except Exception:
enc_size = None
encrypted_cid_obj, cid_err = resolve_content(encrypted_cid)
if cid_err:
make_log("tus-hook", f"Encrypted CID resolve failed: {cid_err}", level="error")
return response.json({"ok": False, "error": "INVALID_ENCRYPTED_CID"}, status=500)
encrypted_hash_b58 = encrypted_cid_obj.content_hash_b58
# Persist records
async with db_session() as session:
    ec = EncryptedContent(
        encrypted_cid=encrypted_cid,
        title=title,
        artist=artist or None,
        description=description,
        content_type=content_type,
        enc_size_bytes=enc_size,
        plain_size_bytes=os.path.getsize(file_path),
        preview_enabled=preview_enabled,
        preview_conf=({"duration_ms": dur_ms, "intervals": [[start_ms, start_ms + dur_ms]]} if preview_enabled else {}),
        aead_scheme="AES_GCM",
        chunk_bytes=CHUNK_BYTES,
        salt_b64=_b64(salt),
    )
    session.add(ec)
    await session.flush()
    ck = ContentKey(
        content_id=ec.id,
        key_ciphertext_b64=wrapped_dek,
        key_fingerprint=key_fpr,
        issuer_node_id=key_fpr,
        allow_auto_grant=True,
    )
    session.add(ck)
    await session.flush()
    sync = IpfsSync(
        content_id=ec.id,
        pin_state='pinned',
        bytes_total=enc_size,
        bytes_fetched=enc_size,
        pinned_at=datetime.utcnow(),
    )
    session.add(sync)
    existing_encrypted_content = (await session.execute(
        select(StoredContent).where(StoredContent.hash == encrypted_hash_b58)
    )).scalars().first()
    if not existing_encrypted_content:
        placeholder_meta = {
            'content_type': content_type,
            'storage': 'ipfs',
            'encrypted_cid': encrypted_cid,
            'upload_id': upload_id,
            'source': 'tusd',
            'title': title,
            'artist': artist or None,
        }
        encrypted_stored_content = StoredContent(
            type="local/encrypted_ipfs",
            hash=encrypted_hash_b58,
            content_id=encrypted_cid,
            filename=os.path.basename(file_path),
            meta=placeholder_meta,
            user_id=request.ctx.user.id if request.ctx.user else None,
            owner_address=None,
            encrypted=True,
            decrypted_content_id=None,
            key_id=None,
            created=datetime.utcnow(),
        )
        session.add(encrypted_stored_content)
    # Publish signed index item
    item = {
        "encrypted_cid": encrypted_cid,
        "title": title,
        "description": description,
        "artist": artist or None,
        "content_type": content_type,
        "size_bytes": enc_size,
        "preview_enabled": preview_enabled,
        "preview_conf": ec.preview_conf,
        "issuer_node_id": key_fpr,
        "salt_b64": _b64(salt),
    }
    try:
        from app.core._crypto.signer import Signer
        from app.core._secrets import hot_seed
        signer = Signer(hot_seed)
        blob = json.dumps(item, sort_keys=True, separators=(",", ":")).encode()
        sig = signer.sign(blob)
    except Exception:
        sig = ""
    session.add(ContentIndexItem(encrypted_cid=encrypted_cid, payload=item, sig=sig))
    try:
        await record_event(
            session,
            'content_uploaded',
            {
                'encrypted_cid': encrypted_cid,
                'content_hash': encrypted_hash_b58,
                'title': title,
                'description': description,
                'content_type': content_type,
                'size_bytes': enc_size,
                'user_id': request.ctx.user.id if getattr(request.ctx, 'user', None) else None,
                'telegram_id': getattr(getattr(request.ctx, 'user', None), 'telegram_id', None),
            },
            origin_host=PROJECT_HOST,
        )
    except Exception as exc:
        make_log("Events", f"Failed to record content_uploaded event: {exc}", level="warning")
    await session.commit()
# Update upload session with result and purge staging to avoid duplicates
async with db_session() as session:
    if upload_id:
        us = await session.get(UploadSession, upload_id)
        if us:
            us.state = 'pinned'
            us.encrypted_cid = encrypted_cid
            us.error = None
            if size:
                us.size_bytes = size
            # prefer using IPFS for downstream conversion; remove staging
            try:
                if file_path and os.path.exists(file_path):
                    os.remove(file_path)
            except Exception:
                pass
            us.storage_path = None
    await session.commit()
make_log("tus-hook", f"Uploaded+encrypted {file_path} -> {encrypted_cid}")
placeholder_path = os.path.join(UPLOADS_DIR, encrypted_hash_b58)
if not os.path.exists(placeholder_path):
    try:
        async with aiofiles.open(placeholder_path, "wb") as ph:
            await ph.write(json.dumps({
                "ipfs_cid": encrypted_cid,
                "note": "Encrypted payload stored in IPFS"
            }).encode())
    except Exception as e:
        make_log("tus-hook", f"Failed to create placeholder for {encrypted_hash_b58}: {e}", level="warning")
return response.json({"ok": True, "encrypted_cid": encrypted_cid, "upload_id": upload_id})

View File

@@ -7,6 +7,9 @@ from app.bot.middleware import UserDataMiddleware
from app.bot.routers.index import main_router
+def create_dispatcher() -> Dispatcher:
+"""Create aiogram Dispatcher lazily to avoid event loop issues at import time."""
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)
+return dp
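
The factory matters because the Dispatcher is then built inside a running event loop rather than at import time. A hedged usage sketch; the token and entrypoint are placeholders, not from the diff:

import asyncio
from aiogram import Bot

async def main():
    bot = Bot(token="<TELEGRAM_API_KEY>")  # placeholder
    dp = create_dispatcher()               # constructed once the loop exists
    await dp.start_polling(bot)

if __name__ == "__main__":
    asyncio.run(main())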

View File

@@ -1,6 +1,7 @@
from app.core.logger import make_log, logger
from app.core.models._telegram import Wrapped_CBotChat
from app.core.models.user import User
+from sqlalchemy import select
from app.core.storage import db_session
from aiogram import BaseMiddleware, types
from app.core.models.messages import KnownTelegramMessage

@@ -21,9 +22,9 @@ class UserDataMiddleware(BaseMiddleware):
# TODO: maybe make users cache
-with db_session(auto_commit=False) as session:
+async with db_session(auto_commit=False) as session:
try:
-user = session.query(User).filter_by(telegram_id=user_id).first()
+user = (await session.execute(select(User).where(User.telegram_id == user_id))).scalars().first()
except BaseException as e:
logger.error(f"Error when middleware getting user: {e}")
user = None

@@ -42,7 +43,7 @@ class UserDataMiddleware(BaseMiddleware):
created=datetime.now()
)
session.add(user)
-session.commit()
+await session.commit()
else:
if user.username != update_body.from_user.username:
user.username = update_body.from_user.username

@@ -60,7 +61,7 @@ class UserDataMiddleware(BaseMiddleware):
}
user.last_use = datetime.now()
-session.commit()
+await session.commit()
data['user'] = user
data['db_session'] = session

@@ -72,11 +73,11 @@ class UserDataMiddleware(BaseMiddleware):
if update_body.text.startswith('/start'):
message_type = 'start_command'
-if session.query(KnownTelegramMessage).filter_by(
-chat_id=update_body.chat.id,
-message_id=update_body.message_id,
-from_user=True
-).first():
+if (await session.execute(select(KnownTelegramMessage).where(
+(KnownTelegramMessage.chat_id == update_body.chat.id) &
+(KnownTelegramMessage.message_id == update_body.message_id) &
+(KnownTelegramMessage.from_user == True)
+))).scalars().first():
make_log("UserDataMiddleware", f"Message {update_body.message_id} already processed", level='debug')
return

@@ -91,7 +92,7 @@ class UserDataMiddleware(BaseMiddleware):
meta={}
)
session.add(new_message)
-session.commit()
+await session.commit()
result = await handler(event, data)
return result
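
The middleware changes follow one mechanical pattern: the SQLAlchemy 1.x Query API on a sync Session becomes a 2.0-style select() awaited on an AsyncSession. The pattern in isolation (model and session names assumed):

from sqlalchemy import select

def get_user_sync(session, telegram_id):
    # legacy 1.x style on a sync Session
    return session.query(User).filter_by(telegram_id=telegram_id).first()

async def get_user_async(session, telegram_id):
    # 2.0 style on an AsyncSession; commits likewise become `await session.commit()`
    result = await session.execute(select(User).where(User.telegram_id == telegram_id))
    return result.scalars().first()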

View File

@@ -1,11 +1,16 @@
import base58
from aiogram import types, Router, F
+from collections import defaultdict
+from datetime import datetime
+from typing import Optional
from app.core._config import WEB_APP_URLS
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
from app.core.models.node_storage import StoredContent
+from app.core.models.content_v3 import UploadSession, EncryptedContent, ContentDerivative
+from sqlalchemy import select, and_, or_
import json

router = Router()

@@ -17,27 +22,149 @@ def chunks(lst, n):
yield lst[i:i + n]

+async def _compute_content_status(db_session, encrypted_cid: Optional[str], fallback_content_type: Optional[str] = None):
+if not encrypted_cid:
+return {
+'final_state': 'uploaded',
+'conversion_state': 'pending',
+'upload_state': None,
+'summary': {},
+'details': [],
+'title': None,
+'content_type': fallback_content_type,
+}
+ec = (await db_session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == encrypted_cid))).scalars().first()
+content_type = fallback_content_type or (ec.content_type if ec else None) or 'application/octet-stream'
+derivative_rows = []
+if ec:
+derivative_rows = (await db_session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id))).scalars().all()
+upload_row = (await db_session.execute(select(UploadSession).where(UploadSession.encrypted_cid == encrypted_cid))).scalars().first()
+derivative_sorted = sorted(derivative_rows, key=lambda row: row.created_at or datetime.min)
+derivative_latest = {}
+summary = defaultdict(int)
+details = []
+for row in derivative_sorted:
+derivative_latest[row.kind] = row
+for kind, row in derivative_latest.items():
+summary[row.status] += 1
+details.append({
+'kind': kind,
+'status': row.status,
+'size_bytes': row.size_bytes,
+'error': row.error,
+'updated_at': (row.last_access_at or row.created_at).isoformat() + 'Z' if (row.last_access_at or row.created_at) else None,
+})
+if content_type.startswith('audio/'):
+required = {'decrypted_low', 'decrypted_high'}
+elif content_type.startswith('video/'):
+required = {'decrypted_low', 'decrypted_high', 'decrypted_preview'}
+else:
+required = {'decrypted_original'}
+statuses_by_kind = {kind: derivative_latest[kind].status for kind in required if kind in derivative_latest}
+conversion_state = 'pending'
+if required and all(statuses_by_kind.get(kind) == 'ready' for kind in required):
+conversion_state = 'ready'
+elif any(statuses_by_kind.get(kind) == 'failed' for kind in required):
+conversion_state = 'failed'
+elif any(statuses_by_kind.get(kind) in ('processing', 'pending') for kind in required):
+conversion_state = 'processing'
+elif statuses_by_kind:
+conversion_state = 'partial'
+upload_state = upload_row.state if upload_row else None
+final_state = 'ready' if conversion_state == 'ready' else None
+if not final_state:
+if conversion_state == 'failed' or upload_state in ('failed', 'conversion_failed'):
+final_state = 'failed'
+elif conversion_state in ('processing', 'partial') or upload_state in ('processing', 'pinned'):
+final_state = 'processing'
+else:
+final_state = 'uploaded'
+return {
+'final_state': final_state,
+'conversion_state': conversion_state,
+'upload_state': upload_state,
+'summary': dict(summary),
+'details': details,
+'title': ec.title if ec else None,
+'content_type': content_type,
+}

async def t_callback_owned_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
message_text = user.translated("ownedContent_menu")
content_list = []
-for content in db_session.query(StoredContent).filter_by(
-owner_address=user.wallet_address(db_session),
-type='onchain/content'
-).all():
+user_addr = await user.wallet_address_async(db_session)
+conditions = []
+if user_addr:
+conditions.append(and_(StoredContent.owner_address == user_addr, StoredContent.type.like('onchain%')))
+conditions.append(and_(StoredContent.user_id == user.id, StoredContent.type.like('local/%')))
+if not conditions:
+conditions = [StoredContent.user_id == user.id]
+stmt = select(StoredContent).where(
+StoredContent.disabled.is_(None),
+or_(*conditions) if len(conditions) > 1 else conditions[0]
+).order_by(StoredContent.created.desc())
+rows = (await db_session.execute(stmt)).scalars().all()
+onchain_hashes = set()
+local_items = []
+icon_map = {
+'ready': '',
+'processing': '',
+'failed': '⚠️',
+'uploaded': '📦',
+}
+for content in rows:
+meta = content.meta or {}
+encrypted_cid = meta.get('content_cid') or meta.get('encrypted_cid') or content.content_id
+status_info = await _compute_content_status(db_session, encrypted_cid, meta.get('content_type'))
+icon = icon_map.get(status_info['final_state'], '📦')
+if content.type.startswith('onchain'):
try:
-metadata_content = StoredContent.from_cid(db_session, content.json_format()['metadata_cid'])
+metadata_content = await StoredContent.from_cid_async(db_session, content.json_format()['metadata_cid'])
with open(metadata_content.filepath, 'r') as f:
metadata_content_json = json.loads(f.read())
except BaseException as e:
make_log("OwnedContent", f"Can't get metadata content: {e}", level='warning')
continue
+onchain_hashes.add(content.hash)
+display_name = metadata_content_json.get('name') or content.cid.serialize_v2()
content_list.append([
{
-'text': metadata_content_json['name'],
+'text': f"{icon} {display_name}"[:64],
'callback_data': f'NC_{content.id}'
}
])
+else:
+local_items.append((content, status_info, icon))
+for content, status_info, icon in local_items:
+if content.hash in onchain_hashes:
+continue
+meta = content.meta or {}
+encrypted_cid = meta.get('encrypted_cid') or content.content_id
+display_name = status_info['title'] or content.filename or content.cid.serialize_v2()
+button_text = f"{icon} {display_name}"
+content_list.append([
+{
+'text': button_text[:64],
+'callback_data': f'LC_{content.id}'
+}
+])

return await tg_process_template(
chat_wrap, message_text,

@@ -59,10 +186,9 @@ async def t_callback_owned_content(query: types.CallbackQuery, memory=None, user
async def t_callback_node_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
content_oid = int(query.data.split('_')[1])
+row = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
return await chat_wrap.send_content(
-db_session, db_session.query(StoredContent).filter_by(
-id=content_oid
-).first(),
+db_session, row,
extra_buttons=[
[{
'text': user.translated('back_button'),

@@ -76,3 +202,51 @@ async def t_callback_node_content(query: types.CallbackQuery, memory=None, user=
router.callback_query.register(t_callback_owned_content, F.data == 'ownedContent')
router.callback_query.register(t_callback_node_content, F.data.startswith('NC_'))

+async def t_callback_local_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
+content_oid = int(query.data.split('_')[1])
+content = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
+if not content:
+return await query.answer(user.translated('error_contentNotFound'), show_alert=True)
+upload_id = (content.meta or {}).get('upload_id')
+upload_session = await db_session.get(UploadSession, upload_id) if upload_id else None
+encrypted_cid = (content.meta or {}).get('encrypted_cid') or content.content_id
+status_info = await _compute_content_status(db_session, encrypted_cid, (content.meta or {}).get('content_type'))
+display_name = status_info['title'] or content.filename or content.cid.serialize_v2()
+state_label = {
+'ready': 'Готов',
+'processing': 'Обработка',
+'failed': 'Ошибка',
+'uploaded': 'Загружено',
+}.get(status_info['final_state'], 'Статус неизвестен')
+lines = [
+f"<b>{display_name}</b>",
+f"Состояние: {state_label}"
+]
+if upload_session:
+lines.append(f"Статус загрузки: {upload_session.state}")
+if upload_session.error:
+lines.append(f"Ошибка: {upload_session.error}")
+if status_info['summary']:
+lines.append("Конвертация:")
+for status, count in status_info['summary'].items():
+lines.append(f"{status}: {count}")
+await chat_wrap.send_message(
+'\n'.join(lines),
+message_type='notification',
+message_meta={'content_id': content.id},
+reply_markup=get_inline_keyboard([
+[{
+'text': user.translated('back_button'),
+'callback_data': 'ownedContent'
+}]
+])
+)

+router.callback_query.register(t_callback_local_content, F.data.startswith('LC_'))
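
_compute_content_status reduces the latest derivative status per kind to a single conversion_state. The same reduction as a self-contained sketch on plain dicts (illustrative, not the function itself):

def reduce_conversion_state(required: set, statuses: dict) -> str:
    present = {k: statuses[k] for k in required if k in statuses}
    if required and all(present.get(k) == 'ready' for k in required):
        return 'ready'
    if any(present.get(k) == 'failed' for k in required):
        return 'failed'
    if any(present.get(k) in ('processing', 'pending') for k in required):
        return 'processing'
    return 'partial' if present else 'pending'

# audio requires both quality tiers, so one ready tier is only 'partial':
assert reduce_conversion_state({'decrypted_low', 'decrypted_high'},
                               {'decrypted_low': 'ready'}) == 'partial'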

View File

@@ -3,9 +3,11 @@ from aiogram.filters import Command
from tonsdk.utils import Address

from app.core._blockchain.ton.connect import TonConnect
+from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.models.wallet_connection import WalletConnection
+from app.core._config import PROJECT_HOST

main_router = Router()

@@ -32,7 +34,13 @@ async def send_home_menu(chat_wrap, user, wallet_connection, **kwargs):
async def send_connect_wallets_list(db_session, chat_wrap, user, **kwargs):
-ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
+# Try to restore existing connection via DB
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
+if ton_connection:
await ton_connect.restore_connection()
wallets = ton_connect._sdk_client.get_wallets()
message_text = user.translated("connectWalletsList_menu")

@@ -66,10 +74,9 @@ async def t_home_menu(__msg, **extra):
else:
message_id = None
-wallet_connection = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).first()
+wallet_connection = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().first()
# if not wallet_connection:
#     return await send_connect_wallets_list(db_session, chat_wrap, user, message_id=message_id)

@@ -77,6 +84,35 @@
return await send_home_menu(chat_wrap, user, wallet_connection, message_id=message_id)

+async def t_admin_panel(message: types.Message, **extra):
+user = extra.get('user')
+chat_wrap = extra.get('chat_wrap')
+admin_host = (PROJECT_HOST or '').rstrip('/')
+if not user or not getattr(user, 'is_admin', False):
+await chat_wrap.send_message("Доступ к админ-панели ограничен.")
+return
+if not admin_host:
+await chat_wrap.send_message("Адрес админ-панели не настроен на этой ноде.")
+return
+admin_url = f"{admin_host}/admin"
+buttons = []
+if admin_url.startswith('https://'):
+buttons.append({
+'text': 'Открыть в Telegram',
+'web_app': types.WebAppInfo(url=admin_url),
+})
+buttons.append({
+'text': 'Открыть в браузере',
+'url': admin_url,
+})
+keyboard = get_inline_keyboard([buttons]) if buttons else None
+await chat_wrap.send_message(
+"Админ-панель доступна по кнопке ниже.",
+keyboard=keyboard,
+)

main_router.message.register(t_home_menu, Command('start'))
+main_router.message.register(t_admin_panel, Command('admin'))
main_router.callback_query.register(t_home_menu, F.data == 'home')

router = main_router
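
The https guard in t_admin_panel exists because Telegram accepts web_app buttons only for HTTPS URLs, so plain-HTTP nodes fall back to a browser link. The branching in isolation (hypothetical helper, not in the diff):

from aiogram import types

def admin_buttons(admin_url: str):
    row = []
    if admin_url.startswith('https://'):
        # WebApp buttons are rejected by Telegram for non-HTTPS URLs
        row.append({'text': 'Open in Telegram', 'web_app': types.WebAppInfo(url=admin_url)})
    row.append({'text': 'Open in browser', 'url': admin_url})
    return [row]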

View File

@@ -7,6 +7,7 @@ from aiogram.filters import Command
from app.bot.routers.home import send_connect_wallets_list, send_home_menu
from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info
+from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log

@@ -33,15 +34,21 @@ async def t_tonconnect_dev_menu(message: types.Message, memory=None, user=None,
keyboard = []
-ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
+# Restore recent connection
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
make_log("TonConnect_DevMenu", f"Available wallets: {ton_connect._sdk_client.get_wallets()}", level='debug')
+if ton_connection:
await ton_connect.restore_connection()
make_log("TonConnect_DevMenu", f"SDK connected?: {ton_connect.connected}", level='info')
if not ton_connect.connected:
if ton_connection:
make_log("TonConnect_DevMenu", f"Invalidating old connection", level='debug')
ton_connection.invalidated = True
-db_session.commit()
+await db_session.commit()

message_text = f"""<b>Wallet is not connected</b>

@@ -71,7 +78,12 @@ Use /dev_tonconnect <code>{wallet_app_name}</code> for connect to wallet."""
async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
wallet_app_name = query.data.split("_")[1]
-ton_connect, ton_connection = TonConnect.by_user(db_session, user)
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
+if ton_connection:
await ton_connect.restore_connection()
connection_link = await ton_connect.new_connection(wallet_app_name)
ton_connect.connected

@@ -98,10 +110,9 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
start_ts = datetime.now()
while datetime.now() - start_ts < timedelta(seconds=180):
-new_connection = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).first()
+new_connection = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().first()
if new_connection:
await tg_process_template(
chat_wrap, user.translated('p_successConnectWallet')

@@ -115,14 +126,13 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
async def t_callback_disconnect_wallet(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
-wallet_connections = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).all()
+wallet_connections = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().all()
for wallet_connection in wallet_connections:
wallet_connection.invalidated = True
-db_session.commit()
+await db_session.commit()
return await send_home_menu(chat_wrap, user, None, message_id=query.message.message_id)
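
The same five-line restore block now replaces TonConnect.by_user in every handler; a shared helper would collapse the repetition. A sketch of such a helper (hypothetical, not part of this changeset):

from sqlalchemy import select, and_

async def latest_ton_connection(db_session, user):
    result = await db_session.execute(
        select(WalletConnection).where(
            and_(WalletConnection.user_id == user.id,
                 WalletConnection.invalidated == False,
                 WalletConnection.network == 'ton')
        ).order_by(WalletConnection.created.desc())
    )
    ton_connection = result.scalars().first()
    ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
    return ton_connect, ton_connection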

View File

@@ -6,6 +6,9 @@ from aiogram.fsm.storage.memory import MemoryStorage
from app.bot.middleware import UserDataMiddleware
from app.client_bot.routers.index import main_router
+def create_dispatcher() -> Dispatcher:
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)
+return dp

View File

@@ -6,6 +6,7 @@ from aiogram import types, Router, F
from app.core._keyboards import get_inline_keyboard
from app.core.models.node_storage import StoredContent
+from sqlalchemy import select, and_
import json
from app.core.logger import make_log
from app.core.models.content.user_content import UserAction, UserContent

@@ -30,7 +31,7 @@ CACHE_CHAT_ID = -1002390124789
async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
content_oid = int(query.data.split('_')[1])
is_cancel_request = query.data.split('_')[2] == 'cancel' if len(query.data.split('_')) > 2 else False
-content = db_session.query(StoredContent).filter_by(id=content_oid).first()
+content = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
if not content:
return await query.answer(user.translated('error_contentNotFound'), show_alert=True)

@@ -43,11 +44,16 @@ async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=No
make_log("Purchase", f"User {user.id} initiated purchase for content ID {content_oid}. License price: {license_price_num}.", level='info')
-ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
+if ton_connection:
await ton_connect.restore_connection()
assert ton_connect.connected, "No connected wallet"
-user_wallet_address = user.wallet_address(db_session)
+user_wallet_address = await user.wallet_address_async(db_session)
memory._app.add_task(ton_connect._sdk_client.send_transaction({
'valid_until': int(datetime.now().timestamp() + 300),

@@ -76,18 +82,15 @@ async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=No
else:
# Logging cancellation attempt with detailed information
make_log("Purchase", f"User {user.id} cancelled purchase for content ID {content_oid}.", level='info')
-action = db_session.query(UserAction).filter_by(
-type='purchase',
-content_id=content_oid,
-user_id=user.id,
-status='requested'
-).first()
+action = (await db_session.execute(select(UserAction).where(
+and_(UserAction.type == 'purchase', UserAction.content_id == content_oid, UserAction.user_id == user.id, UserAction.status == 'requested')
+))).scalars().first()
if not action:
return await query.answer()
action.status = 'canceled'
-db_session.commit()
+await db_session.commit()
await chat_wrap.send_content(db_session, content, message_id=query.message.message_id)

@@ -104,9 +107,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
args = None
if source_args_ext.startswith('Q'):
license_onchain_address = source_args_ext[1:]
-licensed_content = db_session.query(UserContent).filter_by(
-onchain_address=license_onchain_address,
-).first().content
+licensed_content = (await db_session.execute(select(UserContent).where(UserContent.onchain_address == license_onchain_address))).scalars().first().content
make_log("InlineSearch", f"Query '{query.query}' is a license query for content ID {licensed_content.id}.", level='info')
args = licensed_content.cid.serialize_v2()
else:

@@ -118,15 +119,15 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
content_list = []
search_query = {'hash': cid.content_hash_b58}
make_log("InlineSearch", f"Searching with query '{search_query}'.", level='info')
-content = db_session.query(StoredContent).filter_by(**search_query).first()
-content_prod = content.open_content(db_session)
+content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == cid.content_hash_b58))).scalars().first()
+content_prod = await content.open_content_async(db_session)
# Get both encrypted and decrypted content objects
encrypted_content = content_prod['encrypted_content']
decrypted_content = content_prod['decrypted_content']
decrypted_content_meta = decrypted_content.json_format()
try:
-metadata_content = StoredContent.from_cid(db_session, content.json_format()['metadata_cid'])
+metadata_content = await StoredContent.from_cid_async(db_session, content.json_format()['metadata_cid'])
with open(metadata_content.filepath, 'r') as f:
metadata_content_json = json.loads(f.read())
except BaseException as e:

@@ -144,7 +145,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
result_kwargs = {}
try:
-cover_content = StoredContent.from_cid(db_session, decrypted_content_meta.get('cover_cid') or None)
+cover_content = await StoredContent.from_cid_async(db_session, decrypted_content_meta.get('cover_cid') or None)
except BaseException as e:
cover_content = None

@@ -152,9 +153,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
result_kwargs['thumb_url'] = cover_content.web_url
content_type_declared = decrypted_content_meta.get('content_type', 'application/x-binary').split('/')[0]
-preview_content = db_session.query(StoredContent).filter_by(
-hash=content.meta.get('converted_content', {}).get('low_preview')
-).first()
+preview_content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == content.meta.get('converted_content', {}).get('low_preview')))).scalars().first()
content_type_declared = {
'mp3': 'audio',
'flac': 'audio',

@@ -162,7 +161,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
'mov': 'video'
}.get(preview_content.filename.split('.')[-1], content_type_declared)
-hashtags_str = (' '.join(f"#{_h}" for _h in metadata_content_json.get('hashtags', []))).strip()
+hashtags_str = metadata_content_json.get('description', '').strip()
if hashtags_str:
hashtags_str = hashtags_str + '\n'

@@ -196,7 +195,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
**decrypted_content.meta,
'telegram_file_cache_preview': preview_file_id
}
-db_session.commit()
+await db_session.commit()
except Exception as e:
# Logging error during preview upload with detailed content type and query information
make_log("InlineSearch", f"Error uploading preview for content type '{content_type_declared}' during inline query '{query.query}': {e}", level='error')

@@ -212,7 +211,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
types.InlineQueryResultCachedAudio(
id=f"NC_{content.id}_{int(datetime.now().timestamp() // 60)}",
audio_file_id=decrypted_content.meta['telegram_file_cache_preview'],
-caption=hashtags_str + user.translated('p_playerContext_preview'),
+caption=hashtags_str + user.translated('p_playerAudioContext_preview'),
parse_mode='html',
reply_markup=get_inline_keyboard([
[

@@ -242,7 +241,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
id=f"NC_{content.id}_{int(datetime.now().timestamp() // 60)}",
video_file_id=decrypted_content.meta['telegram_file_cache_preview'],
title=title,
-caption=hashtags_str + user.translated('p_playerContext_preview'),
+caption=hashtags_str + user.translated('p_playerVideoContext_preview'),
parse_mode='html',
reply_markup=get_inline_keyboard([
[
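
The preview-type fallback inside the inline handler reads as a small pure function: take the preview filename extension, else keep the declared MIME top level. In isolation (mapping abbreviated to the extensions visible in the diff):

EXT_TO_KIND = {'mp3': 'audio', 'flac': 'audio', 'mov': 'video'}  # abbreviated

def preview_kind(filename: str, declared_mime: str) -> str:
    declared = declared_mime.split('/')[0]
    ext = filename.rsplit('.', 1)[-1].lower()
    return EXT_TO_KIND.get(ext, declared)

assert preview_kind('track.flac', 'application/x-binary') == 'audio'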

View File

@@ -3,11 +3,13 @@ from aiogram.filters import Command
from tonsdk.utils import Address

from app.core._blockchain.ton.connect import TonConnect
+from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
from app.core.models.wallet_connection import WalletConnection
from app.core.models.node_storage import StoredContent
+from app.core._config import PROJECT_HOST

main_router = Router()

@@ -32,7 +34,12 @@ async def send_home_menu(chat_wrap, user, wallet_connection, **kwargs):
async def send_connect_wallets_list(db_session, chat_wrap, user, **kwargs):
-ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
+if ton_connection:
await ton_connect.restore_connection()
wallets = ton_connect._sdk_client.get_wallets()
message_text = user.translated("connectWalletsList_menu")

@@ -66,10 +73,9 @@ async def t_home_menu(__msg, **extra):
else:
message_id = None
-wallet_connection = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).first()
+wallet_connection = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().first()
# if not wallet_connection:
#     return await send_connect_wallets_list(db_session, chat_wrap, user, message_id=message_id)

@@ -81,12 +87,44 @@ async def t_home_menu(__msg, **extra):
make_log("Home", f"Home menu args: {args}", level='debug')
if args:
if args[0].startswith('C'):
-content = StoredContent.from_cid(db_session, args[0][1:])
+payload = args[0][1:]
+if '!' in payload:
+payload = payload.split('!', 1)[0]
+content = StoredContent.from_cid(db_session, payload)
return await chat_wrap.send_content(db_session, content, message_id=message_id)

return await send_home_menu(chat_wrap, user, wallet_connection, message_id=message_id)

+async def t_admin_panel(message: types.Message, **extra):
+user = extra.get('user')
+chat_wrap = extra.get('chat_wrap')
+admin_host = (PROJECT_HOST or '').rstrip('/')
+if not user or not getattr(user, 'is_admin', False):
+await chat_wrap.send_message("Доступ к админ-панели ограничен.")
+return
+if not admin_host:
+await chat_wrap.send_message("Адрес админ-панели не настроен на этой ноде.")
+return
+admin_url = f"{admin_host}/admin"
+buttons = []
+if admin_url.startswith('https://'):
+buttons.append({
+'text': 'Открыть в Telegram',
+'web_app': types.WebAppInfo(url=admin_url),
+})
+buttons.append({
+'text': 'Открыть в браузере',
+'url': admin_url,
+})
+keyboard = get_inline_keyboard([buttons]) if buttons else None
+await chat_wrap.send_message(
+"Админ-панель доступна по кнопке ниже.",
+keyboard=keyboard,
+)

main_router.message.register(t_home_menu, Command('start'))
+main_router.message.register(t_admin_panel, Command('admin'))
main_router.callback_query.register(t_home_menu, F.data == 'home')

router = main_router
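
The new /start payload handling tolerates a '!'-suffixed tail after the content id. The parsing in isolation (format inferred from the diff; the helper is illustrative):

from typing import Optional

def parse_content_payload(arg: str) -> Optional[str]:
    if not arg.startswith('C'):
        return None
    payload = arg[1:]
    return payload.split('!', 1)[0]  # drop the optional '!...' tail

assert parse_content_payload('Cabc123!ref42') == 'abc123'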

View File

@@ -1,4 +1,6 @@
from aiogram import types, Router, F
+from sqlalchemy import select

from app.core.logger import make_log
from app.core.models import StarsInvoice

@@ -12,9 +14,10 @@ async def t_pre_checkout_query_stars_processing(pre_checkout_query: types.PreChe
invoice_id = pre_checkout_query.invoice_payload
-existing_invoice = db_session.query(StarsInvoice).filter(
-StarsInvoice.external_id == invoice_id
-).first()
+result = await db_session.execute(
+select(StarsInvoice).where(StarsInvoice.external_id == invoice_id)
+)
+existing_invoice = result.scalars().first()
if not existing_invoice:
return await pre_checkout_query.answer(ok=False, error_message="Invoice not found")

View File

@@ -7,6 +7,7 @@ from aiogram.filters import Command
from app.client_bot.routers.home import send_connect_wallets_list, send_home_menu
from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info
+from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log

@@ -34,15 +35,20 @@ async def t_tonconnect_dev_menu(message: types.Message, memory=None, user=None,
keyboard = []
-ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
make_log("TonConnect_DevMenu", f"Available wallets: {ton_connect._sdk_client.get_wallets()}", level='debug')
+if ton_connection:
await ton_connect.restore_connection()
make_log("TonConnect_DevMenu", f"SDK connected?: {ton_connect.connected}", level='info')
if not ton_connect.connected:
if ton_connection:
make_log("TonConnect_DevMenu", f"Invalidating old connection", level='debug')
ton_connection.invalidated = True
-db_session.commit()
+await db_session.commit()

message_text = f"""<b>Wallet is not connected</b>

@@ -73,7 +79,12 @@ Use /dev_tonconnect <code>{wallet_app_name}</code> for connect to wallet."""
async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, user=None, db_session=None,
chat_wrap=None, **extra):
wallet_app_name = query.data.split("_")[1]
-ton_connect, ton_connection = TonConnect.by_user(db_session, user)
+result = await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
+).order_by(WalletConnection.created.desc()))
+ton_connection = result.scalars().first()
+ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
+if ton_connection:
await ton_connect.restore_connection()
connection_link = await ton_connect.new_connection(wallet_app_name)
ton_connect.connected

@@ -100,10 +111,9 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
start_ts = datetime.now()
while datetime.now() - start_ts < timedelta(seconds=180):
-new_connection = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).first()
+new_connection = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().first()
if new_connection:
await tg_process_template(
chat_wrap, user.translated('p_successConnectWallet')

@@ -118,14 +128,13 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
async def t_callback_disconnect_wallet(query: types.CallbackQuery, memory=None, user=None, db_session=None,
chat_wrap=None, **extra):
-wallet_connections = db_session.query(WalletConnection).filter(
-WalletConnection.user_id == user.id,
-WalletConnection.invalidated == False
-).all()
+wallet_connections = (await db_session.execute(select(WalletConnection).where(
+and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
+))).scalars().all()
for wallet_connection in wallet_connections:
wallet_connection.invalidated = True
-db_session.commit()
+await db_session.commit()
return await send_home_menu(chat_wrap, user, None, message_id=query.message.message_id)

Binary file not shown.

View File

@@ -5,7 +5,7 @@ from app.core._secrets import service_wallet
class Blank(Contract):
-code = 'B5EE9C72010104010042000114FF00F4A413F4BCF2C80B010202CA0203004FD043A0E9AE43F48061DA89A1F480618E0BE5C323A803A1A843F60803A1DA3DDAA7A861DAA9E2026F0007A0DD7C12'
+code = 'b5ee9c72410104010042000114ff00f4a413f4bcf2c80b010202ca03020007a0dd7c12004fd043a0e9ae43f48061da89a1f480618e0be5c323a803a1a843f60803a1da3ddaa7a861daa9e2026f102bdd33'
def __init__(self, **kwargs):
kwargs['code'] = Cell.one_from_boc(self.code)

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,7 @@ from tonsdk.contract import Contract
class Platform(Contract):
-code = 'b5ee9c7241021601000310000114ff00f4a413f4bcf2c80b010201620d0202012006030201200504004bbac877282f037625a5e1bf4a9bb4e8e57adf780d02781ee2c2b80129dc6a90f23b01657f9d980057b905bed44d0fa4001f861d3ff01f862d401f863f843d0d431d430f864d401f865d1f845d0f84201d430f84180201200a07020120090800a1b4f47da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba3f089f050e0079197ff92826190a0079e2d960f9992e04191960227e801e801960193f200e0e9919605940f97ff93a10000fb5daeeb00c9f05100201200c0b0059b6a9bda89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e1f051f085f087f089f08b00051b56ba63da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e391960f999300202c70f0e0007a0dd7c120201cf111000113e910c30003cb8536002f30cf434c0c05c6c2497c0f83e90087c007e900c7e800c5c75c87e800c7e800c1cea6d0008f5d27048245c2540f4c7d411388830002497c1783b51343e90007e1874ffc07e18b5007e18fe10f4350c750c3e1935007e1974482084091ea7aeaea497c178082084152474232ea3a14c104c36cf380c4cbe1071c160131201dcf2e19120820833cc77ba9730d4d30730fb00e0208210b99cd03bba9701fa4001f86101de208210d81c632fba9601d401f86501de208210b5de5f9eba8e8b30fa40fa00306d6d71db3ce082102fa30f96ba98d401fb04d430ed54e030f845f843f842c8f841cf16cbffccccc9ed541502f082084c4b4001a013bef2e20801d3ffd4d430f844f82870f842c8cbffc9c85003cf16cb07ccc97020c8cb0113f400f400cb00c920f9007074c8cb02ca07cbffc9d0f843d070c804d014cf16f843f842c8cbfff828cf16c903d430c8cc13ccc9c8cc17cbff5007cf1614cc15cccc43308040db3cf842a4f86215140024f845f843f842c8f841cf16cbffccccc9ed540078708010c8cb055006cf165004fa0214cb68216e947032cb019bc858cf17c97158cb00f400e2226e95327058cb0099c85003cf17c958f400e2c901fb004e32cb65'
+code = 'b5ee9c724102160100032e000114ff00f4a413f4bcf2c80b010201620d0202012006030201200504004bbac877582f053b50ddfe5a9533f2e76ac054411db94432a1f7b7ae17fc64cf7aec5df8705d580057b905bed44d0fa4001f861d3ff01f862d401f863f843d0d431d430f864d401f865d1f845d0f84201d430f84180201200a07020120090800a1b4f47da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba3f089f050e0079197ff92826190a0079e2d960f9992e04191960227e801e801960193f200e0e9919605940f97ff93a10000fb5daeeb00c9f05100201200c0b0059b6a9bda89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e1f051f085f087f089f08b00051b56ba63da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e391960f999300202c70f0e0007a0dd7c120201cf111000113e910c30003cb8536002f30cf434c0c05c6c2497c0f83e90087c007e900c7e800c5c75c87e800c7e800c1cea6d0008f5d27048245c2540f4c7d411388830002497c1783b51343e90007e1874ffc07e18b5007e18fe10f4350c750c3e1935007e1974482084091ea7aeaea497c178082084152474232ea3a14c104c36cf380c4cbe1071c160131201faf2e19120820833cc77ba9730d4d30730fb00e0208210b99cd03bba9701fa4001f86101de208210d81c632fba9601d401f86501de208210b5de5f9eba8e8b30fa40fa00306d6d71db3ce082102fa30f96ba8e16f404216e91319301fb04e2f40430206e913092ed54e2e030f845f843f842c8f841cf16cbffccccc9ed541501f682084c4b4001a013bef2e20801d3fffa4021d70b01c0009231029133e202d4d430f844f82870f842c8cbffc9c85003cf16cb07ccc97020c8cb0113f400f400cb00c920f9007074c8cb02ca07cbffc9d0f843d070c804d014cf16f843f842c8cbfff828cf16c903d430c8cc13ccc9c8cc17cbff5007cf1614cc15cc14013ccc43308040db3cf842a4f862f845f843f842c8f841cf16cbffccccc9ed54150078708010c8cb055006cf165004fa0214cb68216e947032cb009bc858cf17c97158cb00f400e2226e95327058cb0099c85003cf17c958f400e2c901fb003366cbbe'
codebase_version = 5
def __init__(self, **kwargs):
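
Both code constants are hex-encoded BOCs, which Cell.one_from_boc parses directly, as the class itself relies on. A sanity-check sketch that the upgrade really changes the code cell (method names per tonsdk's Cell API; treat as an assumption):

from tonsdk.boc import Cell

# OLD_CODE_HEX / NEW_CODE_HEX stand for the full hex strings in the diff above
old_cell = Cell.one_from_boc(OLD_CODE_HEX)
new_cell = Cell.one_from_boc(NEW_CODE_HEX)
assert old_cell.bytes_hash() != new_cell.bytes_hash()  # code upgrade is real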

View File

@@ -12,11 +12,34 @@ kwargs = {}
if int(os.getenv('INIT_DEPLOY_PLATFORM_CONTRACT', 0)) == 0:
kwargs['address'] = Address(MY_PLATFORM_CONTRACT)

-platform = Platform(
-admin_address=Address('UQAjz4Kdqoo4_Obg-UrUmuhoUB2W00vngZoX0MnAAnetZuAk'),
+def platform_with_salt(s: int = 0):
+return Platform(
+admin_address=Address('UQD3XALhbETNo7ItrdPNFzMJtRHC5u6dIb39DCYa40jnWZdg'),
blank_code=Cell.one_from_boc(Blank.code),
cop_code=Cell.one_from_boc(COP_NFT.code),
-collection_content_uri=f'{PROJECT_HOST}/api/platform-metadata.json',
+collection_content_uri=f'{PROJECT_HOST}/api/platform-metadata.json' + f"?s={s}",
**kwargs
)

+platform = platform_with_salt()
+
+if int(os.getenv('INIT_DEPLOY_PLATFORM_CONTRACT', 0)) == 1:
+def is_nice_address(address: Address):
+bounceable_addr = address.to_string(True, True, True)
+non_bounceable_addr = address.to_string(True, True, False)
+if '-' in bounceable_addr or '-' in non_bounceable_addr:
+return False
+if '_' in bounceable_addr or '_' in non_bounceable_addr:
+return False
+if bounceable_addr[-1] != 'A':
+return False
+return True
+salt_value = 0
+while not is_nice_address(platform.address):
+platform = platform_with_salt(salt_value)
+salt_value += 1
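
The mining loop works because the salt lands in the contract's initial data via the metadata URI, and the address is a hash of code plus initial data, so each salt yields a fresh candidate address. The same search made self-contained and bounded (illustrative; platform_with_salt and is_nice_address are the functions defined above):

def mine_nice_address(max_iters: int = 1_000_000):
    for salt_value in range(max_iters):
        candidate = platform_with_salt(salt_value)
        if is_nice_address(candidate.address):
            return candidate, salt_value
    raise RuntimeError("no nice address found within max_iters")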

View File

@@ -7,7 +7,12 @@ load_dotenv(dotenv_path='.env')
PROJECT_HOST = os.getenv('PROJECT_HOST', 'http://127.0.0.1:8080')
SANIC_PORT = int(os.getenv('SANIC_PORT', '8080'))

+# Path inside the running backend container where content files are visible
UPLOADS_DIR = os.getenv('UPLOADS_DIR', '/app/data')
+# Host path where the same content directory is mounted (used for docker -v from within container)
+BACKEND_DATA_DIR_HOST = os.getenv('BACKEND_DATA_DIR_HOST', '/Storage/storedContent')
+# Host path for converter logs (used for docker -v). Optional.
+BACKEND_LOGS_DIR_HOST = os.getenv('BACKEND_LOGS_DIR_HOST', '/Storage/logs/converter')

if not os.path.exists(UPLOADS_DIR):
os.makedirs(UPLOADS_DIR)

@@ -16,12 +21,31 @@ assert TELEGRAM_API_KEY, "Telegram API_KEY required"
CLIENT_TELEGRAM_API_KEY = os.environ.get('CLIENT_TELEGRAM_API_KEY')
assert CLIENT_TELEGRAM_API_KEY, "Client Telegram API_KEY required"

import httpx
-TELEGRAM_BOT_USERNAME = httpx.get(f"https://api.telegram.org/bot{TELEGRAM_API_KEY}/getMe").json()['result']['username']
-CLIENT_TELEGRAM_BOT_USERNAME = httpx.get(f"https://api.telegram.org/bot{CLIENT_TELEGRAM_API_KEY}/getMe").json()['result']['username']
-MYSQL_URI = os.environ['MYSQL_URI']
-MYSQL_DATABASE = os.environ['MYSQL_DATABASE']
+def _resolve_bot_username(token: str, label: str) -> str:
+try:
+resp = httpx.get(f"https://api.telegram.org/bot{token}/getMe", timeout=10.0)
+resp.raise_for_status()
+payload = resp.json()
+except Exception as exc:
+raise RuntimeError(f"{label} Telegram token validation failed: {exc}") from exc
+if not payload.get('ok'):
+detail = payload.get('description') or 'unknown Telegram API error'
+raise RuntimeError(f"{label} Telegram token validation failed: {detail}")
+username = (payload.get('result') or {}).get('username')
+if not username:
+raise RuntimeError(f"{label} Telegram token validation failed: username missing in Telegram response")
+return username
+
+TELEGRAM_BOT_USERNAME = _resolve_bot_username(TELEGRAM_API_KEY, 'Uploader bot')
+CLIENT_TELEGRAM_BOT_USERNAME = _resolve_bot_username(CLIENT_TELEGRAM_API_KEY, 'Client bot')
+
+# Unified database URL (PostgreSQL)
+DATABASE_URL = os.environ['DATABASE_URL']

LOG_LEVEL = os.getenv('LOG_LEVEL', 'DEBUG')
LOG_DIR = os.getenv('LOG_DIR', 'logs')

@@ -32,7 +56,7 @@ _now_str = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
LOG_FILEPATH = f"{LOG_DIR}/{_now_str}.log"

WEB_APP_URLS = {
-'uploadContent': f"https://web2-client.vercel.app/uploadContent"
+'uploadContent': f"https://my-public-node-103.projscale.dev/uploadContent"
}

ALLOWED_CONTENT_TYPES = [

@@ -48,5 +72,5 @@ TONCENTER_HOST = os.getenv('TONCENTER_HOST', 'https://toncenter.com/api/v2/')
TONCENTER_API_KEY = os.getenv('TONCENTER_API_KEY')
TONCENTER_V3_HOST = os.getenv('TONCENTER_V3_HOST', 'https://toncenter.com/api/v3/')
-MY_PLATFORM_CONTRACT = 'EQDmWp6hbJlYUrXZKb9N88sOrTit630ZuRijfYdXEHLtheMY'
+MY_PLATFORM_CONTRACT = 'EQBVjuNuaIK87v9nm7mghgJ41ikqfx3GNBFz05GfmNbRQ9EA'
MY_FUND_ADDRESS = 'UQDarChHFMOI2On9IdHJNeEKttqepgo0AY4bG1trw8OAAwMY'
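
DATABASE_URL replaces the MYSQL_* pair. Given the async SQLAlchemy usage across this changeset, an asyncpg-style URL is the likely shape; the exact driver string is an assumption, it does not appear in the diff:

import os
from sqlalchemy.ext.asyncio import create_async_engine

# assumed format: postgresql+asyncpg://user:password@host:5432/dbname
engine = create_async_engine(os.environ['DATABASE_URL'])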

Binary file not shown.

Binary file not shown.

View File

@@ -36,9 +36,10 @@ async def create_new_encryption_key(db_session, user_id: int = None) -> KnownKey
         meta={"I_user_id": user_id} if user_id else None,
         created=datetime.now()
     )
+    from sqlalchemy import select
     db_session.add(new_key)
-    db_session.commit()
-    new_key = db_session.query(KnownKey).filter(KnownKey.seed_hash == new_seed_hash).first()
+    await db_session.commit()
+    new_key = (await db_session.execute(select(KnownKey).where(KnownKey.seed_hash == new_seed_hash))).scalars().first()
     assert new_key, "Key not created"
     return new_key
@@ -46,42 +47,51 @@
 async def create_encrypted_content(
     db_session, decrypted_content: StoredContent,
 ) -> StoredContent:
-    encrypted_content = db_session.query(StoredContent).filter(
-        StoredContent.id == decrypted_content.decrypted_content_id
-    ).first()
+    from sqlalchemy import select
+    # Try to find an already created encrypted counterpart for this decrypted content
+    encrypted_content = (
+        await db_session.execute(
+            select(StoredContent).where(StoredContent.decrypted_content_id == decrypted_content.id)
+        )
+    ).scalars().first()
     if encrypted_content:
         make_log("create_encrypted_content", f"(d={decrypted_content.cid.serialize_v2()}) => (e={encrypted_content.cid.serialize_v2()}): already exist (found by decrypted content)", level="debug")
         return encrypted_content
     encrypted_content = None
-    if decrypted_content.key is None:
+    # Avoid accessing relationship attributes in async context to prevent MissingGreenlet
+    if not decrypted_content.key_id:
         key = await create_new_encryption_key(db_session, user_id=decrypted_content.user_id)
         decrypted_content.key_id = key.id
-        db_session.commit()
-        decrypted_content = db_session.query(StoredContent).filter(
-            StoredContent.id == decrypted_content.id
-        ).first()
+        await db_session.commit()
     assert decrypted_content.key_id, "Key not assigned"
+    # Explicitly load the key to avoid lazy-loading via relationship in async mode
+    key = (
+        await db_session.execute(select(KnownKey).where(KnownKey.id == decrypted_content.key_id))
+    ).scalars().first()
+    # If the referenced key is missing or malformed, create a fresh one
+    if not key or not key.seed:
+        key = await create_new_encryption_key(db_session, user_id=decrypted_content.user_id)
+        decrypted_content.key_id = key.id
+        await db_session.commit()
     decrypted_path = os.path.join(UPLOADS_DIR, decrypted_content.hash)
     decrypted_bin = b58decode(decrypted_content.hash)
-    key = decrypted_content.key
     cipher = AESCipher(key.seed_bin)
     encrypted_bin = cipher.encrypt(decrypted_bin)
     encrypted_hash_bin = sha256(encrypted_bin).digest()
     encrypted_hash = b58encode(encrypted_hash_bin).decode()
-    encrypted_content = db_session.query(StoredContent).filter(
-        StoredContent.hash == encrypted_hash
-    ).first()
+    encrypted_content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == encrypted_hash))).scalars().first()
     if encrypted_content:
         make_log("create_encrypted_content", f"(d={decrypted_content.cid.serialize_v2()}) => (e={encrypted_content.cid.serialize_v2()}): already exist (found by encrypted_hash)", level="debug")
         return encrypted_content
     encrypted_content = None
-    encrypted_meta = decrypted_content.meta
+    encrypted_meta = dict(decrypted_content.meta or {})
     encrypted_meta["encrypt_algo"] = "AES256"
     encrypted_content = StoredContent(
@@ -99,19 +109,15 @@ async def create_encrypted_content(
         created=datetime.now(),
     )
     db_session.add(encrypted_content)
-    db_session.commit()
+    await db_session.commit()
     encrypted_path = os.path.join(UPLOADS_DIR, encrypted_hash)
     async with aiofiles.open(encrypted_path, mode='wb') as file:
         await file.write(encrypted_bin)
-    encrypted_content = db_session.query(StoredContent).filter(
-        StoredContent.hash == encrypted_hash
-    ).first()
+    encrypted_content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == encrypted_hash))).scalars().first()
     assert encrypted_content, "Content not created"
     make_log("create_encrypted_content", f"(d={decrypted_content.cid.serialize_v2()}) => (e={encrypted_content.cid.serialize_v2()}): created new content/bin", level="debug")
     return encrypted_content
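The change above is one instance of the commit's recurring sync-to-async ORM migration: each blocking session.query(...).first() becomes an awaited select() execution, and each commit() gains an await. A self-contained sketch of the pattern with a stand-in Thing model (not a model from this repo):

from sqlalchemy import String, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class Thing(Base):  # stand-in model, for illustration only
    __tablename__ = "things"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String(64))

async def get_thing(session, name: str):
    # sync form (old): session.query(Thing).filter(Thing.name == name).first()
    result = await session.execute(select(Thing).where(Thing.name == name))
    return result.scalars().first()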

View File

@@ -1,7 +1,17 @@
-import base58
-import nacl.encoding
-import nacl.signing
+from app.core._utils.b58 import b58encode, b58decode
+try:
+    import nacl.encoding
+    import nacl.signing
+    import nacl.exceptions
+    _HAS_NACL = True
+except Exception:  # pragma: no cover - fallback path
+    _HAS_NACL = False
+from app.core._utils.hash import blake3_digest

-class Signer:
-    def __init__(self, seed: bytes):
+if _HAS_NACL:
+    class Signer:
+        def __init__(self, seed: bytes):
@@ -13,12 +23,36 @@ class Signer:
         def sign(self, data_bytes: bytes) -> str:
             signed_message = self.signing_key.sign(data_bytes)
             signature = signed_message.signature
-            return base58.b58encode(signature).decode()
+            return b58encode(signature).decode()

         def verify(self, data_bytes: bytes, signature: str) -> bool:
-            signature_bytes = base58.b58decode(signature)
+            signature_bytes = b58decode(signature)
             try:
                 self.verify_key.verify(data_bytes, signature_bytes)
                 return True
             except nacl.exceptions.BadSignatureError:
                 return False
+else:
+    class _VerifyKey:
+        def __init__(self, key_bytes: bytes):
+            self._key_bytes = key_bytes
+
+        def encode(self) -> bytes:
+            return self._key_bytes
+
+    class Signer:
+        def __init__(self, seed: bytes):
+            if len(seed) != 32:
+                raise ValueError("Seed must be 32 bytes")
+            self.seed = seed
+            self.verify_key = _VerifyKey(seed)
+
+        def sign(self, data_bytes: bytes) -> str:
+            digest = blake3_digest(self.seed + data_bytes)
+            return b58encode(digest).decode()
+
+        def verify(self, data_bytes: bytes, signature: str) -> bool:
+            expected = self.sign(data_bytes)
+            return expected == signature
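Worth noting: the fallback branch is not a real signature scheme. Its sign() is a keyed blake3 digest of seed + data and verify() just recomputes it, so only a holder of the same seed can verify; it keeps nodes bootable without PyNaCl but offers no third-party verifiability. A hedged round-trip sketch (import path as used elsewhere in this diff):

from os import urandom
from app.core._crypto.signer import Signer

signer = Signer(urandom(32))
sig = signer.sign(b"payload")
# True under both branches; with nacl installed this is a real Ed25519 signature
assert signer.verify(b"payload", sig)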

View File

@@ -1,44 +1,115 @@
 from os import getenv, urandom
+import os
+import time
+import json
 from nacl.bindings import crypto_sign_seed_keypair
 from tonsdk.utils import Address
 from app.core._blockchain.ton.wallet_v3cr3 import WalletV3CR3
-from app.core.models._config import ServiceConfig
-from app.core.storage import db_session
 from app.core.logger import make_log
-import os
+from sqlalchemy import create_engine, inspect
+from sqlalchemy.orm import Session
+from typing import Optional
+from app.core.models._config import ServiceConfigValue

-def load_hot_pair():
-    with db_session() as session:
-        service_config = ServiceConfig(session)
-        hot_seed = service_config.get('private_key')
-        if hot_seed is None:
-            make_log("HotWallet", "No seed found, generating new one", level='info')
-            hot_seed = os.getenv("TON_INIT_HOT_SEED")
-            if not hot_seed:
-                hot_seed = urandom(32)
-                make_log("HotWallet", f"Generated random seed")
-            else:
-                hot_seed = bytes.fromhex(hot_seed)
-                make_log("HotWallet", f"Loaded seed from env")
-            service_config.set('private_key', hot_seed.hex())
-            return load_hot_pair()
-        hot_seed = bytes.fromhex(hot_seed)
-        public_key, private_key = crypto_sign_seed_keypair(hot_seed)
-        return hot_seed, public_key, private_key
+def _load_seed_from_env_or_generate() -> bytes:
+    seed_hex = os.getenv("TON_INIT_HOT_SEED")
+    if seed_hex:
+        make_log("HotWallet", "Loaded seed from env")
+        return bytes.fromhex(seed_hex)
+    make_log("HotWallet", "No seed provided; generating ephemeral seed", level='info')
+    return urandom(32)
+
+def _init_seed_via_db() -> bytes:
+    """Store and read hot seed from PostgreSQL service_config (key='private_key').
+    Primary node writes it once; workers wait until it appears.
+    """
+    from app.core._config import DATABASE_URL
+    engine = create_engine(DATABASE_URL, pool_pre_ping=True)
+    role = os.getenv("NODE_ROLE", "worker").lower()
+
+    def db_ready(conn) -> bool:
+        try:
+            inspector = inspect(conn)
+            return inspector.has_table('service_config')
+        except Exception:
+            return False
+
+    # Wait for table existence, reconnecting to avoid stale transactions
+    start = time.time()
+    while True:
+        with engine.connect() as conn:
+            if db_ready(conn):
+                break
+        time.sleep(0.5)
+        if time.time() - start > 120:
+            raise TimeoutError("service_config table not available")
+
+    def read_seed() -> Optional[bytes]:
+        # Use a fresh connection/session per read to avoid snapshot staleness
+        try:
+            with engine.connect() as rconn:
+                with Session(bind=rconn) as s:
+                    row = s.query(ServiceConfigValue).filter(ServiceConfigValue.key == 'private_key').first()
+                    if not row:
+                        return None
+                    packed = row.packed_value or {}
+                    if isinstance(packed, str):
+                        packed = json.loads(packed)
+                    seed_hex = packed.get('value')
+                    return bytes.fromhex(seed_hex) if seed_hex else None
+        except Exception:
+            return None
+
+    seed = read_seed()
+    if seed:
+        return seed
+    if role == "primary":
+        seed = _load_seed_from_env_or_generate()
+        # Try insert; if another primary raced, ignore
+        try:
+            with engine.connect() as wconn:
+                with Session(bind=wconn) as s:
+                    s.add(ServiceConfigValue(key='private_key', packed_value={"value": seed.hex()}))
+                    s.commit()
+            make_log("HotWallet", "Seed saved in service_config by primary", level='info')
+            return seed
+        except Exception:
+            # Read again in case of race
+            seed2 = read_seed()
+            if seed2:
+                return seed2
+            raise
+    else:
+        make_log("HotWallet", "Worker waiting for seed in service_config...", level='info')
+        while True:
+            seed = read_seed()
+            if seed:
+                return seed
+            time.sleep(0.5)

 _extra_ton_wallet_options = {}
 if getenv('TON_CUSTOM_WALLET_ADDRESS'):
     _extra_ton_wallet_options['address'] = Address(getenv('TON_CUSTOM_WALLET_ADDRESS'))

-hot_seed, hot_pubkey, hot_privkey = load_hot_pair()
-service_wallet = WalletV3CR3(
-    private_key=hot_privkey,
-    public_key=hot_pubkey,
-    **_extra_ton_wallet_options
-)
+def _init_wallet():
+    # Primary writes to DB; workers wait and read from DB
+    hot_seed_bytes = _init_seed_via_db()
+    pub, priv = crypto_sign_seed_keypair(hot_seed_bytes)
+    wallet = WalletV3CR3(
+        private_key=priv,
+        public_key=pub,
+        **_extra_ton_wallet_options
+    )
+    return hot_seed_bytes, pub, priv, wallet
+
+hot_seed, hot_pubkey, hot_privkey, service_wallet = _init_wallet()
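The net effect is a primary/worker handshake through the service_config table: the primary writes key='private_key' once, workers poll every 0.5 s until it appears, so every replica derives the same keypair and wallet. Role selection is env-driven (NODE_ROLE, defaulting to worker); a valid TON_INIT_HOT_SEED for the primary is just 32 random bytes in hex, e.g.:

import os
print(os.urandom(32).hex())  # 64 hex chars suitable for TON_INIT_HOT_SEED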

Binary file not shown.

Binary file not shown.

Binary file not shown.

51 app/core/_utils/b58.py Normal file
View File

@@ -0,0 +1,51 @@
from __future__ import annotations

try:
    # Prefer external package if available
    from base58 import b58encode, b58decode  # type: ignore
except Exception:
    # Minimal fallback (compatible subset)
    ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    ALPHABET_INDEX = {c: i for i, c in enumerate(ALPHABET)}

    def _to_bytes(value: bytes | bytearray | str) -> bytes:
        if isinstance(value, (bytes, bytearray)):
            return bytes(value)
        if isinstance(value, str):
            return value.encode()
        raise TypeError("value must be bytes or str")

    def b58encode(data: bytes | bytearray | str) -> bytes:
        data = _to_bytes(data)
        if not data:
            return b""
        n = int.from_bytes(data, "big")
        out = []
        while n > 0:
            n, rem = divmod(n, 58)
            out.append(ALPHABET[rem])
        enc = "".join(reversed(out))
        leading = 0
        for b in data:
            if b == 0:
                leading += 1
            else:
                break
        return ("1" * leading + enc).encode()

    def b58decode(data: bytes | bytearray | str) -> bytes:
        data_b = _to_bytes(data)
        if not data_b:
            return b""
        num = 0
        for ch in data_b.decode():
            num = num * 58 + ALPHABET_INDEX[ch]
        full = num.to_bytes((num.bit_length() + 7) // 8, "big")
        leading = 0
        for ch in data_b:
            if ch == ord('1'):
                leading += 1
            else:
                break
        return b"\x00" * leading + full

View File

@@ -1,10 +1,46 @@
-from app.core.models import Asset
+from sqlalchemy.ext.asyncio import AsyncEngine
+from sqlalchemy import text
+from app.core.models import BlockchainTask
 from app.core.models.base import AlchemyBase

-def create_maria_tables(engine):
-    """Create all tables in the database."""
-    Asset()
-    AlchemyBase.metadata.create_all(engine)
+async def create_db_tables(engine: AsyncEngine):
+    """Create all tables in the database (PostgreSQL, async)."""
+    # ensure model import side-effects initialize mappers
+    BlockchainTask()
+    async with engine.begin() as conn:
+        await conn.run_sync(AlchemyBase.metadata.create_all)
+        await conn.execute(text("""
+            ALTER TABLE users
+            ADD COLUMN IF NOT EXISTS is_admin BOOLEAN DEFAULT FALSE
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS telegram_id BIGINT
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS paid_at TIMESTAMPTZ
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS payment_tx_id VARCHAR(256)
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS payment_node_id VARCHAR(128)
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS payment_node_public_host VARCHAR(256)
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS bot_username VARCHAR(128)
+        """))
+        await conn.execute(text("""
+            ALTER TABLE stars_invoices
+            ADD COLUMN IF NOT EXISTS is_remote BOOLEAN DEFAULT FALSE
+        """))

29 app/core/_utils/hash.py Normal file
View File

@@ -0,0 +1,29 @@
from __future__ import annotations

import hashlib
from typing import Iterable

def _to_bytes(data: Iterable[int] | bytes | bytearray | str) -> bytes:
    if isinstance(data, (bytes, bytearray)):
        return bytes(data)
    if isinstance(data, str):
        return data.encode()
    return bytes(data)

def blake3_digest(data: Iterable[int] | bytes | bytearray | str) -> bytes:
    try:
        from blake3 import blake3  # type: ignore
        return blake3(_to_bytes(data)).digest()
    except Exception:
        return hashlib.blake2s(_to_bytes(data)).digest()

def blake3_hex(data: Iterable[int] | bytes | bytearray | str) -> str:
    try:
        from blake3 import blake3  # type: ignore
        return blake3(_to_bytes(data)).hexdigest()
    except Exception:
        return hashlib.blake2s(_to_bytes(data)).hexdigest()
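One caveat worth stating: blake2s is a stand-in, not a compatible fallback. blake3 and blake2s produce different digests for the same input, so two nodes that disagree on whether the blake3 package is installed will disagree on every hash (and on the fallback Signer's output). Both do emit 32-byte digests, so at least lengths match:

from app.core._utils.hash import blake3_digest

assert len(blake3_digest(b"data")) == 32  # holds for blake3 and blake2s alike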

View File

@@ -2,8 +2,9 @@ from app.core.content.content_id import ContentId

 def resolve_content(content_id) -> ContentId: # -> [content, error]
+    if isinstance(content_id, ContentId):
+        return content_id, None
     try:
         return ContentId.deserialize(content_id), None
     except BaseException as e:
         return None, f"{e}"

View File

@@ -5,7 +5,6 @@ from httpx import AsyncClient
 from app.core._config import PROJECT_HOST
 from app.core._crypto.signer import Signer
-from app.core._secrets import hot_seed
 from app.core.logger import make_log
@@ -17,6 +16,8 @@ async def send_status(service: str, status: str):
         'status': status,
     }
     message_bytes = dumps(message).encode()
+    # Lazy import to avoid triggering _secrets before DB is ready
+    from app.core._secrets import hot_seed
     signer = Signer(hot_seed)
     message_signature = signer.sign(message_bytes)
     async with AsyncClient() as client:

View File

@@ -0,0 +1,20 @@
from typing import Optional
from urllib.parse import urlencode

STARTAPP_LIMIT = 64

def build_content_links(content_token: str, ref_id: Optional[str], *, project_host: str, bot_username: str):
    """Return tuple of (startapp_payload, telegram_url, web_url)."""
    payload = (content_token or '').strip()
    if len(payload) > STARTAPP_LIMIT:
        payload = payload[:STARTAPP_LIMIT]
    telegram_url = f"https://t.me/{bot_username}/content?startapp={payload}"
    query = [('content', content_token)]
    if ref_id:
        query.append(('ref', ref_id))
    web_url = f"{project_host}/viewContent?{urlencode(query)}"
    return payload, telegram_url, web_url
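A hedged usage sketch with placeholder host and bot names (the 64-char truncation only affects the Telegram startapp payload; the web URL keeps the full token):

payload, tg_url, web_url = build_content_links(
    "abc123",                                # hypothetical content token
    ref_id="ref42",
    project_host="https://node.example.org", # placeholder
    bot_username="example_bot",              # placeholder
)
assert tg_url == "https://t.me/example_bot/content?startapp=abc123"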

View File

@@ -56,9 +56,10 @@ class AuthenticationMixin:
             },
             created=datetime.fromtimestamp(init_ts)
         )
+        from sqlalchemy import select
         db_session.add(new_key)
-        db_session.commit()
-        new_key = db_session.query(KnownKey).filter(KnownKey.seed_hash == new_key.seed_hash).first()
+        await db_session.commit()
+        new_key = (await db_session.execute(select(KnownKey).where(KnownKey.seed_hash == new_key.seed_hash))).scalars().first()
         assert new_key, "Key not created"
         make_log("auth", f"[new-K] User {user_id} created new {token_type} key {new_key.id}")
         return {

View File

@@ -4,8 +4,9 @@ import os
 import uuid
 import json
 import shutil
+import magic  # python-magic for MIME detection
 from base58 import b58decode, b58encode
-from sqlalchemy import and_, or_
+from sqlalchemy import and_, or_, select
 from app.core.models.node_storage import StoredContent
 from app.core.models._telegram import Wrapped_CBotChat
 from app.core._utils.send_status import send_status
@@ -13,14 +14,14 @@ from app.core.logger import make_log
 from app.core.models.user import User
 from app.core.models import WalletConnection
 from app.core.storage import db_session
-from app.core._config import UPLOADS_DIR
+from app.core._config import UPLOADS_DIR, BACKEND_DATA_DIR_HOST, BACKEND_LOGS_DIR_HOST
 from app.core.content.content_id import ContentId

 async def convert_loop(memory):
-    with db_session() as session:
+    async with db_session() as session:
         # Query for unprocessed encrypted content
-        unprocessed_encrypted_content = session.query(StoredContent).filter(
+        unprocessed_encrypted_content = (await session.execute(select(StoredContent).where(
             and_(
                 StoredContent.type == "onchain/content",
                 or_(
@@ -28,66 +29,115 @@ async def convert_loop(memory):
                     StoredContent.ipfs_cid == None,
                 )
             )
-        ).first()
+        ))).scalars().first()
         if not unprocessed_encrypted_content:
             make_log("ConvertProcess", "No content to convert", level="debug")
             return

+        # Fetch the decrypted file
+        decrypted_content = (await session.execute(select(StoredContent).where(
+            StoredContent.id == unprocessed_encrypted_content.decrypted_content_id
+        ))).scalars().first()
+        if not decrypted_content:
+            make_log("ConvertProcess", "Decrypted content not found", level="error")
+            return
+
+        # Determine the input file's path and extension
+        # Path inside the current container (visible to the Python process)
+        input_file_container = os.path.join(UPLOADS_DIR, decrypted_content.hash)
+        # Host path (needed for the docker -v mapping when launching the converter)
+        input_file_host = os.path.join(BACKEND_DATA_DIR_HOST, decrypted_content.hash)
+        input_ext = (unprocessed_encrypted_content.filename.split('.')[-1]
+                     if '.' in unprocessed_encrypted_content.filename else "mp4")
+
+        # ==== New logic: MIME type detection via python-magic ====
+        try:
+            mime_type = magic.from_file(input_file_container, mime=True)
+        except Exception as e:
+            make_log("ConvertProcess", f"magic probe failed: {e}", level="warning")
+            mime_type = ""
+        if mime_type.startswith("video/"):
+            content_kind = "video"
+        elif mime_type.startswith("audio/"):
+            content_kind = "audio"
+        else:
+            content_kind = "other"
+        make_log("ConvertProcess", f"Detected content_kind={content_kind}, mime={mime_type}", level="info")
+
+        # For other types, store a raw copy and return
+        if content_kind == "other":
+            make_log("ConvertProcess", f"Content {unprocessed_encrypted_content.id} processed. Not audio/video, copy just", level="info")
+            unprocessed_encrypted_content.btfs_cid = ContentId(
+                version=2, content_hash=b58decode(decrypted_content.hash)
+            ).serialize_v2()
+            unprocessed_encrypted_content.ipfs_cid = ContentId(
+                version=2, content_hash=b58decode(decrypted_content.hash)
+            ).serialize_v2()
+            unprocessed_encrypted_content.meta = {
+                **unprocessed_encrypted_content.meta,
+                'converted_content': {
+                    option_name: decrypted_content.hash for option_name in ['high', 'low', 'low_preview']
+                }
+            }
+            await session.commit()
+            return
+
+        # ==== Video/audio conversion: the original logic ====
         # Static preview interval in seconds
         preview_interval = [0, 30]
         if unprocessed_encrypted_content.onchain_index in [2]:
             preview_interval = [0, 60]
-        make_log("ConvertProcess", f"Processing content {unprocessed_encrypted_content.id} with preview interval {preview_interval}", level="info")
-        decrypted_content = session.query(StoredContent).filter(
-            StoredContent.id == unprocessed_encrypted_content.decrypted_content_id
-        ).first()
-        if not decrypted_content:
-            make_log("ConvertProcess", "Decrypted content not found", level="error")
-            return
+        make_log(
+            "ConvertProcess",
+            f"Processing content {unprocessed_encrypted_content.id} as {content_kind} with preview interval {preview_interval}",
+            level="info"
+        )

-        # List of conversion options to process
-        REQUIRED_CONVERT_OPTIONS = ['high', 'low', 'low_preview']
-        converted_content = {} # Mapping: option -> sha256 hash of output file
-
-        # Define input file path and extract its extension from filename
-        input_file_path = f"/Storage/storedContent/{decrypted_content.hash}"
-        input_ext = unprocessed_encrypted_content.filename.split('.')[-1] if '.' in unprocessed_encrypted_content.filename else "mp4"
-        # Logs directory mapping
-        logs_dir = "/Storage/logs/converter"
-
-        # Process each conversion option in sequence
+        # Pick conversion options for video vs. audio
+        if content_kind == "video":
+            REQUIRED_CONVERT_OPTIONS = ['high', 'low', 'low_preview']
+        else:
+            REQUIRED_CONVERT_OPTIONS = ['high', 'low']  # no preview for audio
+        converted_content = {}
+        # Host-side logs directory for the converter docker container
+        logs_dir_host = BACKEND_LOGS_DIR_HOST
+
         for option in REQUIRED_CONVERT_OPTIONS:
             # Set quality parameter and trim option (only for preview)
             if option == "low_preview":
                 quality = "low"
                 trim_value = f"{preview_interval[0]}-{preview_interval[1]}"
             else:
-                quality = option # 'high' or 'low'
+                quality = option
                 trim_value = None

             # Generate a unique output directory for docker container
             output_uuid = str(uuid.uuid4())
-            output_dir = f"/Storage/storedContent/converter-output/{output_uuid}"
+            # Output directory inside the current container (under UPLOADS_DIR, mounted from the host)
+            output_dir_container = os.path.join(UPLOADS_DIR, "converter-output", output_uuid)
+            os.makedirs(output_dir_container, exist_ok=True)
+            # The matching directory on the host, needed for docker -v
+            output_dir_host = os.path.join(BACKEND_DATA_DIR_HOST, "converter-output", output_uuid)

-            # Build the docker command with appropriate volume mounts and parameters
+            # Build the docker command
             cmd = [
                 "docker", "run", "--rm",
-                "-v", f"{input_file_path}:/app/input",
-                "-v", f"{output_dir}:/app/output",
-                "-v", f"{logs_dir}:/app/logs",
+                # Important: the mount sources are HOST paths, because the docker daemon resolves them on the host
+                "-v", f"{input_file_host}:/app/input:ro",
+                "-v", f"{output_dir_host}:/app/output",
+                "-v", f"{logs_dir_host}:/app/logs",
                 "media_converter",
                 "--ext", input_ext,
                 "--quality", quality
             ]
             if trim_value:
                 cmd.extend(["--trim", trim_value])
+            # converter auto-detects audio/video, no explicit flag required

-            # Run the docker container asynchronously
             process = await asyncio.create_subprocess_exec(
                 *cmd,
                 stdout=asyncio.subprocess.PIPE,
@@ -98,22 +148,21 @@ async def convert_loop(memory):
                 make_log("ConvertProcess", f"Docker conversion failed for option {option}: {stderr.decode()}", level="error")
                 return

-            # List files in the output directory
+            # List files in output dir
             try:
-                files = os.listdir(output_dir.replace("/Storage/storedContent", "/app/data"))
+                files = os.listdir(output_dir_container)
             except Exception as e:
                 make_log("ConvertProcess", f"Error reading output directory {output_dir}: {e}", level="error")
                 return

-            # Exclude 'output.json' and expect exactly one media output file
             media_files = [f for f in files if f != "output.json"]
             if len(media_files) != 1:
                 make_log("ConvertProcess", f"Expected one media file, found {len(media_files)} for option {option}", level="error")
                 return
-            output_file = os.path.join(output_dir.replace("/Storage/storedContent", "/app/data"), media_files[0])
+            output_file = os.path.join(output_dir_container, media_files[0])

-            # Compute SHA256 hash of the output file using async subprocess
+            # Compute SHA256 hash of the output file
             hash_process = await asyncio.create_subprocess_exec(
                 "sha256sum", output_file,
                 stdout=asyncio.subprocess.PIPE,
@@ -126,21 +175,18 @@ async def convert_loop(memory):
             file_hash = hash_stdout.decode().split()[0]
             file_hash = b58encode(bytes.fromhex(file_hash)).decode()

-            if not session.query(StoredContent).filter(
-                StoredContent.hash == file_hash
-            ).first():
+            # Save new StoredContent if not exists
+            if not (await session.execute(select(StoredContent).where(StoredContent.hash == file_hash))).scalars().first():
                 new_content = StoredContent(
                     type="local/content_bin",
                     hash=file_hash,
                     user_id=unprocessed_encrypted_content.user_id,
                     filename=media_files[0],
-                    meta={
-                        'encrypted_file_hash': unprocessed_encrypted_content.hash,
-                    },
+                    meta={'encrypted_file_hash': unprocessed_encrypted_content.hash},
                     created=datetime.now(),
                 )
                 session.add(new_content)
-                session.commit()
+                await session.commit()

                 save_path = os.path.join(UPLOADS_DIR, file_hash)
                 try:
@@ -156,41 +202,29 @@ async def convert_loop(memory):
             converted_content[option] = file_hash

-            # Process output.json: read its contents and update meta['ffprobe_meta']
-            output_json_path = os.path.join(output_dir.replace("/Storage/storedContent", "/app/data"), "output.json")
-            if os.path.exists(output_json_path):
-                if unprocessed_encrypted_content.meta.get('ffprobe_meta') is None:
-                    try:
-                        with open(output_json_path, "r") as f:
-                            output_json_content = f.read()
-                    except Exception as e:
-                        make_log("ConvertProcess", f"Error reading output.json for option {option}: {e}", level="error")
-                        return
-                    try:
-                        ffprobe_meta = json.loads(output_json_content)
-                    except Exception as e:
-                        make_log("ConvertProcess", f"Error parsing output.json for option {option}: {e}", level="error")
-                        return
+            # Process output.json for ffprobe_meta
+            output_json_path = os.path.join(output_dir_container, "output.json")
+            if os.path.exists(output_json_path) and unprocessed_encrypted_content.meta.get('ffprobe_meta') is None:
+                try:
+                    with open(output_json_path, "r") as f:
+                        ffprobe_meta = json.load(f)
                     unprocessed_encrypted_content.meta = {
                         **unprocessed_encrypted_content.meta,
                         'ffprobe_meta': ffprobe_meta
                     }
-            else:
-                make_log("ConvertProcess", f"output.json not found for option {option}", level="error")
-
-            # Remove the output directory after processing
-            try:
-                shutil.rmtree(output_dir.replace("/Storage/storedContent", "/app/data"))
-            except Exception as e:
-                make_log("ConvertProcess", f"Error removing output directory {output_dir}: {e}", level="error")
-                # Continue even if deletion fails
+                except Exception as e:
+                    make_log("ConvertProcess", f"Error handling output.json for option {option}: {e}", level="error")
+
+            # Cleanup output directory
+            try:
+                shutil.rmtree(output_dir_container)
+            except Exception as e:
+                make_log("ConvertProcess", f"Error removing output dir {output_dir}: {e}", level="warning")

+        # Finalize original record
         make_log("ConvertProcess", f"Content {unprocessed_encrypted_content.id} processed. Converted content: {converted_content}", level="info")
         unprocessed_encrypted_content.btfs_cid = ContentId(
-            version=2, content_hash=b58decode(converted_content['high'])
+            version=2, content_hash=b58decode(converted_content['high' if content_kind=='video' else 'low'])
         ).serialize_v2()
         unprocessed_encrypted_content.ipfs_cid = ContentId(
             version=2, content_hash=b58decode(converted_content['low'])
@@ -199,32 +233,47 @@ async def convert_loop(memory):
             **unprocessed_encrypted_content.meta,
             'converted_content': converted_content
         }
+        await session.commit()

-        session.commit()
+        # Notify user if needed
         if not unprocessed_encrypted_content.meta.get('upload_notify_msg_id'):
-            wallet_owner_connection = session.query(WalletConnection).filter(
+            wallet_owner_connection = (await session.execute(select(WalletConnection).where(
                 WalletConnection.wallet_address == unprocessed_encrypted_content.owner_address
-            ).order_by(WalletConnection.id.desc()).first()
+            ).order_by(WalletConnection.id.desc()))).scalars().first()
             if wallet_owner_connection:
                 wallet_owner_user = wallet_owner_connection.user
-                wallet_owner_bot = Wrapped_CBotChat(memory._client_telegram_bot, chat_id=wallet_owner_user.telegram_id, user=wallet_owner_user, db_session=session)
-                unprocessed_encrypted_content.meta = {
-                    **unprocessed_encrypted_content.meta,
-                    'upload_notify_msg_id': await wallet_owner_bot.send_content(session, unprocessed_encrypted_content)
-                }
+                bot = Wrapped_CBotChat(
+                    memory._client_telegram_bot,
+                    chat_id=wallet_owner_user.telegram_id,
+                    user=wallet_owner_user,
+                    db_session=session
+                )
+                unprocessed_encrypted_content.meta['upload_notify_msg_id'] = await bot.send_content(session, unprocessed_encrypted_content)
+                await session.commit()

-        session.commit()

 async def main_fn(memory):
     make_log("ConvertProcess", "Service started", level="info")
     seqno = 0
     while True:
         try:
-            make_log("ConvertProcess", "Service running", level="debug")
+            rid = __import__('uuid').uuid4().hex[:8]
+            try:
+                from app.core.log_context import ctx_rid
+                ctx_rid.set(rid)
+            except BaseException:
+                pass
+            make_log("ConvertProcess", "Service running", level="debug", rid=rid)
             await convert_loop(memory)
             await asyncio.sleep(5)
             await send_status("convert_service", f"working (seqno={seqno})")
             seqno += 1
         except BaseException as e:
-            make_log("ConvertProcess", f"Error: {e}", level="error")
+            make_log("ConvertProcess", f"Error: {e}", level="error", rid=locals().get('rid'))
             await asyncio.sleep(3)
+        finally:
+            try:
+                from app.core.log_context import ctx_rid
+                ctx_rid.set(None)
+            except BaseException:
+                pass
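The core fix in this rewrite is the docker -v path split: the mount source must be a path the docker daemon can resolve on the host, while the Python process keeps reading and writing through the container-side mount. A minimal sketch of that mapping with hypothetical directories:

import os

UPLOADS_DIR = "/app/data"                 # as seen inside this container (example)
BACKEND_DATA_DIR_HOST = "/srv/node/data"  # the same directory as seen by the host (example)

def to_host_path(container_path: str) -> str:
    # docker -v sources are resolved by the daemon on the HOST, not inside
    # the container that invokes `docker run`
    rel = os.path.relpath(container_path, UPLOADS_DIR)
    return os.path.join(BACKEND_DATA_DIR_HOST, rel)

assert to_host_path("/app/data/abc") == "/srv/node/data/abc"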

View File

@@ -0,0 +1,524 @@
import asyncio
import os
import json
import shutil
import tempfile
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import List, Optional, Tuple

from sqlalchemy import select, and_, or_

from app.core.logger import make_log
from app.core.storage import db_session
from app.core._config import UPLOADS_DIR, BACKEND_LOGS_DIR_HOST
from app.core.models.content_v3 import (
    EncryptedContent,
    ContentKey,
    ContentDerivative,
    UploadSession,
)
from app.core.models.node_storage import StoredContent
from app.core.ipfs_client import cat_stream
from app.core.crypto.encf_stream import decrypt_encf_auto
from app.core.crypto.keywrap import unwrap_dek, wrap_dek, KeyWrapError
from app.core.network.key_client import request_key_from_peer
from app.core.models.my_network import KnownNode
from app.core._utils.resolve_content import resolve_content
from app.core.content.content_id import ContentId

CONCURRENCY = int(os.getenv("CONVERT_V3_MAX_CONCURRENCY", "3"))
STAGING_SUBDIR = os.getenv("CONVERT_V3_STAGING_SUBDIR", "convert-staging")
UPLOADS_PATH = Path(UPLOADS_DIR).resolve()
_host_uploads_env = os.getenv("BACKEND_DATA_DIR_HOST")
HOST_UPLOADS_PATH = Path(_host_uploads_env).resolve() if _host_uploads_env else None

@dataclass
class PlainStaging:
    container_path: str
    host_path: str

def _container_to_host(path: str) -> str:
    """Map a container path under UPLOADS_DIR to the host path for docker -v."""
    if not HOST_UPLOADS_PATH:
        raise RuntimeError("BACKEND_DATA_DIR_HOST is not configured for convert_v3")
    real_path = Path(path).resolve()
    try:
        real_path.relative_to(UPLOADS_PATH)
    except ValueError:
        # Not under uploads; best effort fallback to original string
        return str(real_path)
    rel = real_path.relative_to(UPLOADS_PATH)
    return str(HOST_UPLOADS_PATH / rel)

MEDIA_CONVERTER_CPU_LIMIT = os.getenv("MEDIA_CONVERTER_CPU_LIMIT")
MEDIA_CONVERTER_MEM_LIMIT = os.getenv("MEDIA_CONVERTER_MEM_LIMIT")
MEDIA_CONVERTER_CPUSET = os.getenv("MEDIA_CONVERTER_CPUSET") or os.getenv("CONVERT_CPUSET")
ERROR_TRUNCATE_LIMIT = 512

def _ensure_dir(path: str):
    try:
        os.makedirs(path, exist_ok=True)
    except Exception:
        pass

async def _sha256_b58(file_path: str) -> str:
    import hashlib
    import base58
    h = hashlib.sha256()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(2 * 1024 * 1024), b''):
            h.update(chunk)
    return base58.b58encode(h.digest()).decode()

async def _save_derivative(file_path: str, filename: str) -> Tuple[str, int]:
    """Move file into UPLOADS_DIR under sha256 b58 name; return (hash_b58, size)."""
    file_hash = await _sha256_b58(file_path)
    dst = os.path.join(UPLOADS_DIR, file_hash)
    try:
        os.remove(dst)
    except FileNotFoundError:
        pass
    shutil.move(file_path, dst)
    size = os.path.getsize(dst)
    return file_hash, size

async def _run_media_converter(staging: PlainStaging, input_ext: str, quality: str, trim_value: Optional[str], is_audio: bool):
    if not os.path.exists(staging.container_path):
        raise FileNotFoundError(f"Plain input missing at {staging.container_path}")
    host_input_path = staging.host_path
    if not host_input_path or not host_input_path.startswith('/'):
        host_input_path = os.path.abspath(host_input_path)
    rid = __import__('uuid').uuid4().hex[:8]
    output_dir_container = UPLOADS_PATH / "convert-output" / f"conv_{rid}"
    output_dir_host = _container_to_host(output_dir_container)
    _ensure_dir(str(output_dir_container))
    logs_dir_candidate = os.getenv("BACKEND_LOGS_DIR_HOST", "")
    logs_dir_host = logs_dir_candidate if logs_dir_candidate else str(HOST_UPLOADS_PATH / "logs" / "converter") if HOST_UPLOADS_PATH else "/tmp/converter-logs"
    if not logs_dir_host.startswith('/'):
        logs_dir_host = os.path.join(os.getcwd(), logs_dir_host)
    try:
        os.makedirs(logs_dir_host, exist_ok=True)
    except Exception:
        fallback_logs = HOST_UPLOADS_PATH / "logs" / "converter" if HOST_UPLOADS_PATH else Path("/tmp/converter-logs")
        logs_dir_host = str(fallback_logs)
        os.makedirs(logs_dir_host, exist_ok=True)
    cmd = [
        "docker", "run", "--rm",
        "-v", f"{host_input_path}:/app/input:ro",
        "-v", f"{output_dir_host}:/app/output",
        "-v", f"{logs_dir_host}:/app/logs",
    ]
    if MEDIA_CONVERTER_CPU_LIMIT:
        cmd.extend(["--cpus", str(MEDIA_CONVERTER_CPU_LIMIT)])
    if MEDIA_CONVERTER_MEM_LIMIT:
        cmd.extend(["--memory", str(MEDIA_CONVERTER_MEM_LIMIT)])
    if MEDIA_CONVERTER_CPUSET:
        cmd.extend(["--cpuset-cpus", MEDIA_CONVERTER_CPUSET])
    cmd.append("media_converter")
    cmd.extend(["--ext", input_ext, "--quality", quality])
    if trim_value:
        cmd.extend(["--trim", trim_value])
    make_log('convert_v3', f"Run media_converter cmd: {' '.join(cmd)}")
    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError(f"media_converter failed: {stderr.decode()}")
    # Find produced media file and optional output.json
    try:
        files = os.listdir(output_dir_container)
    except Exception as e:
        raise RuntimeError(f"Read output dir error: {e}")
    media_files = [f for f in files if f != "output.json"]
    if len(media_files) != 1:
        raise RuntimeError(f"Expected one media file, found {len(media_files)}: {media_files}")
    output_media = os.path.join(output_dir_container, media_files[0])
    ffprobe_meta = {}
    out_json = os.path.join(output_dir_container, "output.json")
    if os.path.exists(out_json):
        try:
            with open(out_json, 'r') as f:
                ffprobe_meta = json.load(f)
        except Exception:
            ffprobe_meta = {}
    return output_media, ffprobe_meta

async def _update_upload_session(ec: EncryptedContent, all_success: bool, errors: List[str]):
    async with db_session() as session:
        upload_row = (await session.execute(
            select(UploadSession).where(UploadSession.encrypted_cid == ec.encrypted_cid)
        )).scalars().first()
        if upload_row:
            if all_success:
                upload_row.state = 'converted'
                upload_row.error = None
            elif upload_row.state != 'converted':
                upload_row.state = 'conversion_failed'
                if errors:
                    upload_row.error = _short_error(errors[0])
            await session.commit()

async def _convert_content(ec: EncryptedContent, staging: PlainStaging):
    content_kind = 'audio' if ec.content_type.startswith('audio/') else ('video' if ec.content_type.startswith('video/') else 'other')
    input_ext = (ec.content_type.split('/')[-1] or 'bin')
    is_audio = content_kind == 'audio'
    encrypted_hash_b58 = ContentId.deserialize(ec.encrypted_cid).content_hash_b58
    if content_kind == 'other':
        errors: List[str] = []
        all_success = True
        try:
            file_hash, size_bytes = await _save_derivative(staging.container_path, staging.container_path)
            plain_path = os.path.join(UPLOADS_DIR, file_hash)
            plain_filename = f"{ec.encrypted_cid}.{input_ext}" if input_ext else ec.encrypted_cid
            async with db_session() as session:
                existing = (await session.execute(select(StoredContent).where(StoredContent.hash == file_hash))).scalars().first()
                if existing:
                    sc = existing
                    sc.type = sc.type or "local/content_bin"
                    sc.filename = plain_filename
                    sc.meta = {
                        **(sc.meta or {}),
                        'encrypted_cid': ec.encrypted_cid,
                        'kind': 'original',
                        'content_type': ec.content_type,
                    }
                    sc.updated = datetime.utcnow()
                else:
                    sc = StoredContent(
                        type="local/content_bin",
                        hash=file_hash,
                        user_id=None,
                        filename=plain_filename,
                        meta={
                            'encrypted_cid': ec.encrypted_cid,
                            'kind': 'original',
                            'content_type': ec.content_type,
                        },
                        created=datetime.utcnow(),
                    )
                    session.add(sc)
                    await session.flush()
                encrypted_records = (await session.execute(select(StoredContent).where(StoredContent.hash == encrypted_hash_b58))).scalars().all()
                for encrypted_sc in encrypted_records:
                    meta = dict(encrypted_sc.meta or {})
                    converted = dict(meta.get('converted_content') or {})
                    converted['original'] = file_hash
                    meta['converted_content'] = converted
                    if 'content_type' not in meta:
                        meta['content_type'] = ec.content_type
                    encrypted_sc.meta = meta
                    encrypted_sc.decrypted_content_id = sc.id
                    encrypted_sc.updated = datetime.utcnow()
                derivative = ContentDerivative(
                    content_id=ec.id,
                    kind='decrypted_original',
                    local_path=plain_path,
                    content_type=ec.content_type,
                    size_bytes=size_bytes,
                    status='ready',
                )
                session.add(derivative)
                await session.commit()
            make_log('convert_v3', f"Stored original derivative for {ec.encrypted_cid}")
        except Exception as e:
            all_success = False
            errors.append(str(e))
            make_log('convert_v3', f"Convert error {ec.encrypted_cid} opt=original: {e}", level='error')
        await _update_upload_session(ec, all_success, errors)
        return

    # audio/video path
    required = ['high', 'low', 'low_preview']
    conf = ec.preview_conf or {}
    intervals = conf.get('intervals') or [[0, int(conf.get('duration_ms', 30000))]]
    main_interval = intervals[0]
    start_s = max(0, int(main_interval[0]) // 1000)
    dur_s = max(1, int((main_interval[1] - main_interval[0]) // 1000) or 30)
    trim_value = f"{start_s}-{start_s + dur_s}"
    qualities = {
        'high': 'high',
        'low': 'low',
        'low_preview': 'low',
    }
    all_success = True
    errors: List[str] = []
    for opt in required:
        derivative_kind = f"decrypted_{opt if opt != 'low_preview' else 'preview'}"
        derivative_id: Optional[int] = None
        try:
            async with db_session() as session:
                cd = ContentDerivative(
                    content_id=ec.id,
                    kind=derivative_kind,
                    interval_start_ms=main_interval[0] if opt == 'low_preview' else None,
                    interval_end_ms=main_interval[1] if opt == 'low_preview' else None,
                    local_path="",
                    status='processing',
                )
                session.add(cd)
                await session.flush()
                derivative_id = cd.id
                await session.commit()
            out_path, ffprobe = await _run_media_converter(
                staging=staging,
                input_ext=input_ext,
                quality=qualities[opt],
                trim_value=trim_value if opt == 'low_preview' else None,
                is_audio=is_audio,
            )
            file_hash, size_bytes = await _save_derivative(out_path, os.path.basename(out_path))
            async with db_session() as session:
                sc = (await session.execute(select(StoredContent).where(StoredContent.hash == file_hash))).scalars().first()
                meta_payload = {'encrypted_cid': ec.encrypted_cid, 'kind': opt, 'ffprobe_meta': ffprobe}
                if sc:
                    sc.type = sc.type or "local/content_bin"
                    sc.filename = os.path.basename(out_path)
                    sc.meta = meta_payload
                    sc.updated = datetime.utcnow()
                else:
                    sc = StoredContent(
                        type="local/content_bin",
                        hash=file_hash,
                        user_id=None,
                        filename=os.path.basename(out_path),
                        meta=meta_payload,
                        created=datetime.utcnow(),
                    )
                    session.add(sc)
                    await session.flush()
                encrypted_sc = (await session.execute(select(StoredContent).where(StoredContent.hash == encrypted_hash_b58))).scalars().first()
                if encrypted_sc:
                    meta = dict(encrypted_sc.meta or {})
                    converted = dict(meta.get('converted_content') or {})
                    converted[opt] = file_hash
                    meta['converted_content'] = converted
                    encrypted_sc.meta = meta
                    if opt == 'high':
                        encrypted_sc.decrypted_content_id = sc.id
                    encrypted_sc.updated = datetime.utcnow()
                cd = await session.get(ContentDerivative, derivative_id) if derivative_id else None
                if cd:
                    cd.local_path = os.path.join(UPLOADS_DIR, file_hash)
                    cd.size_bytes = size_bytes
                    if is_audio:
                        cd.content_type = 'audio/flac' if opt == 'high' else 'audio/mpeg'
                    else:
                        cd.content_type = ec.content_type if opt == 'high' else 'video/mp4'
                    cd.status = 'ready'
                    cd.error = None
                await session.commit()
            output_parent = Path(out_path).parent
            shutil.rmtree(output_parent, ignore_errors=True)
            make_log('convert_v3', f"Converted {ec.encrypted_cid} opt={opt} -> {file_hash}")
        except Exception as e:
            make_log('convert_v3', f"Convert error {ec.encrypted_cid} opt={opt}: {e}", level='error')
            all_success = False
            errors.append(_short_error(e))
            async with db_session() as session:
                cd = await session.get(ContentDerivative, derivative_id) if derivative_id else None
                if cd:
                    cd.status = 'failed'
                    cd.error = _short_error(e)
                else:
                    session.add(ContentDerivative(
                        content_id=ec.id,
                        kind=derivative_kind,
                        status='failed',
                        error=_short_error(e),
                        local_path="",
                    ))
                await session.commit()
    await _update_upload_session(ec, all_success, errors)

async def _pick_pending(limit: int) -> List[Tuple[EncryptedContent, PlainStaging]]:
    async with db_session() as session:
        # Include preview-enabled media and non-media content that need decrypted originals
        non_media_filter = and_(
            EncryptedContent.content_type.isnot(None),
            ~EncryptedContent.content_type.like('audio/%'),
            ~EncryptedContent.content_type.like('video/%'),
        )
        ecs = (await session.execute(
            select(EncryptedContent)
            .where(or_(EncryptedContent.preview_enabled == True, non_media_filter))
            .order_by(EncryptedContent.created_at.desc())
        )).scalars().all()
        picked: List[Tuple[EncryptedContent, PlainStaging]] = []
        for ec in ecs:
            try:
                cid_obj, cid_err = resolve_content(ec.encrypted_cid)
                if cid_err:
                    make_log('convert_v3', f"Skip {ec.encrypted_cid}: resolve error {cid_err}", level='debug')
                    continue
                encrypted_hash_b58 = cid_obj.content_hash_b58
            except Exception as exc:
                make_log('convert_v3', f"Skip {ec.encrypted_cid}: resolve exception {exc}", level='warning')
                continue
            sc = (await session.execute(select(StoredContent).where(StoredContent.hash == encrypted_hash_b58))).scalars().first()
            if not sc or sc.onchain_index is None:
                continue
            # Check if derivatives already ready
            rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id))).scalars().all()
            kinds_ready = {r.kind for r in rows if r.status == 'ready'}
            if ec.content_type.startswith('audio/'):
                required = {'decrypted_low', 'decrypted_high'}
            elif ec.content_type.startswith('video/'):
                required = {'decrypted_low', 'decrypted_high', 'decrypted_preview'}
            else:
                required = {'decrypted_original'}
            if required.issubset(kinds_ready):
                continue
            # Always decrypt from IPFS using local or remote key
            staging: Optional[PlainStaging] = None
            ck = (await session.execute(select(ContentKey).where(ContentKey.content_id == ec.id))).scalars().first()
            if ck:
                staging = await stage_plain_from_ipfs(ec, ck.key_ciphertext_b64)
            if not staging:
                peers = (await session.execute(select(KnownNode))).scalars().all()
                for peer in peers:
                    meta = peer.meta or {}
                    public_host = meta.get('public_host')
                    if not public_host:
                        last_resp = (meta.get('last_response') or {}).get('node', {}) if isinstance(meta, dict) else {}
                        public_host = last_resp.get('public_host')
                    base_url = public_host or f"http://{peer.ip}:{peer.port}"
                    dek = await request_key_from_peer(base_url, ec.encrypted_cid)
                    if not dek:
                        continue
                    try:
                        dek_b64 = wrap_dek(dek)
                    except KeyWrapError as exc:
                        make_log('convert_v3', f"wrap failed for peer DEK: {exc}", level='error')
                        continue
                    session_ck = ContentKey(
                        content_id=ec.id,
                        key_ciphertext_b64=dek_b64,
                        key_fingerprint=peer.public_key,
                        issuer_node_id=peer.public_key,
                        allow_auto_grant=True,
                    )
                    session.add(session_ck)
                    await session.commit()
                    staging = await stage_plain_from_ipfs(ec, dek_b64)
                    if staging:
                        break
            if not staging or not os.path.exists(staging.container_path):
                continue
            picked.append((ec, staging))
            if len(picked) >= limit:
                break
        return picked

async def worker_loop():
    sem = asyncio.Semaphore(CONCURRENCY)

    async def _run_one(ec: EncryptedContent, staging: PlainStaging):
        async with sem:
            try:
                await _convert_content(ec, staging)
                # After successful conversion, attempt to remove staging file to avoid duplicates
                try:
                    if staging and staging.container_path and os.path.exists(staging.container_path):
                        os.remove(staging.container_path)
                except Exception:
                    pass
            except Exception as e:
                make_log('convert_v3', f"job error {ec.encrypted_cid}: {e}", level='error')

    while True:
        try:
            batch = await _pick_pending(limit=CONCURRENCY * 2)
            if not batch:
                await asyncio.sleep(3)
                continue
            tasks = [asyncio.create_task(_run_one(ec, staging)) for (ec, staging) in batch]
            await asyncio.gather(*tasks)
        except Exception as e:
            make_log('convert_v3', f"loop error: {e}", level='error')
            await asyncio.sleep(2)

async def main_fn(memory):
    make_log('convert_v3', f"Service started with concurrency={CONCURRENCY}", level='info')
    await worker_loop()

async def stage_plain_from_ipfs(ec: EncryptedContent, dek_wrapped: str) -> Optional[PlainStaging]:
    """Download encrypted ENCF stream from IPFS and decrypt on the fly into shared staging."""
    os.makedirs(UPLOADS_PATH / STAGING_SUBDIR, exist_ok=True)
    try:
        dek = unwrap_dek(dek_wrapped)
    except KeyWrapError as exc:
        make_log('convert_v3', f"unwrap failed for {ec.encrypted_cid}: {exc}", level='error')
        return None
    tmp = tempfile.NamedTemporaryFile(
        prefix=f"dec_{ec.encrypted_cid[:8]}_",
        dir=UPLOADS_PATH / STAGING_SUBDIR,
        delete=False,
    )
    tmp_path = tmp.name
    tmp.close()
    try:
        async def _aiter():
            async for ch in cat_stream(ec.encrypted_cid):
                yield ch
        await decrypt_encf_auto(_aiter(), dek, tmp_path)
        host_path = _container_to_host(tmp_path)
        return PlainStaging(container_path=tmp_path, host_path=host_path)
    except Exception as e:
        make_log('convert_v3', f"decrypt from ipfs failed: {e}", level='error')
        try:
            os.remove(tmp_path)
        except Exception:
            pass
        return None

def _short_error(message: str, limit: int = ERROR_TRUNCATE_LIMIT) -> str:
    if not message:
        return message
    message = str(message)
    return message if len(message) <= limit else message[: limit - 3] + '...'
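worker_loop caps the number of simultaneously running converter containers with a plain asyncio.Semaphore; the same shape, stripped of repo specifics, looks like this (a generic sketch, not code from the file above):

import asyncio

async def run_bounded(jobs, limit=3):
    # jobs: callables returning coroutines; at most `limit` run at once
    sem = asyncio.Semaphore(limit)

    async def _one(job):
        async with sem:
            return await job()

    return await asyncio.gather(*(_one(j) for j in jobs))

# asyncio.run(run_bounded([lambda: asyncio.sleep(0.1)] * 10))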

View File

@@ -0,0 +1,97 @@
import asyncio
import os
from datetime import datetime, timedelta

from sqlalchemy import select

from app.core.logger import make_log
from app.core.storage import db_session
from app.core.models.content_v3 import ContentDerivative
from app.core.models._config import ServiceConfig

ENV_MAX_GB = float(os.getenv('DERIVATIVE_CACHE_MAX_GB', '50'))
ENV_TTL_DAYS = int(os.getenv('DERIVATIVE_CACHE_TTL_DAYS', '0'))
INTERVAL_SEC = int(os.getenv('DERIVATIVE_JANITOR_INTERVAL_SEC', '600'))

async def _current_total_size() -> int:
    async with db_session() as session:
        rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.status == 'ready'))).scalars().all()
        return sum(int(r.size_bytes or 0) for r in rows)

async def _evict_over_ttl(now: datetime) -> int:
    removed = 0
    # Pull TTL from ServiceConfig each time
    async with db_session() as session:
        ttl_days = int(await ServiceConfig(session).get('DERIVATIVE_CACHE_TTL_DAYS', ENV_TTL_DAYS))
    if ttl_days <= 0:
        return 0
    async with db_session() as session:
        rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.status == 'ready'))).scalars().all()
        for r in rows:
            la = r.last_access_at or r.created_at
            if la and (now - la) > timedelta(days=ttl_days):
                try:
                    if r.local_path and os.path.exists(r.local_path):
                        os.remove(r.local_path)
                except Exception:
                    pass
                r.status = 'pending'
                r.local_path = None
                r.size_bytes = None
                r.last_access_at = None
                removed += 1
        await session.commit()
    return removed

async def _evict_to_fit():
    async with db_session() as session:
        max_gb = await ServiceConfig(session).get('DERIVATIVE_CACHE_MAX_GB', ENV_MAX_GB)
    limit_bytes = int(float(max_gb) * (1024 ** 3))
    total = await _current_total_size()
    if total <= limit_bytes:
        return 0
    to_remove = total - limit_bytes
    removed = 0
    async with db_session() as session:
        # Oldest first by last_access_at
        rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.status == 'ready'))).scalars().all()
        rows.sort(key=lambda r: (r.last_access_at or r.created_at or datetime.utcfromtimestamp(0)))
        for r in rows:
            if to_remove <= 0:
                break
            size = int(r.size_bytes or 0)
            try:
                if r.local_path and os.path.exists(r.local_path):
                    os.remove(r.local_path)
            except Exception:
                pass
            r.status = 'pending'
            r.local_path = None
            r.last_access_at = None
            r.size_bytes = None
            await session.commit()
            to_remove -= size
            removed += 1
    return removed

async def main_fn(memory):
    async with db_session() as session:
        cfg = ServiceConfig(session)
        runtime_max_gb = float(await cfg.get('DERIVATIVE_CACHE_MAX_GB', ENV_MAX_GB))
        runtime_ttl_days = int(await cfg.get('DERIVATIVE_CACHE_TTL_DAYS', ENV_TTL_DAYS))
    make_log('derivative_janitor', f"Started (MAX_GB={runtime_max_gb}, TTL_DAYS={runtime_ttl_days})", level='info')
    while True:
        try:
            now = datetime.utcnow()
            r1 = await _evict_over_ttl(now)
            r2 = await _evict_to_fit()
            if r1 or r2:
                make_log('derivative_janitor', f"Evicted: ttl={r1}, fit={r2}")
        except Exception as e:
            make_log('derivative_janitor', f"Error: {e}", level='error')
        await asyncio.sleep(INTERVAL_SEC)
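The eviction arithmetic in _evict_to_fit is plain capacity math: anything over the configured cap is removed oldest-first by last_access_at. For example, with the default 50 GB cap and 53 GB of ready derivatives:

limit_bytes = int(50 * (1024 ** 3))
total_bytes = 53 * (1024 ** 3)
to_remove = max(0, total_bytes - limit_bytes)
assert to_remove == 3 * (1024 ** 3)  # evict at least 3 GB, oldest first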

View File

@ -0,0 +1,152 @@
import asyncio
from typing import Dict, List, Optional, Tuple
from urllib.parse import urlencode
import httpx
from sqlalchemy import select
from app.core.logger import make_log
from app.core.storage import db_session
from app.core.models import KnownNode, NodeEvent
from app.core.events.service import (
store_remote_events,
upsert_cursor,
LOCAL_PUBLIC_KEY,
)
from app.core.models.events import NodeEventCursor
from app.core._secrets import hot_pubkey, hot_seed
from app.core.network.nodesig import sign_headers
from base58 import b58encode
def _node_public_base(node: KnownNode) -> Optional[str]:
meta = node.meta or {}
public_host = (meta.get('public_host') or '').strip()
if public_host:
base = public_host.rstrip('/')
if base.startswith('http://') or base.startswith('https://'):
return base
scheme = 'https' if node.port == 443 else 'http'
return f"{scheme}://{base.lstrip('/')}"
scheme = 'https' if node.port == 443 else 'http'
host = (node.ip or '').strip()
if not host:
return None
default_port = 443 if scheme == 'https' else 80
if node.port and node.port != default_port:
return f"{scheme}://{host}:{node.port}"
return f"{scheme}://{host}"
async def _fetch_events_for_node(node: KnownNode, limit: int = 100) -> Tuple[List[Dict], int]:
base = _node_public_base(node)
if not base:
return [], 0
async with db_session() as session:
cursor = (await session.execute(
select(NodeEventCursor).where(NodeEventCursor.source_public_key == node.public_key)
)).scalar_one_or_none()
since = cursor.last_seq if cursor else 0
query = urlencode({"since": since, "limit": limit})
path = f"/api/v1/network.events?{query}"
url = f"{base}{path}"
pk_b58 = b58encode(hot_pubkey).decode()
headers = sign_headers("GET", path, b"", hot_seed, pk_b58)
async with httpx.AsyncClient(timeout=20.0) as client:
try:
resp = await client.get(url, headers=headers)
if resp.status_code == 403:
make_log("Events", f"Access denied by node {node.public_key}", level="warning")
return [], since
resp.raise_for_status()
data = resp.json()
except Exception as exc:
make_log("Events", f"Fetch events failed from {node.public_key}: {exc}", level="debug")
return [], since
events = data.get("events") or []
next_since = int(data.get("next_since") or since)
return events, next_since
async def _apply_event(session, event: NodeEvent):
if event.event_type == "stars_payment":
from app.core.models import StarsInvoice
payload = event.payload or {}
invoice_id = payload.get("invoice_id")
telegram_id = payload.get("telegram_id")
content_hash = payload.get("content_hash")
amount = payload.get("amount")
if not invoice_id or not telegram_id or not content_hash:
return
invoice = (await session.execute(select(StarsInvoice).where(StarsInvoice.external_id == invoice_id))).scalar_one_or_none()
if not invoice:
invoice = StarsInvoice(
external_id=invoice_id,
user_id=payload.get("user_id"),
type=payload.get('type') or 'access',
telegram_id=telegram_id,
amount=amount,
content_hash=content_hash,
paid=True,
paid_at=event.created_at,
payment_node_id=payload.get("payment_node", {}).get("public_key"),
payment_node_public_host=payload.get("payment_node", {}).get("public_host"),
bot_username=payload.get("bot_username"),
is_remote=True,
)
session.add(invoice)
else:
invoice.paid = True
invoice.paid_at = invoice.paid_at or event.created_at
invoice.payment_node_id = payload.get("payment_node", {}).get("public_key")
invoice.payment_node_public_host = payload.get("payment_node", {}).get("public_host")
invoice.bot_username = payload.get("bot_username") or invoice.bot_username
invoice.telegram_id = telegram_id or invoice.telegram_id
invoice.is_remote = True
if payload.get('type'):
invoice.type = payload['type']
event.status = 'applied'
event.applied_at = event.applied_at or event.received_at
elif event.event_type == "content_indexed":
# The index scout picks this up via remote_content_index; here we only record the event
event.status = 'recorded'
elif event.event_type == "node_registered":
event.status = 'recorded'
else:
event.status = 'recorded'
async def main_fn(memory):
make_log("Events", "Sync service started", level="info")
while True:
try:
async with db_session() as session:
nodes = (await session.execute(select(KnownNode))).scalars().all()
trusted_nodes = [
n for n in nodes
if isinstance(n.meta, dict) and n.meta.get("role") == "trusted" and n.public_key != LOCAL_PUBLIC_KEY
]
trusted_keys = {n.public_key for n in trusted_nodes}
for node in trusted_nodes:
events, next_since = await _fetch_events_for_node(node)
if not events:
if next_since:
async with db_session() as session:
await upsert_cursor(session, node.public_key, next_since, node.meta.get("public_host") if isinstance(node.meta, dict) else None)
await session.commit()
continue
async with db_session() as session:
stored = await store_remote_events(
session,
events,
allowed_public_keys=trusted_keys,
)
for ev in stored:
await _apply_event(session, ev)
if stored:
await session.commit()
await upsert_cursor(session, node.public_key, next_since, node.meta.get("public_host") if isinstance(node.meta, dict) else None)
await session.commit()
except Exception as exc:
make_log("Events", f"Sync loop error: {exc}", level="error")
await asyncio.sleep(10)
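# Editorial note: delivery here is effectively at-least-once. Only events from
# the trusted key set are stored (allowed_public_keys above), and _apply_event()
# upserts StarsInvoice rows by external_id, so replaying the same
# 'stars_payment' event is harmless.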

View File

@@ -0,0 +1,376 @@
import asyncio
import os
from datetime import datetime
from typing import List, Optional
import httpx
from urllib.parse import urlparse
import random
import shutil
from sqlalchemy import select
from app.core.logger import make_log
from app.core.storage import db_session
from app.core.models.my_network import KnownNode, RemoteContentIndex
from app.core.models.events import NodeEvent
from app.core.models.content_v3 import EncryptedContent, ContentDerivative
from app.core.ipfs_client import pin_add, pin_ls, find_providers, swarm_connect, add_streamed_file
from app.core.events.service import LOCAL_PUBLIC_KEY
INTERVAL_SEC = 60
ENV_PIN_CONCURRENCY = int(os.getenv('SYNC_MAX_CONCURRENT_PINS', '4'))
ENV_DISK_WATERMARK_PCT = int(os.getenv('SYNC_DISK_LOW_WATERMARK_PCT', '90'))
async def fetch_index(base_url: str, etag: Optional[str], since: Optional[str]) -> tuple[List[dict], Optional[str]]:
try:
headers = {}
params = {}
if since:
params['since'] = since
url = f"{base_url.rstrip('/')}/api/v1/content.delta" if since else f"{base_url.rstrip('/')}/api/v1/content.index"
if etag:
headers['If-None-Match'] = etag
# follow_redirects handles peers that force HTTPS and issue 301s
async with httpx.AsyncClient(timeout=20, follow_redirects=True) as client:
r = await client.get(url, headers=headers, params=params)
if r.status_code == 304:
return [], etag
if r.status_code != 200:
return [], etag
j = r.json()
new_etag = r.headers.get('ETag') or etag
return j.get('items') or [], (j.get('next_since') or new_etag or etag)
except Exception:
return [], etag
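# Caching contract assumed by fetch_index(): a 304 (If-None-Match hit) keeps
# the previous etag and yields no items; on 200 the marker becomes next_since
# for delta responses or the fresh ETag otherwise, and the caller persists it
# in KnownNode.meta as index_since / index_etag.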
async def upsert_content(item: dict):
cid = item.get('encrypted_cid')
if not cid:
return
async with db_session() as session:
row = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
if not row:
row = EncryptedContent(
encrypted_cid=cid,
title=item.get('title') or cid,
description=item.get('description') or '',
content_type=item.get('content_type') or 'application/octet-stream',
enc_size_bytes=item.get('size_bytes'),
preview_enabled=bool(item.get('preview_enabled')),
preview_conf=item.get('preview_conf') or {},
salt_b64=item.get('salt_b64'),
)
session.add(row)
else:
row.title = item.get('title') or row.title
row.description = item.get('description') or row.description
row.content_type = item.get('content_type') or row.content_type
row.enc_size_bytes = item.get('size_bytes') or row.enc_size_bytes
row.preview_enabled = bool(item.get('preview_enabled')) if item.get('preview_enabled') is not None else row.preview_enabled
if item.get('preview_conf'):
row.preview_conf = item['preview_conf']
if item.get('salt_b64'):
row.salt_b64 = item['salt_b64']
await session.commit()
# Fetch thumbnail via HTTP if provided and not present locally
cover_url = item.get('cover_url')
if cover_url:
try:
async with db_session() as session:
ec = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
have_thumb = (await session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id, ContentDerivative.kind == 'decrypted_thumbnail', ContentDerivative.status == 'ready'))).scalars().first()
if not have_thumb:
import httpx, tempfile, os
async with httpx.AsyncClient(timeout=30) as client:
r = await client.get(cover_url)
r.raise_for_status()
tmp = tempfile.NamedTemporaryFile(delete=False)
tmp.write(r.content)
tmp.close()
# Save into store
from app.core.background.convert_v3_service import _save_derivative
h, size = await _save_derivative(tmp.name, os.path.basename(cover_url) or 'thumb.jpg')
cd = ContentDerivative(
content_id=ec.id,
kind='decrypted_thumbnail',
local_path=os.path.join(os.getenv('UPLOADS_DIR', '/app/data'), h),
content_type=r.headers.get('Content-Type') or 'image/jpeg',
size_bytes=size,
status='ready',
)
session.add(cd)
await session.commit()
except Exception as e:
make_log('index_scout_v3', f"thumbnail fetch failed for {cid}: {e}", level='warning')
def _node_base_url(node: KnownNode) -> Optional[str]:
meta = node.meta or {}
public_host = (meta.get('public_host') or '').strip()
if public_host:
base = public_host.rstrip('/')
if base.startswith('http://') or base.startswith('https://'):
return base
scheme = 'https' if node.port == 443 else 'http'
return f"{scheme}://{base.lstrip('/')}"
scheme = 'https' if node.port == 443 else 'http'
host = (node.ip or '').strip()
if not host:
return None
default_port = 443 if scheme == 'https' else 80
if node.port and node.port != default_port:
return f"{scheme}://{host}:{node.port}"
return f"{scheme}://{host}"
async def _update_remote_index(node_id: int, items: List[dict], *, incremental: bool):
if not items:
return
async with db_session() as session:
existing_rows = (await session.execute(
select(RemoteContentIndex).where(RemoteContentIndex.remote_node_id == node_id)
)).scalars().all()
existing_map = {row.encrypted_hash: row for row in existing_rows if row.encrypted_hash}
seen = set()
now = datetime.utcnow()
for item in items:
cid = item.get('encrypted_cid')
if not cid:
continue
seen.add(cid)
payload_meta = {
'title': item.get('title'),
'description': item.get('description'),
'size_bytes': item.get('size_bytes'),
'preview_enabled': item.get('preview_enabled'),
'preview_conf': item.get('preview_conf'),
'issuer_node_id': item.get('issuer_node_id'),
'salt_b64': item.get('salt_b64'),
}
meta_clean = {k: v for k, v in payload_meta.items() if v is not None}
row = existing_map.get(cid)
if row:
row.content_type = item.get('content_type') or row.content_type
row.meta = {**(row.meta or {}), **meta_clean}
row.last_updated = now
else:
row = RemoteContentIndex(
remote_node_id=node_id,
content_type=item.get('content_type') or 'application/octet-stream',
encrypted_hash=cid,
meta=meta_clean,
last_updated=now,
)
session.add(row)
if not incremental and existing_map:
for hash_value, row in list(existing_map.items()):
if hash_value not in seen:
await session.delete(row)
await session.commit()
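# Behavior note: with incremental=False (a full snapshot) any locally known row
# missing from the snapshot is deleted, so a full fetch doubles as
# reconciliation; delta fetches (incremental=True) only add or update rows.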
async def main_fn(memory):
make_log('index_scout_v3', 'Service started', level='info')
sem = None
while True:
try:
# Read runtime config from ServiceConfig (fallback to env)
from app.core.models._config import ServiceConfig
async with db_session() as session:
max_pins = int(await ServiceConfig(session).get('SYNC_MAX_CONCURRENT_PINS', ENV_PIN_CONCURRENCY))
disk_pct = int(await ServiceConfig(session).get('SYNC_DISK_LOW_WATERMARK_PCT', ENV_DISK_WATERMARK_PCT))
if sem is None or sem._value != max_pins:
sem = asyncio.Semaphore(max_pins)
async with db_session() as session:
nodes = (await session.execute(select(KnownNode))).scalars().all()
node_by_pk = {n.public_key: n for n in nodes if n.public_key}
async with db_session() as session:
pending_events = (await session.execute(
select(NodeEvent)
.where(NodeEvent.event_type == 'content_indexed', NodeEvent.status.in_(('recorded', 'local', 'processing')))
.order_by(NodeEvent.created_at.asc())
.limit(25)
)).scalars().all()
for ev in pending_events:
if ev.status != 'processing':
ev.status = 'processing'
await session.commit()
for ev in pending_events:
payload = ev.payload or {}
cid = payload.get('encrypted_cid') or payload.get('content_cid')
if ev.origin_public_key == LOCAL_PUBLIC_KEY:
async with db_session() as session:
ref = await session.get(NodeEvent, ev.id)
if ref:
ref.status = 'applied'
ref.applied_at = datetime.utcnow()
await session.commit()
continue
if not cid:
async with db_session() as session:
ref = await session.get(NodeEvent, ev.id)
if ref:
ref.status = 'applied'
ref.applied_at = datetime.utcnow()
await session.commit()
continue
node = node_by_pk.get(ev.origin_public_key)
if not node:
async with db_session() as session:
node = (await session.execute(select(KnownNode).where(KnownNode.public_key == ev.origin_public_key))).scalar_one_or_none()
if node:
node_by_pk[node.public_key] = node
if not node:
make_log('index_scout_v3', f"Event {ev.uid} refers to unknown node {ev.origin_public_key}", level='debug')
async with db_session() as session:
ref = await session.get(NodeEvent, ev.id)
if ref:
ref.status = 'recorded'
await session.commit()
continue
try:
await _pin_one(node, cid)
async with db_session() as session:
ref = await session.get(NodeEvent, ev.id)
if ref:
ref.status = 'applied'
ref.applied_at = datetime.utcnow()
await session.commit()
except Exception as exc:
make_log('index_scout_v3', f"Event pin failed for {cid}: {exc}", level='warning')
async with db_session() as session:
ref = await session.get(NodeEvent, ev.id)
if ref:
ref.status = 'recorded'
await session.commit()
for n in nodes:
base = _node_base_url(n)
if not base:
continue
# jitter 0..30s per node to reduce stampede
await asyncio.sleep(random.uniform(0, 30))
etag = (n.meta or {}).get('index_etag')
since = (n.meta or {}).get('index_since')
items, marker = await fetch_index(base, etag, since)
if not items and marker == etag:
continue
# update node markers
try:
async with db_session() as session:
row = (await session.execute(select(KnownNode).where(KnownNode.id == n.id))).scalars().first()
if row:
meta = row.meta or {}
meta['index_etag'] = marker
meta['index_since'] = marker if (marker and 'T' in str(marker)) else meta.get('index_since')
row.meta = meta
await session.commit()
except Exception:
pass
if not items:
continue
make_log('index_scout_v3', f"Fetched {len(items)} from {base}")
try:
await _update_remote_index(n.id, items, incremental=bool(since))
except Exception as exc:
make_log('index_scout_v3', f"remote index update failed for node {n.id}: {exc}", level='warning')
# Check disk watermark
try:
from app.core._config import UPLOADS_DIR
du = shutil.disk_usage(UPLOADS_DIR)
used_pct = int(100 * (1 - du.free / du.total))
if used_pct >= disk_pct:
make_log('index_scout_v3', f"Disk watermark reached ({used_pct}%), skipping pins")
continue
except Exception:
pass
async def _pin_one(node: KnownNode, cid: str):
async with sem:
try:
node_ipfs_meta = (node.meta or {}).get('ipfs') or {}
multiaddrs = node_ipfs_meta.get('multiaddrs') or []
for addr in multiaddrs:
try:
await swarm_connect(addr)
except Exception:
pass
try:
existing = await pin_ls(cid)
if existing and existing.get('Keys'):
make_log('index_scout_v3', f"pin {cid} already present", level='debug')
return
except Exception:
pass
# Try to pre-connect to discovered providers
try:
provs = await find_providers(cid, max_results=5)
for p in provs:
for addr in (p.get('addrs') or [])[:2]:
try:
await swarm_connect(addr)
except Exception:
pass
except Exception:
pass
try:
await asyncio.wait_for(pin_add(cid, recursive=True), timeout=60)
return
except httpx.HTTPStatusError as http_err:
body = (http_err.response.text or '').lower() if http_err.response else ''
if 'already pinned' in body or 'pin already set' in body:
make_log('index_scout_v3', f"pin {cid} already present", level='debug')
return
raise
except Exception as e:
# Attempt HTTP gateway fallback before logging failure
fallback_sources = []
node_host = node.meta.get('public_host') if isinstance(node.meta, dict) else None
try:
# Derive gateway host: prefer public_host domain if present
parsed = urlparse(node_host) if node_host else None
gateway_host = parsed.hostname if parsed and parsed.hostname else (node.ip or '').split(':')[0]
gateway_port = parsed.port if (parsed and parsed.port not in (None, 80, 443)) else 8080
if gateway_host:
gateway_url = f"http://{gateway_host}:{gateway_port}/ipfs/{cid}"
make_log('index_scout_v3', f"fallback download start {cid} via {gateway_url}", level='debug')
async with httpx.AsyncClient(timeout=None) as client:
resp = await client.get(gateway_url)
resp.raise_for_status()
data = resp.content
chunk_bytes = int(os.getenv('CRYPTO_CHUNK_BYTES', '1048576'))
add_params = {
'cid-version': 1,
'raw-leaves': 'true',
'chunker': f'size-{chunk_bytes}',
'hash': 'sha2-256',
'pin': 'true',
}
result = await add_streamed_file([data], filename=f'{cid}.bin', params=add_params)
if str(result.get('Hash')) != str(cid):
raise ValueError(f"gateway add returned mismatched CID {result.get('Hash')}")
make_log('index_scout_v3', f"pin {cid} fetched via gateway {gateway_host}:{gateway_port}", level='info')
return
else:
fallback_sources.append('gateway-host-missing')
except Exception as fallback_err:
fallback_sources.append(str(fallback_err))
make_log('index_scout_v3', f"pin {cid} failed: {e}; fallback={'; '.join(fallback_sources) if fallback_sources else 'none'}", level='warning')
tasks = []
for it in items:
await upsert_content(it)
cid = it.get('encrypted_cid')
if cid:
make_log('index_scout_v3', f"queue pin {cid}")
tasks.append(asyncio.create_task(_pin_one(n, cid)))
if tasks:
await asyncio.gather(*tasks)
except Exception as e:
make_log('index_scout_v3', f"loop error: {e}", level='error')
await asyncio.sleep(INTERVAL_SEC)
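# Note on the gateway fallback in _pin_one: re-adding the downloaded bytes can
# only reproduce the source CID if the add parameters (cid-version=1,
# raw-leaves, sha2-256 and the same size-N chunker) match how the source node
# added the content, which is why CRYPTO_CHUNK_BYTES must agree across nodes;
# the mismatch check above refuses to keep a silently different object.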

View File

@@ -1,4 +1,5 @@
import asyncio
+import os
from base64 import b64decode
from datetime import datetime
@@ -6,17 +7,22 @@ from base58 import b58encode
from sqlalchemy import String, and_, desc, cast
from tonsdk.boc import Cell
from tonsdk.utils import Address
-from app.core._config import CLIENT_TELEGRAM_BOT_USERNAME
+from app.core._config import CLIENT_TELEGRAM_BOT_USERNAME, PROJECT_HOST
+from app.core.events.service import record_event
from app.core._blockchain.ton.platform import platform
from app.core._blockchain.ton.toncenter import toncenter
from app.core._utils.send_status import send_status
from app.core.logger import make_log
from app.core.models import UserContent, KnownTelegramMessage, ServiceConfig
+from app.core.models.user import User
from app.core.models.node_storage import StoredContent
from app.core._utils.resolve_content import resolve_content
from app.core.models.wallet_connection import WalletConnection
+from app.core._keyboards import get_inline_keyboard
from app.core.models._telegram import Wrapped_CBotChat
+MIN_ONCHAIN_INDEX = int(os.getenv("MIN_ONCHAIN_INDEX", "8"))
+from sqlalchemy import select, and_, desc
from app.core.storage import db_session
import os
import traceback
@@ -33,7 +39,7 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
platform_found = True
make_log("Indexer", "Service running", level="debug")
-with db_session() as session:
+async with db_session() as session:
try:
result = await toncenter.run_get_method('EQD8TJ8xEWB1SpnRE4d89YO3jl0W0EiBnNS4IBaHaUmdfizE', 'get_pool_data')
assert result['exit_code'] == 0, f"Error in get-method: {result}"
@@ -41,76 +47,116 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
assert result['stack'][1][0] == 'num', f"get second element is not num"
usdt_per_ton = (int(result['stack'][0][1], 16) * 1e3) / int(result['stack'][1][1], 16)
ton_per_star = 0.014 / usdt_per_ton
-ServiceConfig(session).set('live_tonPerStar', [ton_per_star, datetime.utcnow().timestamp()])
+await ServiceConfig(session).set('live_tonPerStar', [ton_per_star, datetime.utcnow().timestamp()])
make_log("TON_Daemon", f"TON per STAR price: {ton_per_star}", level="DEBUG")
except BaseException as e:
make_log("TON_Daemon", f"Error while saving TON per STAR price: {e}" + '\n' + traceback.format_exc(), level="ERROR")
-new_licenses = session.query(UserContent).filter(
+from sqlalchemy import cast
+from sqlalchemy.dialects.postgresql import JSONB
+new_licenses = (await session.execute(select(UserContent).where(
and_(
-~UserContent.meta.contains({'notification_sent': True}),
+~(cast(UserContent.meta, JSONB).contains({'notification_sent': True})),
UserContent.type == 'nft/listen'
)
-).all()
+))).scalars().all()
for new_license in new_licenses:
-licensed_content = session.query(StoredContent).filter(
+try:
+licensed_content = (await session.execute(select(StoredContent).where(
StoredContent.id == new_license.content_id
-).first()
+))).scalars().first()
if not licensed_content:
make_log("Indexer", f"Licensed content not found: {new_license.content_id}", level="error")
+new_license.meta = {**(new_license.meta or {}), 'notification_sent': True, 'notification_error': 'content_not_found'}
+await session.commit()
+continue
-content_metadata = licensed_content.metadata_json(session)
-assert content_metadata, "No content metadata found"
+try:
+content_metadata = await licensed_content.metadata_json_async(session)
+except BaseException as e:
+make_log("Indexer", f"Metadata fetch failed for content_id={licensed_content.id}: {e}", level="warning")
+content_metadata = None
+# Metadata is best-effort here: it should never block indexer loop progress.
+if not content_metadata:
+content_metadata = {
+'name': licensed_content.meta.get('title') or licensed_content.filename or 'Unknown',
+'artist': licensed_content.meta.get('artist'),
+'title': licensed_content.meta.get('title'),
+}
if not (licensed_content.owner_address == new_license.owner_address):
try:
-user = new_license.user
+user = await session.get(User, new_license.user_id)
-if user.telegram_id and licensed_content:
+if user and user.telegram_id:
-await (Wrapped_CBotChat(memory._client_telegram_bot, chat_id=user.telegram_id, user=user, db_session=session)).send_content(
-session, licensed_content
+await (
+Wrapped_CBotChat(
+memory._client_telegram_bot,
+chat_id=user.telegram_id,
+user=user,
+db_session=session,
+)
)
+).send_content(session, licensed_content)
-wallet_owner_connection = session.query(WalletConnection).filter_by(
-wallet_address=licensed_content.owner_address,
-invalidated=False
-).order_by(desc(WalletConnection.id)).first()
-wallet_owner_user = wallet_owner_connection.user
-if wallet_owner_user.telegram_id:
-wallet_owner_bot = Wrapped_CBotChat(memory._client_telegram_bot, chat_id=wallet_owner_user.telegram_id, user=wallet_owner_user, db_session=session)
+wallet_owner_connection = (await session.execute(
+select(WalletConnection).where(
+WalletConnection.wallet_address == licensed_content.owner_address,
+WalletConnection.invalidated == False
+).order_by(desc(WalletConnection.id))
+)).scalars().first()
+wallet_owner_user = await session.get(User, wallet_owner_connection.user_id) if wallet_owner_connection else None
+if wallet_owner_user and wallet_owner_user.telegram_id:
+wallet_owner_bot = Wrapped_CBotChat(
+memory._telegram_bot,
+chat_id=wallet_owner_user.telegram_id,
+user=wallet_owner_user,
+db_session=session,
+)
+meta_title = content_metadata.get('title') or content_metadata.get('name') or 'Unknown'
+meta_artist = content_metadata.get('artist')
+formatted_title = f"{meta_artist} {meta_title}" if meta_artist else meta_title
await wallet_owner_bot.send_message(
user.translated('p_licenseWasBought').format(
username=user.front_format(),
nft_address=f'"https://tonviewer.com/{new_license.onchain_address}"',
-content_title=content_metadata.get('name', 'Unknown'),
+content_title=formatted_title,
),
message_type='notification',
)
except BaseException as e:
make_log("IndexerSendNewLicense", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
-new_license.meta = {**new_license.meta, 'notification_sent': True}
-session.commit()
+# Preserve current behavior: do not retry notifications indefinitely.
+new_license.meta = {**(new_license.meta or {}), 'notification_sent': True}
+await session.commit()
+except BaseException as e:
+# Never allow a single broken license/metadata record to block the whole indexer loop.
+make_log("Indexer", f"Error processing new license {getattr(new_license, 'id', None)}: {e}" + '\n' + traceback.format_exc(), level="error")
+new_license.meta = {**(new_license.meta or {}), 'notification_sent': True, 'notification_error': str(e)[:256]}
+await session.commit()
-content_without_cid = session.query(StoredContent).filter(
-StoredContent.content_id == None
-)
+content_without_cid = (await session.execute(select(StoredContent).where(StoredContent.content_id == None))).scalars().all()
for target_content in content_without_cid:
target_cid = target_content.cid.serialize_v2()
make_log("Indexer", f"Content without CID: {target_content.hash}, setting CID: {target_cid}", level="debug")
target_content.content_id = target_cid
-session.commit()
+await session.commit()
-last_known_index_ = session.query(StoredContent).filter(
-StoredContent.onchain_index != None
-).order_by(StoredContent.onchain_index.desc()).first()
+last_known_index_ = (await session.execute(
+select(StoredContent).where(StoredContent.onchain_index != None).order_by(StoredContent.onchain_index.desc())
+)).scalars().first()
last_known_index = last_known_index_.onchain_index if last_known_index_ else 0
last_known_index = max(last_known_index, 0)
+if last_known_index < (MIN_ONCHAIN_INDEX - 1):
+make_log(
+"Indexer",
+f"Adjusting last_known_index from {last_known_index} to {MIN_ONCHAIN_INDEX - 1} (MIN_ONCHAIN_INDEX)",
+level="debug"
+)
+last_known_index = MIN_ONCHAIN_INDEX - 1
make_log("Indexer", f"Last known index: {last_known_index}", level="debug")
+if last_known_index_:
next_item_index = last_known_index + 1
+else:
+next_item_index = 0
resolve_item_result = await toncenter.run_get_method(platform.address.to_string(1, 1, 1), 'get_nft_address_by_index', [['num', next_item_index]])
make_log("Indexer", f"Resolve item result: {resolve_item_result}", level="debug")
@@ -137,6 +183,13 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
assert item_get_data_result['stack'][2][0] == 'num', "Item index is not a number"
item_index = int(item_get_data_result['stack'][2][1], 16)
+if item_index < MIN_ONCHAIN_INDEX:
+make_log(
+"Indexer",
+f"Skip on-chain item {item_index}: below MIN_ONCHAIN_INDEX={MIN_ONCHAIN_INDEX}",
+level="info"
+)
+return platform_found, seqno
assert item_index == next_item_index, "Item index mismatch"
item_platform_address = Cell.one_from_boc(b64decode(item_get_data_result['stack'][3][1]['bytes'])).begin_parse().read_msg_addr()
@@ -196,14 +249,13 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
user_wallet_connection = None
if item_owner_address:
-user_wallet_connection = session.query(WalletConnection).filter(
+user_wallet_connection = (await session.execute(select(WalletConnection).where(
WalletConnection.wallet_address == item_owner_address.to_string(1, 1, 1)
-).first()
+))).scalars().first()
-encrypted_stored_content = session.query(StoredContent).filter(
-StoredContent.hash == item_content_hash_str,
-# StoredContent.type.like("local%")
-).first()
+encrypted_stored_content = (await session.execute(select(StoredContent).where(
+StoredContent.hash == item_content_hash_str
+))).scalars().first()
if encrypted_stored_content:
is_duplicate = encrypted_stored_content.type.startswith("onchain") \
and encrypted_stored_content.onchain_index != item_index
@@ -215,33 +267,57 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
user = None
if user_wallet_connection:
encrypted_stored_content.user_id = user_wallet_connection.user_id
-user = user_wallet_connection.user
+user = await session.get(User, user_wallet_connection.user_id)
if user:
-user_uploader_wrapper = Wrapped_CBotChat(memory._telegram_bot, chat_id=user.telegram_id, user=user, db_session=session)
-await user_uploader_wrapper.send_message(
-user.translated('p_contentWasIndexed').format(
+# Notify user about indexed content via client bot (main UX bot),
+# but keep ability to clean up uploader-bot hint messages.
+user_client_wrapper = Wrapped_CBotChat(
+memory._client_telegram_bot,
+chat_id=user.telegram_id,
+user=user,
+db_session=session,
+)
+user_uploader_wrapper = Wrapped_CBotChat(
+memory._telegram_bot,
+chat_id=user.telegram_id,
+user=user,
+db_session=session,
+)
+ref_id = (user.meta or {}).get('ref_id')
+if not ref_id:
+ref_id = user.ensure_ref_id()
+await session.commit()
+message_text = user.translated('p_contentWasIndexed').format(
item_address=item_address.to_string(1, 1, 1),
item_index=item_index,
-),
-message_type='notification',
-reply_markup=get_inline_keyboard([
-[{
-'text': user.translated('viewTrackAsClient_button'),
-'url': f"https://t.me/{CLIENT_TELEGRAM_BOT_USERNAME}?start=C{encrypted_stored_content.cid.serialize_v2()}"
-}],
-])
+)
+await user_client_wrapper.send_message(
+message_text,
+message_type='notification'
+)
+await user_client_wrapper.send_content(
+session,
+encrypted_stored_content
)
try:
-for hint_message in session.query(KnownTelegramMessage).filter(
+result = await session.execute(select(KnownTelegramMessage).where(
and_(
KnownTelegramMessage.chat_id == user.telegram_id,
KnownTelegramMessage.type == 'hint',
cast(KnownTelegramMessage.meta['encrypted_content_hash'], String) == encrypted_stored_content.hash,
KnownTelegramMessage.deleted == False
)
-).all():
+))
+for hint_message in result.scalars().all():
+# Delete the hint with the bot that originally sent it.
+if hint_message.bot_id == user_client_wrapper.bot_id:
+await user_client_wrapper.delete_message(hint_message.message_id)
+elif hint_message.bot_id == user_uploader_wrapper.bot_id:
await user_uploader_wrapper.delete_message(hint_message.message_id)
except BaseException as e:
make_log("Indexer", f"Error while deleting hint messages: {e}" + '\n' + traceback.format_exc(), level="error")
@@ -259,8 +335,24 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
**encrypted_stored_content.meta,
**item_metadata_packed
}
+encrypted_stored_content.content_id = item_content_cid_str
+try:
+await record_event(
+session,
+'content_indexed',
+{
+'onchain_index': item_index,
+'content_hash': item_content_hash_str,
+'encrypted_cid': item_content_cid_str,
+'item_address': item_address.to_string(1, 1, 1),
+'owner_address': item_owner_address.to_string(1, 1, 1) if item_owner_address else None,
+},
+origin_host=PROJECT_HOST,
+)
+except Exception as exc:
+make_log("Events", f"Failed to record content_indexed event: {exc}", level="warning")
-session.commit()
+await session.commit()
return platform_found, seqno
else:
item_metadata_packed['copied_from'] = encrypted_stored_content.id
@@ -279,10 +371,26 @@ async def indexer_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
encrypted=True,
decrypted_content_id=None,
key_id=None,
+content_id=item_content_cid_str,
updated=datetime.now()
)
session.add(onchain_stored_content)
-session.commit()
+try:
+await record_event(
+session,
+'content_indexed',
+{
+'onchain_index': item_index,
+'content_hash': item_content_hash_str,
+'encrypted_cid': item_content_cid_str,
+'item_address': item_address.to_string(1, 1, 1),
+'owner_address': item_owner_address.to_string(1, 1, 1) if item_owner_address else None,
+},
+origin_host=PROJECT_HOST,
+)
+except Exception as exc:
+make_log("Events", f"Failed to record content_indexed event: {exc}", level="warning")
+await session.commit()
make_log("Indexer", f"Item indexed: {item_content_hash_str}", level="info")
last_known_index += 1
@@ -295,15 +403,27 @@ async def main_fn(memory, ):
seqno = 0
while True:
try:
+rid = __import__('uuid').uuid4().hex[:8]
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(rid)
+except BaseException:
+pass
+make_log("Indexer", f"Loop start", level="debug", rid=rid)
platform_found, seqno = await indexer_loop(memory, platform_found, seqno)
except BaseException as e:
-make_log("Indexer", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
+make_log("Indexer", f"Error: {e}" + '\n' + traceback.format_exc(), level="error", rid=locals().get('rid'))
if platform_found:
await send_status("indexer", f"working (seqno={seqno})")
await asyncio.sleep(5)
seqno += 1
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(None)
+except BaseException:
+pass
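# Editorial summary of the MIN_ONCHAIN_INDEX guard introduced above: with the
# default of 8, last_known_index is floored at 7, so (when prior indexed
# content exists) scanning resumes from item 8, and any item that still
# reports an on-chain index below 8 is skipped rather than re-indexed.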

View File

@@ -3,7 +3,7 @@ from base64 import b64decode
from datetime import datetime, timedelta
from base58 import b58encode
-from sqlalchemy import and_
+from sqlalchemy import and_, or_, select, desc
from tonsdk.boc import Cell
from tonsdk.utils import Address
@@ -18,16 +18,19 @@ from app.core.models.wallet_connection import WalletConnection
from app.core._keyboards import get_inline_keyboard
from app.core.models._telegram import Wrapped_CBotChat
from app.core.storage import db_session
-from app.core._config import CLIENT_TELEGRAM_API_KEY
+from app.core._config import CLIENT_TELEGRAM_API_KEY, CLIENT_TELEGRAM_BOT_USERNAME, PROJECT_HOST
from app.core.models.user import User
from app.core.models import StarsInvoice
+from app.core.events.service import record_event
+from app.core._secrets import hot_pubkey
+from base58 import b58encode
import os
import traceback
async def license_index_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
make_log("LicenseIndex", "Service running", level="debug")
-with db_session() as session:
+async with db_session() as session:
async def check_telegram_stars_transactions():
# Check Telegram Stars transactions and update their paid status
offset = {'desc': 'A fixed, pre-known number of transactions that even our bot does not know about', 'value': 1}['value'] + \
@@ -45,20 +48,47 @@ async def license_index_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
continue
try:
-existing_invoice = session.query(StarsInvoice).filter(
+existing_invoice = (await session.execute(select(StarsInvoice).where(
StarsInvoice.external_id == star_payment.source.invoice_payload
-).first()
+))).scalars().first()
if not existing_invoice:
continue
if star_payment.amount == existing_invoice.amount:
if not existing_invoice.paid:
+user = (await session.execute(select(User).where(User.id == existing_invoice.user_id))).scalars().first()
existing_invoice.paid = True
-session.commit()
-licensed_content = session.query(StoredContent).filter(StoredContent.hash == existing_invoice.content_hash).first()
-user = session.query(User).filter(User.id == existing_invoice.user_id).first()
+existing_invoice.paid_at = datetime.utcnow()
+existing_invoice.telegram_id = getattr(user, 'telegram_id', None)
+existing_invoice.payment_tx_id = getattr(star_payment, 'id', None)
+existing_invoice.payment_node_id = b58encode(hot_pubkey).decode()
+existing_invoice.payment_node_public_host = PROJECT_HOST
+existing_invoice.bot_username = CLIENT_TELEGRAM_BOT_USERNAME
+existing_invoice.is_remote = False
+await record_event(
+session,
+'stars_payment',
+{
+'invoice_id': existing_invoice.external_id,
+'content_hash': existing_invoice.content_hash,
+'amount': existing_invoice.amount,
+'user_id': existing_invoice.user_id,
+'telegram_id': existing_invoice.telegram_id,
+'bot_username': CLIENT_TELEGRAM_BOT_USERNAME,
+'type': existing_invoice.type,
+'payment_node': {
+'public_key': b58encode(hot_pubkey).decode(),
+'public_host': PROJECT_HOST,
+},
+'paid_at': existing_invoice.paid_at.isoformat() + 'Z' if existing_invoice.paid_at else None,
+'payment_tx_id': existing_invoice.payment_tx_id,
+},
+origin_host=PROJECT_HOST,
+)
+await session.commit()
+licensed_content = (await session.execute(select(StoredContent).where(StoredContent.hash == existing_invoice.content_hash))).scalars().first()
+if user and user.telegram_id and licensed_content:
await (Wrapped_CBotChat(memory._client_telegram_bot, chat_id=user.telegram_id, user=user, db_session=session)).send_content(
session, licensed_content
)
@@ -73,33 +103,37 @@ async def license_index_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
make_log("StarsProcessing", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
# Check user wallets for newly appeared NFTs and add them to the database as unidentified
-for user in session.query(User).filter(
-User.last_use > datetime.now() - timedelta(minutes=10)
-).all():
-if not user.wallet_address(session):
-make_log("LicenseIndex", f"User {user.id} has no wallet address", level="info")
+users = (await session.execute(select(User).where(
+User.last_use > datetime.now() - timedelta(hours=4)
+).order_by(User.updated.asc()))).scalars().all()
+for user in users:
+user_wallet_address = await user.wallet_address_async(session)
+if not user_wallet_address:
+make_log("LicenseIndex", f"User {user.id} has no wallet address", level="debug")
continue
+make_log("LicenseIndex", f"User {user.id} has wallet address {user_wallet_address}", level="debug")
last_updated_licenses = user.meta.get('last_updated_licenses')
must_skip = last_updated_licenses and (datetime.now() - datetime.fromisoformat(last_updated_licenses)) < timedelta(minutes=1)
-make_log("LicenseIndex", f"User: {user.id}, last_updated_licenses: {last_updated_licenses}, must_skip: {must_skip}", level="info")
+make_log("LicenseIndex", f"User: {user.id}, last_updated_licenses: {last_updated_licenses}, must_skip: {must_skip}", level="debug")
if must_skip:
continue
try:
await user.scan_owned_user_content(session)
user.meta = {**user.meta, 'last_updated_licenses': datetime.now().isoformat()}
-session.commit()
+await session.commit()
except BaseException as e:
make_log("LicenseIndex", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
# Re-check NFTs for data freshness, including ones that were already verified
-process_content = session.query(UserContent).filter(
+process_content = (await session.execute(select(UserContent).where(
and_(
UserContent.type.startswith('nft/'),
+UserContent.type != 'nft/ignored',
UserContent.updated < (datetime.now() - timedelta(minutes=60)),
)
-).first()
+).order_by(UserContent.updated.asc()))).scalars().first()
if process_content:
make_log("LicenseIndex", f"Syncing content with blockchain: {process_content.id}", level="info")
try:
@@ -108,7 +142,7 @@ async def license_index_loop(memory, platform_found: bool, seqno: int) -> [bool, int]:
make_log("LicenseIndex", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
finally:
process_content.updated = datetime.now()
-session.commit()
+await session.commit()
return platform_found, seqno
@@ -119,14 +153,26 @@
seqno = 0
while True:
try:
+rid = __import__('uuid').uuid4().hex[:8]
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(rid)
+except BaseException:
+pass
+make_log("LicenseIndex", f"Loop start", level="debug", rid=rid)
platform_found, seqno = await license_index_loop(memory, platform_found, seqno)
if platform_found:
await send_status("licenses", f"working (seqno={seqno})")
except BaseException as e:
-make_log("LicenseIndex", f"Error: {e}" + '\n' + traceback.format_exc(), level="error")
+make_log("LicenseIndex", f"Error: {e}" + '\n' + traceback.format_exc(), level="error", rid=locals().get('rid'))
await asyncio.sleep(1)
seqno += 1
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(None)
+except BaseException:
+pass
# if __name__ == '__main__':
# loop = asyncio.get_event_loop()
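# Cross-reference (editorial): the 'stars_payment' payload recorded here has
# the same shape consumed by _apply_event() in the events sync service earlier
# in this changeset, which upserts StarsInvoice by external_id on remote
# nodes, so the event is safe to replay.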

View File

@@ -97,7 +97,7 @@ async def main_fn(memory):
service_wallet.create_transfer_message(
[{
'address': highload_wallet.address.to_string(1, 1, 0),
-'amount': int(0.08 * 10 ** 9),
+'amount': int(0.02 * 10 ** 9),
'send_mode': 1,
'payload': begin_cell().store_uint(0, 32).end_cell()
}], sw_seqno_value
@@ -122,11 +122,18 @@ async def main_fn(memory):
while True:
try:
+rid = __import__('uuid').uuid4().hex[:8]
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(rid)
+except BaseException:
+pass
sw_seqno_value = await get_sw_seqno()
-make_log("TON", f"Service running ({sw_seqno_value})", level="debug")
+make_log("TON", f"Service running ({sw_seqno_value})", level="debug", rid=rid)
-with db_session() as session:
+async with db_session() as session:
# Check previously sent messages
+await send_status("ton_daemon", f"working: processing in-txs (seqno={sw_seqno_value})")
async def process_incoming_transaction(transaction: dict):
transaction_hash = transaction['transaction_id']['hash']
transaction_lt = str(transaction['transaction_id']['lt'])
@@ -141,14 +148,17 @@ async def main_fn(memory):
in_msg_created_at = in_msg_slice.read_uint(64)
in_msg_epoch = int(in_msg_created_at // (60 * 60))
in_msg_seqno = HighloadQueryId.from_query_id(in_msg_query_id).to_seqno()
+from sqlalchemy import select
in_msg_blockchain_task = (
-session.query(BlockchainTask).filter(
+await session.execute(
+select(BlockchainTask).where(
and_(
BlockchainTask.seqno == in_msg_seqno,
BlockchainTask.epoch == in_msg_epoch,
)
)
-).first()
+)
+).scalars().first()
if not in_msg_blockchain_task:
return
@@ -162,7 +172,7 @@ async def main_fn(memory):
try:
await process_incoming_message(blockchain_message)
except BaseException as e:
-pass # make_log("TON_Daemon", f"Error while processing incoming message: {e}" + '\n' + traceback.format_exc(), level='debug')
+pass # make_log("TON_Daemon", f"Error while processing incoming message: {e}" + '\n' + traceback.format_exc(), level='debug', rid=rid)
try:
sw_transactions = await toncenter.get_transactions(highload_wallet.address.to_string(1, 1, 1), limit=100)
@@ -170,17 +180,18 @@ async def main_fn(memory):
try:
await process_incoming_transaction(sw_transaction)
except BaseException as e:
-make_log("TON_Daemon", f"Error while processing incoming transaction: {e}", level="debug")
+make_log("TON_Daemon", f"Error while processing incoming transaction: {e}", level="debug", rid=rid)
except BaseException as e:
-make_log("TON_Daemon", f"Error while getting service wallet transactions: {e}", level="ERROR")
+make_log("TON_Daemon", f"Error while getting service wallet transactions: {e}", level="ERROR", rid=rid)
+await send_status("ton_daemon", f"working: processing out-txs (seqno={sw_seqno_value})")
# Send the signed messages
-for blockchain_task in (
-session.query(BlockchainTask).filter(
-BlockchainTask.status == 'processing',
-).order_by(BlockchainTask.updated.asc()).all()
-):
+from sqlalchemy import select
+_processing = (await session.execute(select(BlockchainTask).where(
+BlockchainTask.status == 'processing'
+).order_by(BlockchainTask.updated.asc()))).scalars().all()
+for blockchain_task in _processing:
-make_log("TON_Daemon", f"Processing task (processing) {blockchain_task.id}")
+make_log("TON_Daemon", f"Processing task (processing) {blockchain_task.id}", rid=rid)
query_boc = bytes.fromhex(blockchain_task.meta['signed_message'])
errors_list = []
@@ -213,17 +224,17 @@ async def main_fn(memory):
await asyncio.sleep(0.5)
+await send_status("ton_daemon", f"working: creating new messages (seqno={sw_seqno_value})")
# Create new signatures
-for blockchain_task in (
-session.query(BlockchainTask).filter(BlockchainTask.status == 'wait').all()
-):
+_waiting = (await session.execute(select(BlockchainTask).where(BlockchainTask.status == 'wait'))).scalars().all()
+for blockchain_task in _waiting:
try:
# Check processing tasks in current epoch < 3_000_000
-if (
-session.query(BlockchainTask).filter(
-BlockchainTask.epoch == blockchain_task.epoch,
-).count() > 3_000_000
-):
+from sqlalchemy import func
+_cnt = (await session.execute(select(func.count()).select_from(BlockchainTask).where(
+BlockchainTask.epoch == blockchain_task.epoch
+))).scalar() or 0
+if _cnt > 3_000_000:
make_log("TON", f"Too many processing tasks in epoch {blockchain_task.epoch}", level="error")
await send_status("ton_daemon", f"working: too many tasks in epoch {blockchain_task.epoch}")
await asyncio.sleep(5)
@@ -232,10 +243,11 @@ async def main_fn(memory):
sign_created = int(datetime.utcnow().timestamp()) - 60
try:
current_epoch = int(datetime.utcnow().timestamp() // (60 * 60))
+from sqlalchemy import func
max_epoch_seqno = (
-session.query(func.max(BlockchainTask.seqno)).filter(
+(await session.execute(select(func.max(BlockchainTask.seqno)).where(
BlockchainTask.epoch == current_epoch
-).scalar() or 0
+))).scalar() or 0
)
current_epoch_shift = 3_000_000 if current_epoch % 2 == 0 else 0
current_seqno = max_epoch_seqno + 1 + (current_epoch_shift if max_epoch_seqno == 0 else 0)
@@ -255,18 +267,18 @@ async def main_fn(memory):
)
query_boc = query['message'].to_boc(False)
except BaseException as e:
-make_log("TON", f"Error creating transfer message: {e}", level="error")
+make_log("TON", f"Error creating transfer message: {e}", level="error", rid=rid)
query_boc = begin_cell().end_cell().to_boc(False)
blockchain_task.meta = {
**blockchain_task.meta,
'sign_created': sign_created,
-'signed_message': query_boc,
+'signed_message': query_boc.hex(),
}
await session.commit()
-make_log("TON", f"Created signed message for task {blockchain_task.id}" + '\n' + traceback.format_exc(), level="info")
+make_log("TON", f"Created signed message for task {blockchain_task.id}" + '\n' + traceback.format_exc(), level="info", rid=rid)
except BaseException as e:
-make_log("TON", f"Error processing task {blockchain_task.id}: {e}" + '\n' + traceback.format_exc(), level="error")
+make_log("TON", f"Error processing task {blockchain_task.id}: {e}" + '\n' + traceback.format_exc(), level="error", rid=rid)
continue
await asyncio.sleep(1)
@@ -274,14 +286,17 @@ async def main_fn(memory):
await asyncio.sleep(1)
await send_status("ton_daemon", f"working (seqno={sw_seqno_value})")
except BaseException as e:
-make_log("TON", f"Error: {e}", level="error")
+make_log("TON", f"Error: {e}", level="error", rid=locals().get('rid'))
await asyncio.sleep(3)
+finally:
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(None)
+except BaseException:
+pass
# if __name__ == '__main__':
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main())
# loop.close()
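# Editorial note on the query-id scheme above: it appears designed so that
# consecutive epochs draw from disjoint seqno ranges (even-numbered epochs
# start at 3_000_001, odd ones at 1) while the previous hour's tasks may still
# be in flight; this pairs with the 3_000_000 per-epoch cap checked before
# signing.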

View File

@@ -13,14 +13,26 @@ async def main_fn(memory):
seqno = 0
while True:
try:
-make_log("Uploader", "Service running", level="debug")
+rid = __import__('uuid').uuid4().hex[:8]
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(rid)
+except BaseException:
+pass
+make_log("Uploader", f"Service running", level="debug", rid=rid)
await uploader_loop()
await asyncio.sleep(5)
await send_status("uploader_daemon", f"working (seqno={seqno})")
seqno += 1
except BaseException as e:
-make_log("Uploader", f"Error: {e}", level="error")
+make_log("Uploader", f"Error: {e}", level="error", rid=locals().get('rid'))
await asyncio.sleep(3)
+finally:
+try:
+from app.core.log_context import ctx_rid
+ctx_rid.set(None)
+except BaseException:
+pass
# if __name__ == '__main__':
# loop = asyncio.get_event_loop()
@@ -28,5 +40,3 @@ async def main_fn(memory):
# loop.close()

View File

@ -1,3 +1,6 @@
from base64 import b32decode
from typing import Optional, Tuple
from base58 import b58encode, b58decode from base58 import b58encode, b58decode
from tonsdk.boc import begin_cell from tonsdk.boc import begin_cell
@ -12,25 +15,50 @@ from app.core._utils.string_binary import string_to_bytes_fixed_size, bytes_to_s
# cid_v2#_ cid_version:uint8 content_sha256:uint256 *[Param]s = CIDv2; # cid_v2#_ cid_version:uint8 content_sha256:uint256 *[Param]s = CIDv2;
class ContentId: class ContentId:
"""Unified abstraction for legacy ContentID and ENCF/IPFS CID strings."""
def __init__( def __init__(
self, self,
version: int = None, version: Optional[int] = None,
content_hash: bytes = None, # only SHA256 content_hash: Optional[bytes] = None, # only SHA256
onchain_index: int = None, onchain_index: Optional[int] = None,
accept_type: str = None, accept_type: Optional[str] = None,
encryption_key_sha256: bytes = None, encryption_key_sha256: Optional[bytes] = None,
*,
raw_value: Optional[str] = None,
cid_format: Optional[str] = None,
multibase_prefix: Optional[str] = None,
multicodec: Optional[int] = None,
multihash_code: Optional[int] = 0x12,
multihash_length: Optional[int] = 32,
): ):
self.version = version self.version = version
self.content_hash = content_hash self.content_hash = content_hash
self.onchain_index = onchain_index or -1 self.onchain_index = onchain_index if onchain_index is not None else -1
self.accept_type = accept_type self.accept_type = accept_type
self.encryption_key_sha256 = encryption_key_sha256 self.encryption_key_sha256 = encryption_key_sha256
if self.encryption_key_sha256: if self.encryption_key_sha256:
assert len(self.encryption_key_sha256) == 32, "Invalid encryption key length" assert len(self.encryption_key_sha256) == 32, "Invalid encryption key length"
self._raw_value = raw_value
if cid_format:
self.cid_format = cid_format
else:
if self.version == 1:
self.cid_format = 'content_id_v1'
elif self.version == 2:
self.cid_format = 'content_id_v2'
else:
self.cid_format = 'content_id_v2'
self.multibase_prefix = multibase_prefix
self.multicodec = multicodec
self.multihash_code = multihash_code
self.multihash_length = multihash_length
@property @property
def content_hash_b58(self) -> str: def content_hash_b58(self) -> str:
assert self.content_hash, "Content hash is not set"
return b58encode(self.content_hash).decode() return b58encode(self.content_hash).decode()
@property @property
@ -38,6 +66,11 @@ class ContentId:
return self.onchain_index if (not (self.onchain_index is None) and self.onchain_index >= 0) else None return self.onchain_index if (not (self.onchain_index is None) and self.onchain_index >= 0) else None
def serialize_v2(self, include_accept_type=False) -> str: def serialize_v2(self, include_accept_type=False) -> str:
if self.cid_format == 'ipfs':
if self._raw_value:
return self._raw_value
return self._serialize_ipfs()
cid_bin = ( cid_bin = (
(2).to_bytes(1, 'big') # cid version (2).to_bytes(1, 'big') # cid version
+ self.content_hash + self.content_hash
@ -60,6 +93,8 @@ class ContentId:
return b58encode(cid_bin).decode() return b58encode(cid_bin).decode()
def serialize_v1(self) -> str: def serialize_v1(self) -> str:
if self.cid_format == 'ipfs':
raise ValueError("Cannot serialize IPFS CID as ContentId v1")
at_bin = string_to_bytes_fixed_size(self.accept_type, 15)
assert len(self.content_hash) == 32, "Invalid hash length"
if self.onchain_index < 0:
@ -133,13 +168,31 @@ class ContentId:
@classmethod
def deserialize(cls, cid: str):
if not cid:
raise ValueError("Empty content id provided")
first_char = cid[0]
if first_char in ('b', 'B', 'z', 'Z'):
return cls.from_ipfs(cid)
try:
cid_version = int.from_bytes(b58decode(cid)[0:1], 'big')
except Exception:
cid_version = None
if cid_version == 1:
obj = cls.from_v1(cid)
obj._raw_value = cid
return obj
if cid_version == 2:
obj = cls.from_v2(cid)
obj._raw_value = cid
return obj
try:
return cls.from_ipfs(cid)
except Exception as exc:
raise ValueError(f"Invalid cid format: {exc}") from exc
def json_format(self):
return {
@ -147,7 +200,130 @@ class ContentId:
"content_hash": self.content_hash_b58, "content_hash": self.content_hash_b58,
"onchain_index": self.safe_onchain_index, "onchain_index": self.safe_onchain_index,
"accept_type": self.accept_type, "accept_type": self.accept_type,
"encryption_key_sha256": b58encode(self.encryption_key_sha256).decode() if self.encryption_key_sha256 else None "encryption_key_sha256": b58encode(self.encryption_key_sha256).decode() if self.encryption_key_sha256 else None,
"format": self.cid_format,
"raw": self.serialize_v2() if self.cid_format == 'ipfs' else None,
}
# --- helpers for IPFS/ENCF CID handling ---------------------------------
@staticmethod
def _decode_multibase(cid_str: str) -> Tuple[bytes, Optional[str]]:
prefix = cid_str[0]
if prefix in ('b', 'B'):
payload = cid_str[1:]
padding = (-len(payload)) % 8
decoded = b32decode(payload.upper() + ('=' * padding), casefold=True)
return decoded, prefix.lower()
if prefix in ('z', 'Z'):
return b58decode(cid_str[1:]), prefix.lower()
# CIDv0 without explicit prefix
return b58decode(cid_str), None
@staticmethod
def _read_varint(data: bytes, offset: int) -> Tuple[int, int]:
result = 0
shift = 0
while True:
if offset >= len(data):
raise ValueError("truncated varint")
byte = data[offset]
offset += 1
result |= (byte & 0x7F) << shift
if not (byte & 0x80):
break
shift += 7
if shift > 63:
raise ValueError("varint overflow")
return result, offset
@classmethod
def from_ipfs(cls, cid: str):
cid = cid.strip()
payload, multibase_prefix = cls._decode_multibase(cid)
idx = 0
version: Optional[int] = None
codec: Optional[int] = None
if multibase_prefix is not None:
version, idx = cls._read_varint(payload, idx)
if version not in (0, 1):
raise ValueError(f"unsupported CID version: {version}")
if version == 1:
codec, idx = cls._read_varint(payload, idx)
else:
codec = 0x70 # dag-pb default for CIDv0
else:
# CIDv0 without explicit version/codec
version = 0
codec = 0x70
multihash_code, idx = cls._read_varint(payload, idx)
multihash_length, idx = cls._read_varint(payload, idx)
digest = payload[idx:idx + multihash_length]
if len(digest) != multihash_length:
raise ValueError("truncated multihash digest")
if multihash_length != 32:
raise ValueError("unsupported multihash length (expected 32 bytes)")
if multihash_code != 0x12:
raise ValueError(f"unsupported multihash code: {hex(multihash_code)}")
return cls(
version=version,
content_hash=digest,
onchain_index=None,
accept_type=None,
encryption_key_sha256=None,
raw_value=cid,
cid_format='ipfs',
multibase_prefix=multibase_prefix,
multicodec=codec,
multihash_code=multihash_code,
multihash_length=multihash_length,
)
def _serialize_ipfs(self) -> str:
if not self.content_hash:
raise ValueError("Cannot serialize IPFS CID without content hash")
if self.multibase_prefix is None:
# default to CIDv0 (base58btc) dag-pb
multihash = self._encode_varint(self.multihash_code or 0x12) + self._encode_varint(self.multihash_length or len(self.content_hash)) + self.content_hash
return b58encode(multihash).decode()
version_bytes = self._encode_varint(self.version or 1)
codec_bytes = b''
if (self.version or 1) == 1:
codec_bytes = self._encode_varint(self.multicodec or 0x70)
multihash = (
version_bytes +
codec_bytes +
self._encode_varint(self.multihash_code or 0x12) +
self._encode_varint(self.multihash_length or len(self.content_hash)) +
self.content_hash
)
if self.multibase_prefix == 'z':
return 'z' + b58encode(multihash).decode()
if self.multibase_prefix == 'b':
from base64 import b32encode
encoded = b32encode(multihash).decode().rstrip('=').lower()
return 'b' + encoded
# Fallback to base58btc without prefix
return b58encode(multihash).decode()
@staticmethod
def _encode_varint(value: int) -> bytes:
if value < 0:
raise ValueError("varint cannot encode negative values")
out = bytearray()
while True:
to_write = value & 0x7F
value >>= 7
if value:
out.append(to_write | 0x80)
else:
out.append(to_write)
break
return bytes(out)
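
A minimal usage sketch for the new dispatch logic (the import path and input values are assumptions, not part of this diff): a CIDv0 string carries no multibase prefix, so it should fall through the base58 version probe into from_ipfs and round-trip verbatim.

# Hypothetical usage sketch; the module path below is assumed.
from hashlib import sha256
from base58 import b58encode
from app.core._utils.content_id import ContentId  # assumed import path

digest = sha256(b"hello world").digest()
cid_v0 = b58encode(b"\x12\x20" + digest).decode()  # multihash: sha2-256 code + length + digest

cid = ContentId.deserialize(cid_v0)
assert cid.cid_format == 'ipfs' and cid.content_hash == digest
assert cid.serialize_v2() == cid_v0  # raw IPFS value is returned verbatim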

View File

@ -1,8 +1,11 @@
import json
import asyncio
import os
import string
import aiofiles
from hashlib import sha256
import re # Added import
import unicodedata # Added import
from base58 import b58encode
from datetime import datetime, timedelta
@ -23,7 +26,9 @@ async def create_new_content(
content_hash_bin = sha256(content_bin).digest()
content_hash_b58 = b58encode(content_hash_bin).decode()
from sqlalchemy import select
result = await db_session.execute(select(StoredContent).where(StoredContent.hash == content_hash_b58))
new_content = result.scalars().first()
if new_content:
return new_content, False
@ -35,8 +40,9 @@ async def create_new_content(
)
db_session.add(new_content)
await db_session.commit()
result = await db_session.execute(select(StoredContent).where(StoredContent.hash == content_hash_b58))
new_content = result.scalars().first()
assert new_content, "Content not created (through utils)"
content_filepath = os.path.join(UPLOADS_DIR, content_hash_b58)
async with aiofiles.open(content_filepath, 'wb') as file:
@ -45,34 +51,139 @@ async def create_new_content(
return new_content, True
# New helper functions for string cleaning
def _remove_emojis(text: str) -> str:
"""Removes common emoji characters from a string."""
# This regex covers many common emojis but might not be exhaustive.
emoji_pattern = re.compile(
"["
"\U0001F600-\U0001F64F" # emoticons
"\U0001F300-\U0001F5FF" # symbols & pictographs
"\U0001F680-\U0001F6FF" # transport & map symbols
"\U0001F1E0-\U0001F1FF" # flags (iOS)
"\U00002702-\U000027B0" # Dingbats
"\U000024C2-\U0001F251" # Various symbols
"]+",
flags=re.UNICODE,
)
return emoji_pattern.sub(r'', text)
def _clean_text_content(text: str, is_hashtag: bool = False) -> str:
"""
Cleans a string by removing emojis and unusual characters.
Level 1: Emoji removal.
Level 2: Unusual character cleaning (specific logic for hashtags).
"""
if not isinstance(text, str):
return ""
# Level 1: Remove emojis
text_no_emojis = _remove_emojis(text)
# Level 2: Clean unusual characters
if is_hashtag:
# Convert to lowercase
processed_text = text_no_emojis.lower()
# Replace hyphens, dots, spaces (and sequences) with a single underscore
processed_text = re.sub(r'[\s.-]+', '_', processed_text)
# Keep only lowercase letters (a-z), digits (0-9), and underscores
cleaned_text = re.sub(r'[^a-z0-9_]', '', processed_text)
# Remove leading/trailing underscores
cleaned_text = cleaned_text.strip('_')
# Consolidate multiple underscores into one
cleaned_text = re.sub(r'_+', '_', cleaned_text)
return cleaned_text
else: # For title, authors, or general text
# Normalize Unicode characters (e.g., NFKD form)
nfkd_form = unicodedata.normalize('NFKD', text_no_emojis)
# Keep letters (Unicode), numbers (Unicode), spaces, and basic punctuation
# This allows for a wider range of characters suitable for titles/names.
cleaned_text_chars = []
for char_in_nfkd in nfkd_form:
if not unicodedata.combining(char_in_nfkd): # remove combining diacritics
# Keep letters, numbers, spaces, and specific punctuation
cat = unicodedata.category(char_in_nfkd)
if cat.startswith('L') or cat.startswith('N') or cat.startswith('Z') or char_in_nfkd in '.,!?-':
cleaned_text_chars.append(char_in_nfkd)
cleaned_text = "".join(cleaned_text_chars)
# Normalize multiple spaces to a single space and strip leading/trailing spaces
cleaned_text = re.sub(r'\s+', ' ', cleaned_text).strip()
return cleaned_text
async def create_metadata_for_item(
db_session,
title: str = None,
artist: str = None,
cover_url: str = None,
authors: list = None,
hashtags: list = [],
downloadable: bool = False,
) -> StoredContent:
assert title, "No title provided"
# assert cover_url, "No cover_url provided" # Original comment, kept as is
# Clean title using the new helper function
cleaned_title = _clean_text_content(title, is_hashtag=False)
cleaned_title = cleaned_title[:100].strip() # Truncate and strip after cleaning
assert len(cleaned_title) > 3, f"Cleaned title '{cleaned_title}' (from original '{title}') is too short or became empty after cleaning."
cleaned_artist = None
if artist:
cleaned_artist = _clean_text_content(artist, is_hashtag=False)
cleaned_artist = cleaned_artist[:100].strip()
if not cleaned_artist:
cleaned_artist = None
display_name = f"{cleaned_artist} {cleaned_title}" if cleaned_artist else cleaned_title
# Process and clean hashtags
processed_hashtags = []
if hashtags and isinstance(hashtags, list):
for _h_tag_text in hashtags:
if isinstance(_h_tag_text, str):
cleaned_h = _clean_text_content(_h_tag_text, is_hashtag=True)
if cleaned_h: # Add only if not empty after cleaning
processed_hashtags.append(cleaned_h)
# Ensure uniqueness of hashtags and limit their count (e.g., to first 10 unique)
# Using dict.fromkeys to preserve order while ensuring uniqueness
processed_hashtags = list(dict.fromkeys(processed_hashtags))[:10]
item_metadata = {
'name': display_name,
'title': cleaned_title,
'display_name': display_name,
'downloadable': downloadable,
'tags': processed_hashtags, # New field for storing the list of cleaned hashtags
'attributes': [],
}
if cleaned_artist:
item_metadata['artist'] = cleaned_artist
item_metadata['attributes'].append({
'trait_type': 'Artist',
'value': cleaned_artist,
})
# Generate description from the processed hashtags
item_metadata['description'] = ' '.join([f"#{h}" for h in processed_hashtags if h])
if cover_url:
item_metadata['image'] = cover_url
# Clean authors
cleaned_authors = []
if authors and isinstance(authors, list):
for author_name in (authors or [])[:500]: # Limit number of authors
if isinstance(author_name, str):
# Apply general cleaning to author names
# This replaces the old logic: ''.join([_a_ch for _a_ch in _a if len(_a_ch.encode()) == 1])
cleaned_author = _clean_text_content(author_name, is_hashtag=False)
if cleaned_author.strip(): # Ensure not empty
cleaned_authors.append(cleaned_author.strip()[:100]) # Limit length of each author name
item_metadata['authors'] = cleaned_authors
# Upload file
metadata_bin = json.dumps(item_metadata).encode()

View File

@ -0,0 +1,112 @@
from __future__ import annotations
import hmac
import hashlib
import struct
from typing import BinaryIO, Iterator, AsyncIterator
from cryptography.hazmat.primitives.ciphers.aead import AESGCMSIV
MAGIC = b"ENCF"
VERSION = 1
SCHEME_AES_GCM_SIV = 0x01
def _derive_nonce(salt: bytes, idx: int) -> bytes:
b = idx.to_bytes(8, 'big')
return hmac.new(salt, b, hashlib.sha256).digest()[:12]
def build_header(chunk_bytes: int, salt: bytes) -> bytes:
assert 0 < chunk_bytes <= (1 << 31)
assert 1 <= len(salt) <= 255
# MAGIC(4) | ver(1) | scheme(1) | chunk_bytes(4,BE) | salt_len(1) | salt | reserved(5)
hdr = bytearray()
hdr += MAGIC
hdr += bytes([VERSION])
hdr += bytes([SCHEME_AES_GCM_SIV])
hdr += struct.pack(">I", int(chunk_bytes))
hdr += bytes([len(salt)])
hdr += salt
hdr += b"\x00" * 5
return bytes(hdr)
def encrypt_file_to_encf(src: BinaryIO, key: bytes, chunk_bytes: int, salt: bytes) -> Iterator[bytes]:
"""
Yield ENCF v1 stream using AES-GCM-SIV per chunk with deterministic nonces.
Frame: [p_len:4][cipher][tag(16)].
"""
yield build_header(chunk_bytes, salt)
idx = 0
cipher = AESGCMSIV(key)
while True:
block = src.read(chunk_bytes)
if not block:
break
nonce = _derive_nonce(salt, idx)
ct_and_tag = cipher.encrypt(nonce, block, associated_data=None)
# Split tag
tag = ct_and_tag[-16:]
ct = ct_and_tag[:-16]
yield struct.pack(">I", len(block))
yield ct
yield tag
idx += 1
async def decrypt_encf_to_file(byte_iter: AsyncIterator[bytes], key: bytes, out_path: str) -> None:
"""Parse ENCF v1 (AES-GCM-SIV) and write plaintext to out_path."""
import aiofiles
buf = bytearray()
async def _fill(n: int):
nonlocal buf
while len(buf) < n:
try:
chunk = await byte_iter.__anext__()
except StopAsyncIteration:
break
if chunk:
buf.extend(chunk)
# header minimal
await _fill(11)
if buf[:4] != MAGIC:
raise ValueError("bad magic")
version = buf[4]
scheme = buf[5]
if version != 1 or scheme != SCHEME_AES_GCM_SIV:
raise ValueError("unsupported encf header")
chunk_bytes = struct.unpack(">I", bytes(buf[6:10]))[0]
salt_len = buf[10]
hdr_len = 4 + 1 + 1 + 4 + 1 + salt_len + 5
await _fill(hdr_len)
salt = bytes(buf[11:11 + salt_len])
del buf[:hdr_len]
cipher = AESGCMSIV(key)
async with aiofiles.open(out_path, 'wb') as out:
idx = 0
TAG_LEN = 16
while True:
await _fill(4)
if len(buf) == 0:
break
if len(buf) < 4:
raise ValueError("truncated frame length")
p_len = struct.unpack(">I", bytes(buf[:4]))[0]
del buf[:4]
await _fill(p_len + TAG_LEN)
if len(buf) < p_len + TAG_LEN:
raise ValueError("truncated cipher/tag")
ct = bytes(buf[:p_len])
tag = bytes(buf[p_len:p_len+TAG_LEN])
del buf[:p_len+TAG_LEN]
nonce = _derive_nonce(salt, idx)
pt = cipher.decrypt(nonce, ct + tag, associated_data=None)
await out.write(pt)
idx += 1
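
A self-contained round-trip sketch for this module, assuming the two functions above are in scope; the key, salt, payload, and output path are all illustrative. The helper adapts the synchronous frame iterator to the AsyncIterator the decoder expects.

import io, os, asyncio

async def _as_async_iter(frames):
    # Adapt a list of encrypted frames to an async byte iterator.
    for frame in frames:
        yield frame

async def _demo():
    key = os.urandom(32)   # AES-256-GCM-SIV
    salt = os.urandom(16)
    frames = list(encrypt_file_to_encf(io.BytesIO(b"A" * 5000), key, chunk_bytes=1024, salt=salt))
    await decrypt_encf_to_file(_as_async_iter(frames), key, "/tmp/encf_demo.bin")  # path is illustrative

asyncio.run(_demo())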

View File

@ -0,0 +1,118 @@
from __future__ import annotations
import hmac
import hashlib
import os
import struct
from typing import BinaryIO, Iterator, AsyncIterator
import aiofiles
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
MAGIC = b"ENCF"
VERSION = 1
SCHEME_AES_GCM = 0x03
CHUNK_BYTES = int(os.getenv("CRYPTO_CHUNK_BYTES", "1048576"))
def _derive_nonce(salt: bytes, idx: int) -> bytes:
"""Derive a deterministic 12-byte nonce from salt and chunk index."""
if len(salt) < 12:
raise ValueError("salt must be at least 12 bytes")
idx_bytes = idx.to_bytes(8, "big")
return hmac.new(salt, idx_bytes, hashlib.sha256).digest()[:12]
def build_header(chunk_bytes: int, salt: bytes) -> bytes:
if not (0 < chunk_bytes <= (1 << 31)):
raise ValueError("chunk_bytes must be between 1 and 2^31")
if not (1 <= len(salt) <= 255):
raise ValueError("salt length must be 1..255 bytes")
# MAGIC(4) | ver(1) | scheme(1) | chunk_bytes(4,BE) | salt_len(1) | salt | reserved(5 zeros)
hdr = bytearray()
hdr += MAGIC
hdr.append(VERSION)
hdr.append(SCHEME_AES_GCM)
hdr += struct.pack(">I", int(chunk_bytes))
hdr.append(len(salt))
hdr += salt
hdr += b"\x00" * 5
return bytes(hdr)
def encrypt_file_to_encf(src: BinaryIO, key: bytes, chunk_bytes: int, salt: bytes) -> Iterator[bytes]:
"""Yield ENCF v1 frames encrypted with AES-GCM."""
if len(key) not in (16, 24, 32):
raise ValueError("AES-GCM key must be 128, 192 or 256 bits long")
cipher = AESGCM(key)
yield build_header(chunk_bytes, salt)
idx = 0
while True:
block = src.read(chunk_bytes)
if not block:
break
nonce = _derive_nonce(salt, idx)
ct = cipher.encrypt(nonce, block, associated_data=None)
tag = ct[-16:]
data = ct[:-16]
yield struct.pack(">I", len(block))
yield data
yield tag
idx += 1
async def decrypt_encf_to_file(byte_iter: AsyncIterator[bytes], key: bytes, out_path: str) -> None:
"""Parse ENCF v1 (AES-GCM) stream and write plaintext to `out_path`."""
if len(key) not in (16, 24, 32):
raise ValueError("AES-GCM key must be 128, 192 or 256 bits long")
cipher = AESGCM(key)
buf = bytearray()
async def _fill(n: int) -> None:
nonlocal buf
while len(buf) < n:
try:
chunk = await byte_iter.__anext__()
except StopAsyncIteration:
break
if chunk:
buf.extend(chunk)
# Parse header
await _fill(11)
if buf[:4] != MAGIC:
raise ValueError("bad magic")
version = buf[4]
scheme = buf[5]
if version != VERSION or scheme != SCHEME_AES_GCM:
raise ValueError("unsupported ENCF header")
chunk_bytes = struct.unpack(">I", bytes(buf[6:10]))[0]
salt_len = buf[10]
hdr_len = 4 + 1 + 1 + 4 + 1 + salt_len + 5
await _fill(hdr_len)
salt = bytes(buf[11:11 + salt_len])
del buf[:hdr_len]
async with aiofiles.open(out_path, "wb") as out:
idx = 0
TAG_LEN = 16
while True:
await _fill(4)
if len(buf) == 0:
break
if len(buf) < 4:
raise ValueError("truncated frame length")
p_len = struct.unpack(">I", bytes(buf[:4]))[0]
del buf[:4]
await _fill(p_len + TAG_LEN)
if len(buf) < p_len + TAG_LEN:
raise ValueError("truncated cipher/tag")
ct = bytes(buf[:p_len])
tag = bytes(buf[p_len:p_len + TAG_LEN])
del buf[:p_len + TAG_LEN]
nonce = _derive_nonce(salt, idx)
pt = cipher.decrypt(nonce, ct + tag, associated_data=None)
await out.write(pt)
idx += 1
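
A small sanity sketch of the header layout documented in the build_header comment above; the chunk size and salt are illustrative values, not taken from this diff.

hdr = build_header(chunk_bytes=1048576, salt=b"\x00" * 16)  # illustrative salt
assert hdr[:4] == b"ENCF"
assert hdr[4] == VERSION and hdr[5] == SCHEME_AES_GCM
assert int.from_bytes(hdr[6:10], "big") == 1048576
assert hdr[10] == 16 and len(hdr) == 4 + 1 + 1 + 4 + 1 + 16 + 5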

View File

@ -0,0 +1,135 @@
from __future__ import annotations
import os
import struct
from typing import BinaryIO, Iterator, AsyncIterator
from Crypto.Cipher import AES
MAGIC = b"ENCF"
VERSION = 1
# Scheme codes
SCHEME_AES_SIV = 0x02 # RFC5297 AES-SIV (CMAC-based)
def build_header(chunk_bytes: int, salt: bytes, scheme: int = SCHEME_AES_SIV) -> bytes:
assert 0 < chunk_bytes <= (1 << 31)
assert 1 <= len(salt) <= 255
# Layout: MAGIC(4) | version(1) | scheme(1) | chunk_bytes(4,BE) | salt_len(1) | salt(N) | reserved(5 zeros)
hdr = bytearray()
hdr += MAGIC
hdr += bytes([VERSION])
hdr += bytes([scheme])
hdr += struct.pack(">I", int(chunk_bytes))
hdr += bytes([len(salt)])
hdr += salt
hdr += b"\x00" * 5
return bytes(hdr)
def parse_header(buf: bytes) -> tuple[int, int, int, bytes, int]:
if len(buf) < 4 + 1 + 1 + 4 + 1:
raise ValueError("header too short")
if buf[:4] != MAGIC:
raise ValueError("bad magic")
version = buf[4]
scheme = buf[5]
chunk_bytes = struct.unpack(">I", buf[6:10])[0]
salt_len = buf[10]
needed = 4 + 1 + 1 + 4 + 1 + salt_len + 5
if len(buf) < needed:
raise ValueError("incomplete header")
salt = buf[11:11 + salt_len]
# reserved 5 bytes at the end ignored
return version, scheme, chunk_bytes, salt, needed
def _ad(salt: bytes, idx: int) -> bytes:
return salt + struct.pack(">Q", idx)
def encrypt_file_to_encf(src: BinaryIO, key: bytes, chunk_bytes: int, salt: bytes) -> Iterator[bytes]:
"""
Yield ENCF v1 stream bytes: [header] then for each chunk: [p_len:4][cipher][tag(16)].
Uses AES-SIV (RFC5297) with per-chunk associated data salt||index.
"""
yield build_header(chunk_bytes, salt, SCHEME_AES_SIV)
idx = 0
while True:
block = src.read(chunk_bytes)
if not block:
break
siv = AES.new(key, AES.MODE_SIV) # new object per message
siv.update(_ad(salt, idx))
ciph, tag = siv.encrypt_and_digest(block)
yield struct.pack(">I", len(block))
yield ciph
yield tag
idx += 1
async def decrypt_encf_to_file(byte_iter: AsyncIterator[bytes], key: bytes, out_path: str) -> None:
"""
Parse ENCF v1 stream from async byte iterator and write plaintext to out_path.
"""
import aiofiles
from Crypto.Cipher import AES as _AES
buf = bytearray()
async def _fill(n: int):
"""Ensure at least n bytes in buffer (or EOF)."""
nonlocal buf
while len(buf) < n:
try:
chunk = await byte_iter.__anext__()
except StopAsyncIteration:
break
if chunk:
buf.extend(chunk)
# Read and parse header
await _fill(4 + 1 + 1 + 4 + 1) # minimal header
# Might still be incomplete if salt_len > 0; keep filling progressively
# First, get preliminary to know salt_len
if len(buf) < 11:
await _fill(11)
if buf[:4] != MAGIC:
raise ValueError("bad magic")
salt_len = buf[10]
hdr_len = 4 + 1 + 1 + 4 + 1 + salt_len + 5
await _fill(hdr_len)
version, scheme, chunk_bytes, salt, consumed = parse_header(bytes(buf))
del buf[:consumed]
if version != 1:
raise ValueError("unsupported ENCF version")
if scheme != SCHEME_AES_SIV:
raise ValueError("unsupported scheme")
async with aiofiles.open(out_path, 'wb') as out:
idx = 0
TAG_LEN = 16
while True:
# Need at least 4 bytes for p_len
await _fill(4)
if len(buf) == 0:
break # EOF exactly on boundary
if len(buf) < 4:
raise ValueError("truncated frame length")
p_len = struct.unpack(">I", bytes(buf[:4]))[0]
del buf[:4]
# Now need p_len + 16 bytes
await _fill(p_len + TAG_LEN)
if len(buf) < p_len + TAG_LEN:
raise ValueError("truncated cipher/tag")
c = bytes(buf[:p_len])
t = bytes(buf[p_len:p_len+TAG_LEN])
del buf[:p_len+TAG_LEN]
siv = _AES.new(key, _AES.MODE_SIV)
siv.update(_ad(salt, idx))
p = siv.decrypt_and_verify(c, t)
await out.write(p)
idx += 1
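
Worth noting for callers: PyCryptodome's AES.MODE_SIV takes a double-length key (32, 48, or 64 bytes), unlike the AES-GCM variants above. A hedged round-trip sketch, assuming this module's functions are in scope; the key, salt, payload, and output path are illustrative.

import io, os, asyncio

async def _aiter(frames):
    # Adapt a list of encrypted frames to an async byte iterator.
    for frame in frames:
        yield frame

async def _demo():
    key = os.urandom(64)   # AES-256-SIV uses a 64-byte (double-length) key
    salt = os.urandom(16)
    frames = list(encrypt_file_to_encf(io.BytesIO(b"payload" * 100), key, 4096, salt))
    await decrypt_encf_to_file(_aiter(frames), key, "/tmp/siv_demo.bin")  # output path is illustrative

asyncio.run(_demo())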

Some files were not shown because too many files have changed in this diff.