Compare commits


43 Commits

Author SHA1 Message Date
root 2f071e5df8 fixes routing 2025-08-16 03:03:39 +00:00
root d7333bc11d server commit 2025-08-14 09:30:03 +00:00
user cad0f6aebe fixes global 2025-08-08 09:14:18 +03:00
user 13dc4f39c8 fixes 2025-07-28 12:45:04 +03:00
user 82261671a1 fix 2025-07-28 09:26:43 +03:00
user 0ce6e263e5 fix reqs 2025-07-28 09:12:35 +03:00
user c07ec3b2ec update 2025-07-28 08:46:32 +03:00
user 34d39a8580 update 2025-07-27 22:38:34 +03:00
user 274c8f1f09 fixes 2025-07-27 03:12:33 +03:00
user 8b68b0f1e3 fix sh 2025-07-26 21:15:26 +03:00
user 3aa15b6c7e fix sh 2025-07-26 20:55:07 +03:00
user 6ff645c2e6 fix sh 2025-07-26 08:13:42 +03:00
user 5d49eee98a fix sh 2025-07-26 07:08:33 +03:00
user fb63a5c381 fix sh 2025-07-25 18:38:07 +03:00
user 62fdd16eed fix sh 2025-07-25 17:01:43 +03:00
user 1661dea57c fix sh 2025-07-25 16:38:17 +03:00
user 80c489a5bd fix sh 2025-07-25 16:23:19 +03:00
user ecaa2ad132 fix sh 2025-07-25 15:42:44 +03:00
user 47db638ea6 docs / script 2025-07-25 15:36:24 +03:00
user 5a158222d7 edit scripts 2025-07-14 22:22:21 +03:00
user 1b0dfdafbc upd 2025-07-11 06:46:33 +03:00
user 373c832e71 fixes 2025-07-07 08:24:25 +03:00
user 85b35a943e new fix 2025-07-07 07:37:49 +03:00
user 305225721d new fixes 2025-07-07 07:33:07 +03:00
user 56c45365c6 fix 2025-07-05 20:46:32 +03:00
user 4ec12873bd йй 2025-07-05 20:42:58 +03:00
user 597066b28a fix 2025-07-05 20:39:52 +03:00
user 3ca560c3e2 new shs 2025-07-05 20:33:07 +03:00
user c8e1d5046c REFACTORING DIRECTORIES 2025-07-05 20:02:00 +03:00
user c645019380 fix issues sh 2025-07-05 19:51:03 +03:00
user 1e7f5eb196 diagnosis sh 2025-07-05 17:40:54 +03:00
user 3613ed4962 add diagnose 2025-07-05 13:26:14 +03:00
user 19805ff308 update dockefile 2025-07-05 08:25:42 +03:00
user cb5d3c04ad fix req 2025-07-05 08:23:55 +03:00
user 05b15ffc18 update Dockerfile 2025-07-05 08:20:51 +03:00
user d3af805108 edit docker compose 2025-07-05 08:16:37 +03:00
user 769e54b8b2 update sh 2025-07-05 08:12:34 +03:00
user 2b9fbb6c7d mariadb -> postgres 2025-07-04 12:33:03 +03:00
user 84acc64ad3 edit sh 2025-07-03 01:12:50 +03:00
user a3e99d6e62 edit script 2025-07-03 00:34:15 +03:00
user 444b5af31a sh scripts 2025-07-02 23:30:23 +03:00
user 2c1ca4bf45 fix 2025-07-02 23:08:57 +03:00
user 797f379648 relayers 2025-07-02 19:25:20 +03:00
371 changed files with 58764 additions and 15315 deletions

1
.ch Normal file

@@ -0,0 +1 @@
{"value": "a63a416be5a5db101fd6db5ca604ae99833d23d0322428f256dd922eb2540c5a"}

3
.dockerignore Normal file

@@ -0,0 +1,3 @@
logs
sqlStorage
venv

103
.env.compatible Normal file

@@ -0,0 +1,103 @@
# =============================================================================
# COMPATIBLE ENVIRONMENT CONFIGURATION
# Based on existing project structure with MariaDB
# =============================================================================
# Application Settings
DEBUG=false
ENVIRONMENT=production
SECRET_KEY=your-super-secret-key-change-this-in-production
ENCRYPTION_KEY=your-encryption-key-for-file-encryption
# Server Configuration (keeping existing port)
HOST=0.0.0.0
PORT=15100
WORKERS=4
AUTO_RELOAD=false
# MariaDB Configuration (keeping existing database)
MYSQL_ROOT_PASSWORD=password
MYSQL_DATABASE=myuploader
MYSQL_USER=myuploader
MYSQL_PASSWORD=password
MYSQL_HOST=maria_db
MYSQL_PORT=3306
# Database URL for SQLAlchemy (MariaDB compatible)
DATABASE_URL=mysql+aiomysql://myuploader:password@maria_db:3306/myuploader
DATABASE_POOL_SIZE=20
DATABASE_MAX_OVERFLOW=30
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (new addition)
REDIS_URL=redis://redis:6379/0
REDIS_POOL_SIZE=10
REDIS_MAX_CONNECTIONS=20
REDIS_SOCKET_TIMEOUT=5
REDIS_SOCKET_CONNECT_TIMEOUT=5
# Security Settings
ACCESS_TOKEN_EXPIRE_MINUTES=60
REFRESH_TOKEN_EXPIRE_DAYS=30
PASSWORD_MIN_LENGTH=8
RATE_LIMIT_ENABLED=true
CORS_ORIGINS=["http://localhost:3000","https://yourdomain.com"]
# Storage Configuration (keeping existing paths)
STORAGE_PATH=/app/data
MAX_FILE_SIZE=10737418240
MAX_CHUNK_SIZE=10485760
CHUNK_SIZE=1048576
ENCRYPT_FILES=true
CLEANUP_TEMP_FILES=true
# User Limits
MAX_UPLOADS_PER_DAY=100
MAX_STORAGE_PER_USER=107374182400
MAX_FILES_PER_USER=10000
DAILY_TRANSACTION_LIMIT=10
MAX_TRANSACTION_AMOUNT=5
# TON Blockchain Configuration
TON_API_ENDPOINT=https://toncenter.com/api/v2
TON_API_KEY=your-ton-api-key
TON_TESTNET=false
TON_WALLET_VERSION=v4
# Logging Configuration (keeping existing paths)
LOG_LEVEL=INFO
LOG_FORMAT=json
LOG_FILE=/app/logs/app.log
LOG_ROTATION=daily
LOG_RETENTION_DAYS=30
# Email Configuration (Optional)
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_USERNAME=your-email@gmail.com
SMTP_PASSWORD=your-app-password
SMTP_TLS=true
FROM_EMAIL=noreply@yourdomain.com
# Monitoring Configuration (minimal)
METRICS_ENABLED=true
METRICS_PORT=9090
HEALTH_CHECK_ENABLED=true
# External Services (Optional)
WEBHOOK_URL=https://yourdomain.com/webhooks
BACKUP_ENABLED=true
BACKUP_SCHEDULE=0 2 * * *
BACKUP_RETENTION_DAYS=30
# Development Settings (Only for development)
# DEV_RELOAD=true
# DEV_DEBUG_TOOLBAR=true
# DEV_PROFILER=true
# Production Settings (Only for production)
# SENTRY_DSN=https://your-sentry-dsn
# SSL_ENABLED=true
# SSL_CERT_PATH=/path/to/cert.pem
# SSL_KEY_PATH=/path/to/key.pem
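
For reference, a minimal sketch of how these variables might be consumed with pydantic-settings (an assumption; the class name and field subset are illustrative, not the project's actual settings module):

```python
# Hypothetical loader for a subset of the variables above (pydantic-settings 2.x).
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
    # Unlisted variables are ignored; real env vars override the file.
    model_config = SettingsConfigDict(env_file=".env.compatible", extra="ignore")

    debug: bool = False
    environment: str = "production"
    secret_key: str
    database_url: str
    database_pool_size: int = 20
    redis_url: str = "redis://redis:6379/0"
    max_file_size: int = 10_737_418_240  # 10 GiB, as MAX_FILE_SIZE above

settings = Settings()
```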

150
.env.example Normal file

@@ -0,0 +1,150 @@
# =============================================================================
# MY UPLOADER BOT - ENVIRONMENT CONFIGURATION
# =============================================================================
# Copy this file to .env and adjust it for your environment
# =============================================================================
# CORE SETTINGS
# =============================================================================
# Environment: development, production, testing
NODE_ENV=development
DEBUG=true
# =============================================================================
# DATABASE (PostgreSQL)
# =============================================================================
DATABASE_URL=postgresql://my_user:CHANGE_ME_SECURE_PASSWORD@localhost:5432/my_uploader_db
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DB=my_uploader_db
POSTGRES_USER=my_user
POSTGRES_PASSWORD=CHANGE_ME_SECURE_PASSWORD
# =============================================================================
# REDIS CACHE
# =============================================================================
REDIS_URL=redis://localhost:6379/0
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
# =============================================================================
# SECURITY KEYS
# =============================================================================
# IMPORTANT: Generate new keys for production!
SECRET_KEY=CHANGE_ME_SECRET_KEY_FOR_PRODUCTION_MIN_32_CHARS
JWT_SECRET=CHANGE_ME_JWT_SECRET_FOR_PRODUCTION_MIN_32_CHARS
ENCRYPTION_KEY=CHANGE_ME_ENCRYPTION_KEY_32_CHARS_LONG
# =============================================================================
# MY NETWORK SETTINGS
# =============================================================================
MY_NETWORK_NODE_ID=local-dev-node
MY_NETWORK_PORT=15100
MY_NETWORK_HOST=0.0.0.0
MY_NETWORK_DOMAIN=localhost
MY_NETWORK_SSL_ENABLED=false
# Bootstrap nodes for joining the network
MY_NETWORK_BOOTSTRAP_NODES=my-public-node-3.projscale.dev:15100
# =============================================================================
# API SETTINGS
# =============================================================================
API_HOST=0.0.0.0
API_PORT=15100
API_WORKERS=2
MAX_UPLOAD_SIZE=50MB
UPLOAD_PATH=./uploads
# =============================================================================
# LOGGING
# =============================================================================
LOG_LEVEL=DEBUG
LOG_FORMAT=text
LOG_FILE=./logs/app.log
# =============================================================================
# MONITORING (Optional)
# =============================================================================
# Grafana
GRAFANA_PASSWORD=admin123
# Prometheus
PROMETHEUS_RETENTION=15d
# =============================================================================
# EMAIL SETTINGS (Optional)
# =============================================================================
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_USER=your-email@gmail.com
SMTP_PASSWORD=your-app-password
SMTP_FROM=noreply@yourdomain.com
# =============================================================================
# SOCIAL AUTH (Optional)
# =============================================================================
# GitHub OAuth
GITHUB_CLIENT_ID=your-github-client-id
GITHUB_CLIENT_SECRET=your-github-client-secret
# Google OAuth
GOOGLE_CLIENT_ID=your-google-client-id
GOOGLE_CLIENT_SECRET=your-google-client-secret
# =============================================================================
# EXTERNAL SERVICES (Optional)
# =============================================================================
# AWS S3 (for backups)
AWS_ACCESS_KEY_ID=your-aws-access-key
AWS_SECRET_ACCESS_KEY=your-aws-secret-key
AWS_BUCKET_NAME=your-backup-bucket
AWS_REGION=us-east-1
# Cloudflare (for CDN)
CLOUDFLARE_API_TOKEN=your-cloudflare-token
CLOUDFLARE_ZONE_ID=your-zone-id
# =============================================================================
# PRODUCTION ONLY
# =============================================================================
# SSL certificates
SSL_CERT_PATH=/etc/ssl/certs/yourdomain.crt
SSL_KEY_PATH=/etc/ssl/private/yourdomain.key
# Backup
BACKUP_SCHEDULE=0 2 * * *
BACKUP_RETENTION_DAYS=30
BACKUP_S3_BUCKET=your-backup-bucket
# Security
ALLOWED_HOSTS=yourdomain.com,www.yourdomain.com
CORS_ORIGINS=https://yourdomain.com,https://www.yourdomain.com
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
RATE_LIMIT_BURST=10
# =============================================================================
# DEVELOPMENT ONLY
# =============================================================================
# Debug mode
FLASK_DEBUG=true
UVICORN_RELOAD=true
# Local testing
TEST_DATABASE_URL=postgresql://test_user:test_pass@localhost:5433/test_db

3
.gitignore vendored

@@ -4,7 +4,10 @@ venv
logs
sqlStorage
playground
alembic.ini
.DS_Store
messages.pot
activeConfig
__pycache__
*.pyc


@@ -1,275 +0,0 @@
# System architecture overview
This document is the single, up-to-date source of truth for the platform: architecture, protocols, data, configuration, scenarios, and operations. It supersedes the scattered, outdated documents.
## Contents
- Components and topology
- Decentralized layer (membership, network size estimation, replication, metrics)
- Content upload and conversion
- Content viewing and purchase (UI/UX requirements)
- API (key endpoints and payloads)
- DHT keys and data schemas
- Configuration and defaults
- Observability and metrics
- Sequence diagrams (Mermaid)
- Build and testing
---
## Components and topology
- Backend API: a Sanic (Python) service with Telegram bots; PostgreSQL database (SQLAlchemy + Alembic).
- Storage: local FS (uploads/derivatives); IPFS (kubo) for retrieval/pinning; tusd (resumable uploads).
- Converters: ffmpeg workers in containers: `convert_v3`, `convert_process`.
- Frontend: SPA (Vite + TypeScript), served by an nginx container.
- Decentralized layer: an embedded, in-process DHT for membership, replica leases, and content metrics.
```mermaid
flowchart LR
Client -- TWA/HTTP --> Frontend
Frontend -- REST --> API[Backend API]
API -- tus hooks --> tusd
API -- SQL --> Postgres
API -- IPC --> Workers[Converters]
API -- IPFS --> IPFS
API -- DHT --> DHT[(In-Process DHT)]
DHT -- CRDT Merge --> DHT
```
---
## Decentralized layer
### Identifiers and versions
- NodeID = blake3(Ed25519 public key), a 256-bit hex string.
- ContentID = blake3(encrypted blob), the immutable content identifier.
- schema_version = v1, recorded in all DHT keys/records. A short sketch of the identifier derivation follows.
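
A hedged illustration of the identifier scheme, using the `blake3` and `cryptography` packages (assumed dependencies, not necessarily the code's actual helpers):

```python
# Deriving NodeID and ContentID as defined above (illustrative only).
import blake3
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

private_key = Ed25519PrivateKey.generate()
pubkey = private_key.public_key().public_bytes(Encoding.Raw, PublicFormat.Raw)

node_id = blake3.blake3(pubkey).hexdigest()      # NodeID: blake3 of the Ed25519 pubkey
encrypted_blob = b"<ciphertext>"                 # placeholder for the encrypted content
content_id = blake3.blake3(encrypted_blob).hexdigest()  # ContentID: immutable
```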
### Membership
- Handshake `/api/v1/network.handshake`: the request is Ed25519-signed and verified on the receiving side. Without a valid signature the node responds 400 BAD_SIGNATURE.
- The payload includes: node info (version, capabilities, IPFS), metrics, the array of known public nodes, and reachability receipts (issuer, target, ASN, timestamp, signature).
- Membership state is a CRDT LWW-Set (adds/removes) with a TTL (`DHT_MEMBERSHIP_TTL=600` sec), plus a HyperLogLog for cardinality estimation (N_local).
- "Island" filtering: nodes with `reachability_ratio < q` (default `q=0.6`) are excluded from the N_estimate computation and from replica selection.
- The final estimate is `N_estimate = max(valid N_local values from peers)` (see the sketch after the diagram below).
```mermaid
sequenceDiagram
participant A as Node A
participant B as Node B
A->>B: POST /network.handshake {nonce, ts, node, receipts, signature}
B->>B: verify ts/nonce and signature
B->>B: upsert member; merge(receipts)
B-->>A: {node, known_public_nodes, n_estimate, server_signature}
A->>A: merge; N_estimate = max(N_local, received values)
```
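
A minimal sketch of the island filter and the max rule, assuming peer reports are plain dicts (the real DHT structures differ):

```python
# N_estimate from peer-reported local estimates, excluding "islands".
def estimate_network_size(reports: dict[str, dict], q: float = 0.6) -> int:
    """reports: node_id -> {"n_local": int, "reachability_ratio": float}."""
    valid = [r["n_local"] for r in reports.values()
             if r.get("reachability_ratio", 0.0) >= q]
    return max(valid, default=1)  # N_estimate = max of valid N_local values
```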
### Replication and leases
- Prefix selection: `p = max(0, round(log2(N_estimate / R_target)))`, where `R_target ≥ 3` (default 3).
- Responsible nodes: those whose first `p` bits of NodeID match the first `p` bits of ContentID.
- The leader is the minimal NodeID among the responsible nodes.
- The leader issues `replica_leases` (TTL=600 sec), enforcing diversity: at least 3 distinct first IP octets and, where available, 3 distinct ASNs.
- Candidates are ranked by the rendezvous score `blake3(ContentID || NodeID)` (see the sketch after the diagram below).
- Holders send a heartbeat every 60 sec; 3 misses mark a holder down and trigger reassignment within 180 sec.
- Under- and over-replication are recorded in `conflict_log` and in Prometheus metrics.
```mermaid
stateDiagram-v2
[*] --> Discover
Discover: Рукопожатия + квитанции
Discover --> Active: TTL & кворм ASN
Active --> Leader: Выбор лидера префикса p
Leader --> Leased: Выдача лизов (diversity)
Leased --> Monitoring: Heartbeat 60s
Monitoring --> Reassign: 3 пропуска
Reassign --> Leased
```
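
The prefix and ranking rules reduce to a few lines; a sketch under the assumption that ContentID and NodeID are hex strings and that the rendezvous concatenation is over raw bytes:

```python
import math
import blake3

def prefix_bits(n_estimate: int, r_target: int = 3) -> int:
    # p = max(0, round(log2(N_estimate / R_target)))
    return max(0, round(math.log2(max(n_estimate, 1) / r_target)))

def rank_candidates(content_id: str, node_ids: list[str]) -> list[str]:
    # Rendezvous score blake3(ContentID || NodeID); ascending order is an assumption.
    score = lambda n: blake3.blake3(bytes.fromhex(content_id) + bytes.fromhex(n)).digest()
    return sorted(node_ids, key=score)
```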
### Metrics (windows)
- Each view event produces CRDT deltas:
- PNCounter: number of views;
- HyperLogLog: unique ViewIDs (ViewID = blake3(ContentID || device salt));
- GCounter: watch_time, bytes_out, number of completions.
- Hourly windows (`DHT_METRIC_WINDOW_SEC`), keyed by `MetricKey = blake3(ContentID || WindowID)`.
- Merges are commutative and deterministic (a sketch follows).
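
A sketch of the commutative merges, with the counters modeled as plain dicts (the on-wire encoding is not shown here):

```python
import blake3

# GCounter: per-node monotone counters; merge is an element-wise max.
def merge_gcounter(a: dict[str, int], b: dict[str, int]) -> dict[str, int]:
    return {n: max(a.get(n, 0), b.get(n, 0)) for n in a.keys() | b.keys()}

# PNCounter: a pair of GCounters (increments, decrements); value = sum(P) - sum(N).
def merge_pncounter(a, b):
    return (merge_gcounter(a[0], b[0]), merge_gcounter(a[1], b[1]))

# ViewID as defined above; the byte encoding of the inputs is an assumption.
def view_id(content_id: bytes, device_salt: bytes) -> str:
    return blake3.blake3(content_id + device_salt).hexdigest()
```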
---
## Content upload and conversion
1) The client uploads to `tusd` (resumable). The backend receives HTTP hooks at `/api/v1/upload.tus-hook`.
2) A database record is created for the encrypted content, and the workers produce the derivatives:
- for media: preview/low/high;
- for binaries: the original (available only with a license).
3) `/api/v1/content.view` returns `display_options` and the aggregated conversion/upload state.
```mermaid
sequenceDiagram
participant C as Client
participant T as tusd
participant B as Backend
participant W as Workers
participant DB as PostgreSQL
C->>T: upload
T->>B: hooks (pre/post-finish)
B->>DB: create content
B->>W: conversion queue
W->>DB: derive/previews
C->>B: GET /content.view
B->>DB: resolve derivatives
B-->>C: display_options + status
```
---
## Viewing and purchase (UI/UX)
- `/api/v1/content.view/<content_address>` determines the available renditions:
- binary content without a preview: the original only with a license;
- audio/video: preview/low for unauthorized users, decrypted_low/high for users with access.
- While conversion is in progress, the frontend shows a "processing" status, with no fake links.
- Cover image:
- a fixed square slot; the image fits inside without stretching or distortion;
- empty areas are not filled with black; the background matches the page background.
- The "Buy with TON/Stars" buttons always stay on one line (no horizontal or vertical content scrolling on small screens).
```mermaid
flowchart LR
View[content.view] --> Resolve[Resolve derivatives]
Resolve --> Ready{Ready?}
Ready -- No --> Info[Status: processing/pending]
Ready -- Yes --> Options
Options -- Binary + no license --> HideOriginal[Hide original]
Options -- Media + no license --> PreviewLow[preview/low]
Options -- Has license --> Decrypted[decrypted low/high|original]
```
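
The decision flow above amounts to a small function; the names here are illustrative, not the real API:

```python
def resolve_display_options(kind: str, has_license: bool, converted: bool) -> list[str]:
    """kind: 'binary' or 'media'; returns the renditions to offer."""
    if not converted:
        return []  # frontend shows "processing", never fake links
    if kind == "binary":
        return ["original"] if has_license else []  # original requires a license
    return ["decrypted_low", "decrypted_high"] if has_license else ["preview", "low"]
```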
---
## API (key endpoints)
- `GET /api/system.version` - service freshness.
- `POST /api/v1/network.handshake` - membership exchange (an Ed25519 request signature is mandatory; see the verification sketch after this list). Example request:
```json
{
"version": "3.0.0",
"schema_version": "v1",
"public_key": "<base58 ed25519 pubkey>",
"node_id": "<blake3(pubkey)>",
"public_host": "https://node.example",
"node_type": "public|private",
"metrics": {"uptime_sec": 123, "content_count": 42},
"capabilities": {"accepts_inbound": true, "is_bootstrap": false},
"ipfs": {"multiaddrs": ["/ip4/.../tcp/4001"], "peer_id": "..."},
"known_public_nodes": [],
"reachability_receipts": [],
"timestamp": 1710000000,
"nonce": "<hex>",
"signature": "<base58 ed25519 signature>"
}
```
- `GET /api/v1/content.view/<content_address>` - `display_options`, `status`, `conversion`.
- `GET /api/v1.5/storage/<file_hash>` - file delivery.
- `GET /metrics` - Prometheus metrics exposition (or a fallback counter dump).
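
A hedged sketch of the receiver-side signature check with PyNaCl and the `base58` package (assumed libraries; the exact canonicalization of the signed payload is an assumption):

```python
import json
from base58 import b58decode
from nacl.exceptions import BadSignatureError
from nacl.signing import VerifyKey

def verify_handshake(payload: dict) -> bool:
    # Assumes the signature covers the canonical JSON of all fields but "signature".
    body = {k: v for k, v in payload.items() if k != "signature"}
    message = json.dumps(body, sort_keys=True, separators=(",", ":")).encode()
    try:
        VerifyKey(b58decode(payload["public_key"])).verify(
            message, b58decode(payload["signature"])
        )
        return True
    except BadSignatureError:
        return False  # the endpoint answers 400 BAD_SIGNATURE
```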
---
## DHT keys and schemas
- `MetaKey(content_id)` - replication metadata:
- `replica_leases`: map `{lease_id -> {node_id, issued_at, expires_at, asn, ip_first_octet, heartbeat_at, score}}`;
- `leader`: the leader's NodeID; `revision`: revision number;
- `conflict_log`: array of events such as `UNDER/OVER/LEASE_EXPIRED`.
- `MembershipKey(node_id)` - membership:
- `members`: LWW-Set; `receipts`: LWW-Set;
- `hll`: HyperLogLog; `reports`: maps of local N estimates;
- `logical_counter`: logical counter for LWW domination.
- `MetricKey(content_id, window_id)` - window metrics:
- `views`: PNCounter; `unique`: HLL; `watch_time`, `bytes_out`, `completions`: GCounters.
All records are signed and merged deterministically: CRDT logic plus LWW domination over (`logical_counter`, `timestamp`, `node_id`), as sketched below.
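
The domination rule is a lexicographic comparison; a minimal sketch with records as dicts:

```python
# LWW domination over (logical_counter, timestamp, node_id).
def _key(record: dict) -> tuple:
    return (record["logical_counter"], record["timestamp"], record["node_id"])

def lww_merge(a: dict, b: dict) -> dict:
    # Deterministic: the same winner regardless of merge order.
    return a if _key(a) >= _key(b) else b
```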
---
## Configuration and defaults
- Network/handshakes: `NODE_PRIVACY`, `PUBLIC_HOST`, `HANDSHAKE_INTERVAL_SEC`, `NETWORK_TLS_VERIFY`, IPFS peers/bootstraps.
- DHT:
- `DHT_MIN_RECEIPTS=5`, `DHT_MIN_REACHABILITY=0.6`, `DHT_MEMBERSHIP_TTL=600`;
- `DHT_REPLICATION_TARGET=3`, `DHT_LEASE_TTL=600`;
- `DHT_HEARTBEAT_INTERVAL=60`, `DHT_HEARTBEAT_MISS_THRESHOLD=3`;
- `DHT_MIN_ASN=3`, `DHT_MIN_IP_OCTETS=3`;
- `DHT_METRIC_WINDOW_SEC=3600`.
- Conversion: `CONVERT_*` quotas, `MAX_CONTENT_SIZE_MB`.
Note: PoW admission and Kademlia k-buckets are not yet active in the code; they are part of the design and may be implemented separately.
---
## Observability and metrics
Prometheus:
- `dht_replication_under_total`, `dht_replication_over_total`, `dht_leader_changes_total`;
- `dht_merge_conflicts_total`;
- `dht_view_count_total`, `dht_unique_view_estimate`, `dht_watch_time_seconds`.
Logs: structured HTTP errors (with ids), the replication `conflict_log`, node registration events.
---
## Sequence diagrams (summary)
### Updating N_estimate
```mermaid
sequenceDiagram
participant Peer
participant Membership
participant DHT
Peer->>Membership: handshake(payload, receipts)
Membership->>Membership: merge LWW/receipts
Membership->>Membership: update HLL and N_local
Membership->>DHT: persist MembershipKey
Membership->>Membership: N_estimate = max(valid reports)
```
### Leader election and lease issuance
```mermaid
sequenceDiagram
participant L as Leader
participant R as Responsible
L->>L: p = round(log2(N_est/R))
L->>R: rank by rendezvous(ContentID, NodeID)
L->>L: assign leases (diversity)
R-->>L: heartbeat/60s
L->>L: reassign on 3 misses
```
### Publishing window metrics
```mermaid
sequenceDiagram
participant C as Client
participant API as Backend
participant M as Metrics
participant D as DHT
C->>API: GET content.view?watch_time,bytes_out
API->>M: record_view(delta)
M->>D: merge MetricKey(ContentID, window)
API-->>Prom: /metrics
```
---
## Build and testing
```bash
# Start the environment (example for /home/configs)
docker compose -f /home/configs/docker-compose.yml --env-file /home/configs/.env up -d --build
# DHT layer tests
cd uploader-bot
python3 -m unittest discover -s tests/dht
```


@@ -1,28 +1,39 @@
FROM python:3.9
FROM python:3.11-slim
WORKDIR /app
# Copy and install Python dependencies
COPY requirements.txt .
RUN pip install -r requirements.txt
# Install system dependencies (only what is needed)
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
gcc \
g++ \
curl \
ffmpeg \
libmagic1 \
&& rm -rf /var/lib/apt/lists/*
COPY . .
# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN python -m pip install --upgrade pip && pip install --no-cache-dir -r requirements.txt
# Install required packages and add Docker's official GPG key and repository
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
gnupg \
lsb-release \
ffmpeg && \
install -m 0755 -d /etc/apt/keyrings && \
curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc && \
chmod a+r /etc/apt/keyrings/docker.asc && \
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian \
$(. /etc/os-release && echo \"$VERSION_CODENAME\") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null && \
apt-get update && \
apt-get install -y docker-ce-cli
# Copy the application code
COPY app/ ./app/
COPY alembic/ ./alembic/
COPY alembic.ini .
COPY bootstrap.json .
RUN apt-get install libmagic1 -y
# Create directories
RUN mkdir -p /app/storage /app/logs
# Access permissions
RUN chmod +x /app/app/main.py
CMD ["python", "app"]
# Environment variables for a correct start
ENV UVICORN_HOST=0.0.0.0
ENV UVICORN_PORT=8000
ENV API_HOST=0.0.0.0
ENV API_PORT=8000
EXPOSE 8000
CMD ["uvicorn", "app.fastapi_main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "1"]

43
Dockerfile.simple Normal file

@@ -0,0 +1,43 @@
FROM python:3.11-slim
# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*
# Create the working directory
WORKDIR /app
# Copy dependency manifests
COPY pyproject.toml ./
COPY requirements_new.txt ./
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements_new.txt
# Copy the source code
COPY . .
# Create data and log directories
RUN mkdir -p /app/data /app/logs
# Create an unprivileged user for security
RUN groupadd -r myapp && useradd -r -g myapp myapp
RUN chown -R myapp:myapp /app
USER myapp
# Application port
EXPOSE 15100
# Environment variables
ENV PYTHONPATH=/app
ENV PYTHONUNBUFFERED=1
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
    CMD curl -f http://localhost:15100/health || exit 1
# Startup command
CMD ["python", "start_my_network.py"]


@@ -0,0 +1,291 @@
# MY Network v2.0 - Deployment Guide
## 🎯 Overview
MY Network v2.0 is a distributed P2P network for content replication, with Matrix-style monitoring and fully automated deployment.
### System components:
- **MY Network Core** - the core P2P protocol (port 15100)
- **Web2 Client** - web interface (port 3000)
- **Converter Module** - conversion module (port 8080)
- **Telegram Bots** - client and uploader bots (optional)
## 🚀 Deployment types
### 1. Main Bootstrap Node (my-public-node-3.projscale.dev)
**Purpose:** The main node of the network, which all other nodes connect to.
**Script:** [`deploy_main_bootstrap_node.sh`](deploy_main_bootstrap_node.sh)
**Run:**
```bash
# On the my-public-node-3.projscale.dev server, as root:
chmod +x deploy_main_bootstrap_node.sh
./deploy_main_bootstrap_node.sh
```
**What it does:**
- ✅ Installs Docker + Docker Compose
- ✅ Configures the UFW firewall (22, 80, 443, 15100)
- ✅ Configures the Nginx reverse proxy
- ✅ Obtains a Let's Encrypt SSL certificate
- ✅ Deploys all components (MY Network + Web Client + Converter)
- ✅ Creates a systemd service
- ✅ Sets up automatic SSL renewal
**Result:**
- 🌐 **Web Interface:** https://my-public-node-3.projscale.dev/
- 🎛️ **Matrix Dashboard:** https://my-public-node-3.projscale.dev/monitor/
- ❤️ **Health Check:** https://my-public-node-3.projscale.dev/health
- 🔗 **Bootstrap API:** https://my-public-node-3.projscale.dev/api/bootstrap
- 🔌 **WebSocket:** wss://my-public-node-3.projscale.dev/ws/monitor
### 2. Regular Node (joining the network)
**Purpose:** A regular node that connects to an existing network.
**Script:** [`deploy_regular_node.sh`](deploy_regular_node.sh)
**Run:**
```bash
chmod +x deploy_regular_node.sh
./deploy_regular_node.sh
```
**Interactive setup:**
1. **Node Name** - node name (default: my-node-timestamp)
2. **Public Domain** - public domain (optional for private nodes)
3. **Telegram Bot Token** - client bot token (optional)
4. **Uploader Bot Token** - uploader bot token (optional)
**Node types:**
- **Public Regular Node** - with a domain, SSL, Nginx, and a web interface
- **Private Regular Node** - local access only, no public domain
**What it does:**
- 🔍 Connects to the main bootstrap node
- 📡 Fetches the network configuration
- 🐳 Deploys containers according to the node type
- 🌐 Sets up public access (if a domain is provided)
- 🤖 Enables the Telegram bots (if tokens are provided)
- 🔄 Starts synchronization with the network
## 📋 Node management
### Main Bootstrap Node
```bash
# View logs
docker-compose -f /opt/my-network-bootstrap/docker-compose.yml logs -f
# Restart
systemctl restart my-network-main
# Status
systemctl status my-network-main
# Containers
docker-compose ps
```
### Regular Node
```bash
# View logs (replace NODE_NAME with your node's name)
docker-compose -f /opt/my-network-NODE_NAME/docker-compose.yml logs -f
# Restart
systemctl restart my-network-NODE_NAME
# Status
systemctl status my-network-NODE_NAME
```
## 🔧 Configuration
### Main files:
- **`.env`** - environment variables
- **`bootstrap_main.json`** / **`bootstrap_regular.json`** - node configuration
- **`docker-compose.yml`** - container configuration
### Ports:
- **15100** - MY Network Protocol v2.0
- **3000** - Web2 Client (public nodes only)
- **8080** - Converter Module
- **80/443** - HTTP/HTTPS (Nginx)
### Firewall (UFW):
```bash
# Core ports
ufw allow 22/tcp # SSH
ufw allow 80/tcp # HTTP
ufw allow 443/tcp # HTTPS
ufw allow 15100/tcp # MY Network
```
## 🌐 Network architecture
```
┌─────────────────────────────────────┐
│ Main Bootstrap Node │
│ my-public-node-3.projscale.dev │
│ │
│ ┌─────────────┐ ┌─────────────┐ │
│ │ MY Network │ │ Web Client │ │
│ │ :15100 │ │ :3000 │ │
│ └─────────────┘ └─────────────┘ │
│ ┌─────────────┐ ┌─────────────┐ │
│ │ Converter │ │ Nginx+SSL │ │
│ │ :8080 │ │ :80/:443 │ │
│ └─────────────┘ └─────────────┘ │
└─────────────────────────────────────┘
│ Bootstrap API
┌─────────┼─────────┐
│ │ │
┌───▼───┐ ┌───▼───┐ ┌───▼───┐
│Regular│ │Regular│ │Regular│
│Node 1 │ │Node 2 │ │Node N │
│ │ │ │ │ │
│Public │ │Private│ │Public │
└───────┘ └───────┘ └───────┘
```
## 🔒 Security
### SSL/TLS:
- Automatic Let's Encrypt certificate issuance
- Automatic certificate renewal (cron job)
- HTTPS redirect for all public nodes
### Firewall:
- UFW configured for minimal exposure
- Only the required ports are open
- DDoS protection at the Nginx level
### Authentication:
- JWT tokens for the API
- Encrypted P2P connections
- Rate limiting for API endpoints
## 🤖 Telegram Bots
### Client bot:
- User interaction
- Content viewing
- Account management
### Uploader bot:
- Uploading content into the network
- File conversion
- Metadata management
### Setup:
```bash
# In the node's .env file:
TELEGRAM_BOT_TOKEN=your_client_bot_token
UPLOADER_BOT_TOKEN=your_uploader_bot_token
```
## 📊 Monitoring
### Matrix Dashboard:
- Real-time network statistics
- Peer information
- Synchronization status
- WebSocket updates
### Endpoints:
- **Health:** `/health`
- **Metrics:** `/api/metrics`
- **Dashboard:** `/api/my/monitor/`
- **WebSocket:** `/api/my/monitor/ws`
## 🔄 Synchronization
### How a new node joins:
1. Connect to the bootstrap node
2. Fetch the list of active peers
3. Establish P2P connections
4. Synchronize data
5. Register in the network
### Intervals:
- **Sync Interval:** 30 seconds (regular) / 15 seconds (main)
- **Discovery Interval:** 60 seconds (regular) / 30 seconds (main)
- **Connection Timeout:** 30 seconds
## 🛠️ Troubleshooting
### Connectivity issues:
```bash
# Check that the bootstrap node is reachable
curl -f https://my-public-node-3.projscale.dev/health
# Check the local health endpoint
curl -f http://localhost:15100/health
```
### SSL issues:
```bash
# Inspect the certificates
certbot certificates
# Renew a certificate
certbot renew --dry-run
```
### Docker issues:
```bash
# Restart the containers
docker-compose down && docker-compose up -d --build
# View logs
docker-compose logs -f
```
## 📁 File layout
```
/opt/my-network-*/
├── .env # Environment variables
├── bootstrap_*.json # Node configuration
├── docker-compose.yml # Docker configuration
├── data/ # Database
├── logs/ # Application logs
├── app/ # Source code
├── web2-client/ # Web client
└── converter-module/ # Conversion module
```
## ✅ Deployment verification
### Main Bootstrap Node:
```bash
# Check all endpoints
curl -f https://my-public-node-3.projscale.dev/health
curl -f https://my-public-node-3.projscale.dev/api/bootstrap
curl -f https://my-public-node-3.projscale.dev/monitor/
```
### Regular Node:
```bash
# Local check
curl -f http://localhost:15100/health
# Public check (if a domain is configured)
curl -f https://your-domain.com/health
```
## 🎉 Conclusion
MY Network v2.0 provides fully automated deployment of a distributed P2P network, with:
- ⚡ Fast main bootstrap node deployment
- 🔧 Flexible regular node configuration
- 🤖 Optional Telegram bots
- 🌐 Public and private access
- 🔒 Automatic SSL setup
- 📊 Real-time monitoring
**Ready for production use!**


@@ -0,0 +1,187 @@
# MY Network v2.0 - Deployment Summary
## 🎉 Project completed successfully!
**Completion date:** July 11, 2025, 02:18 MSK
**Status:** ✅ Ready for production deployment
---
## 📊 Completed tasks
### ✅ 1. Async context manager protocol fix
- **Problem:** `__aenter__`/`__aexit__` errors in the database layer
- **Fix:** Correct use of the `async with db_manager.get_session()` pattern
- **Status:** Fully fixed
### ✅ 2. Matrix monitoring verification
- **Problem:** Potential errors after the DB fixes
- **Result:** HTTP 200, the dashboard works, WebSockets are functional
- **Status:** Confirmed working
### ✅ 3. WebSocket real-time updates
- **Checked:** `/api/my/monitor/ws` connections
- **Result:** Real-time monitoring is fully functional
- **Status:** Works correctly
### ✅ 4. pydantic-settings fixes
- **Problem:** `NodeService` vs `MyNetworkNodeService` class mismatch
- **Files fixed:**
- `uploader-bot/app/main.py` - fixed import and class name
- `uploader-bot/start_my_network.py` - fixed import and class name
- **Status:** Fully fixed
### ✅ 5. Docker Compose for MY Network v2.0
- **File:** `uploader-bot/docker-compose.yml`
- **Configuration:**
- Port 15100 for MY Network v2.0
- `main-node` profile for the bootstrap node
- Integration with bootstrap.json and .env
- **Status:** Ready for use
### ✅ 6. Universal installer v2.0
- **File:** `uploader-bot/universal_installer.sh`
- **Updates:**
- Port 15100 for MY Network v2.0
- UFW firewall rules
- Nginx configuration with Matrix monitoring endpoints
- systemd service with environment variables
- MY Network endpoint tests
- **Status:** Fully updated
### 🔄 7. Local testing
- **Process:** Docker build started
- **Configuration:** `.env` file created
- **Status:** In progress (Docker build > 150 seconds)
### ✅ 8. Production deployment script
- **File:** `uploader-bot/deploy_production_my_network.sh`
- **Target:** `my-public-node-3.projscale.dev`
- **Functionality:**
- Automated Docker and Docker Compose installation
- UFW firewall setup
- Nginx configuration with SSL
- Let's Encrypt SSL certificates
- systemd service
- Automated endpoint tests
- **Status:** Ready to run
---
## 🌐 MY Network v2.0 - Technical Specifications
### Core Components
- **Port:** 15100
- **Protocol:** MY Network Protocol v2.0
- **Database:** SQLite + aiosqlite (async)
- **Framework:** FastAPI + uvicorn
- **Monitoring:** Matrix-themed dashboard with real-time WebSocket
### Endpoints
- **Health Check:** `/health`
- **Matrix Dashboard:** `/api/my/monitor/`
- **WebSocket:** `/api/my/monitor/ws`
- **API Documentation:** `:15100/docs`
### Security Features
- **Encryption:** Enabled
- **Authentication:** Required
- **SSL/TLS:** Let's Encrypt integration
- **Firewall:** UFW configured (22, 80, 443, 15100)
### Deployment Options
1. **Local Development:** `docker-compose --profile main-node up -d`
2. **Universal Install:** `bash universal_installer.sh`
3. **Production:** `bash deploy_production_my_network.sh`
---
## 🚀 Quick Start Commands
### Local deployment:
```bash
cd uploader-bot
docker-compose --profile main-node up -d
```
### Production deployment:
```bash
cd uploader-bot
chmod +x deploy_production_my_network.sh
./deploy_production_my_network.sh
```
### Monitoring:
```bash
# Status check
docker ps
docker-compose logs -f app
# Test endpoints
curl -I http://localhost:15100/health
curl -I http://localhost:15100/api/my/monitor/
```
---
## 📁 Key files
| File | Description | Status |
|------|-------------|--------|
| `docker-compose.yml` | MY Network v2.0 configuration | ✅ Updated |
| `bootstrap.json` | Bootstrap node configuration | ✅ Created |
| `.env` | Environment variables | ✅ Created |
| `universal_installer.sh` | Universal deployment script | ✅ Updated |
| `deploy_production_my_network.sh` | Production deployment | ✅ Created |
| `start_my_network.py` | MY Network startup script | ✅ Fixed |
| `app/main.py` | Main application entry | ✅ Fixed |
---
## 🎯 Production Readiness Checklist
- ✅ **Database:** async context managers fixed
- ✅ **Monitoring:** Matrix dashboard functional
- ✅ **WebSocket:** real-time updates working
- ✅ **Configuration:** pydantic-settings configured
- ✅ **Docker:** docker-compose ready
- ✅ **Installer:** universal installer updated
- ✅ **Production Script:** deployment automation ready
- 🔄 **Local Testing:** in progress
- ⏳ **Production Deploy:** ready to launch
---
## 🌟 Next Steps
1. **Finish local testing** (wait for the Docker build)
2. **Run the production deployment:**
```bash
./deploy_production_my_network.sh
```
3. **Verify the production endpoints:**
- https://my-public-node-3.projscale.dev/health
- https://my-public-node-3.projscale.dev/api/my/monitor/
---
## 💡 Technical Achievements
### Critical bugs fixed:
1. **Async Context Manager Protocol** - fully fixed
2. **pydantic-settings Class Mismatches** - all imports fixed
3. **MY Network Service Configuration** - port 15100 ready
### New functionality:
1. **Matrix-themed Monitoring** - production ready
2. **Real-time WebSocket Updates** - fully functional
3. **Bootstrap Node Discovery** - ready for P2P networking
4. **One-command Deployment** - fully automated
---
## 🎉 Result
**MY Network v2.0 is fully ready for production deployment on `my-public-node-3.projscale.dev` as the main bootstrap node of the distributed P2P network!**
**All critical bugs are fixed, monitoring works, and the deployment automation is ready.**

532
README.md

@@ -1,40 +1,518 @@
# Sanic Telegram Bot [template]
# MY Network v3.0 with FastAPI - Decentralized Content Network
Full system documentation (architecture, protocols, configuration, diagrams): see `ARCHITECTURE.md`.
**🚀 Automated installation and launch of a decentralized content network with FastAPI**
### Running the DHT integration tests
[![FastAPI](https://img.shields.io/badge/FastAPI-0.104.1-009688.svg?style=flat&logo=FastAPI)](https://fastapi.tiangolo.com)
[![Python](https://img.shields.io/badge/Python-3.11+-3776ab.svg?style=flat&logo=python)](https://www.python.org)
[![Docker](https://img.shields.io/badge/Docker-Ready-2496ed.svg?style=flat&logo=docker)](https://www.docker.com)
[![MY Network](https://img.shields.io/badge/MY%20Network-v3.0-ff6b35.svg?style=flat)](https://github.com/my-network)
```shell
cd uploader-bot
python3 -m unittest discover -s tests/dht
```
---
## 🎯 What's new in the FastAPI version
### ⚡ FastAPI Migration Complete
A complete migration from Sanic to FastAPI for better performance, type safety, and modern development standards.
### ✨ Key improvements:
- 🔥 **Better Performance**: fully asynchronous FastAPI architecture
- 🛡️ **Type Safety**: automatic validation via Pydantic
- 📚 **Auto Documentation**: interactive API documentation (`/docs`, `/redoc`)
- 🔒 **Enhanced Security**: Ed25519 cryptography + JWT tokens
- 📊 **Built-in Monitoring**: Prometheus metrics + health checks
- 🌐 **100% Web2-Client Compatible**: full compatibility with existing clients
---
## 🚀 Quick installation
### 🔥 Automated installation of the FastAPI version:
```bash
curl -fsSL https://git.projscale.dev/my-dev/uploader-bot/raw/branch/main/start.sh | sudo bash
```
**Defaults:**
- ✅ FastAPI server on port 8000
- ✅ Bootstrap node (creates a new network)
- ✅ Web client enabled
- ✅ Ed25519 cryptography
- ❌ SSL disabled (requires manual setup)
- ❌ Telegram bots disabled
### 🛠️ Interactive installation (with parameter configuration):
```bash
wget https://git.projscale.dev/my-dev/uploader-bot/raw/branch/main/start.sh
chmod +x start.sh
sudo ./start.sh
```
**Interactive mode lets you configure:**
- Network type (bootstrap, or joining an existing network)
- Node type (public/private)
- SSL certificate with a domain
- Telegram API keys
- Path to docker.sock
---
## 📋 FastAPI Components
The `start.sh` script automatically installs:
### 1. **FastAPI Application Stack:**
- **FastAPI 0.104.1** - modern async web framework
- **Uvicorn** - high-performance ASGI server
- **Pydantic** - data validation and serialization
- **SQLAlchemy 2.0** - modern async ORM
### 2. **Automatically cloned repositories:**
- `uploader-bot` - the main FastAPI application
- `web2-client` - node management web interface
- `converter-module` - media conversion module
- `contracts` - blockchain contracts
### 3. **Infrastructure:**
- **PostgreSQL** - primary database
- **Redis** - caching and rate limiting
- **Nginx** - reverse proxy with chunked uploads up to 10 GB
- **Docker** - containerization of all services
### 4. **Security systems:**
- **Ed25519** - cryptographic signatures between nodes
- **JWT Tokens** - modern authentication
- **Rate Limiting** - Redis-backed DDoS protection
- **SSL/TLS** - automatic Let's Encrypt certificates
---
## 🔧 FastAPI Architecture
### 🎯 Core components:
```mermaid
graph TB
Client[Web2-Client] --> Nginx[Nginx Reverse Proxy]
Nginx --> FastAPI[FastAPI Application :8000]
FastAPI --> Auth[Authentication Layer]
FastAPI --> Middleware[Middleware Stack]
FastAPI --> Routes[API Routes]
Auth --> JWT[JWT Tokens]
Auth --> Ed25519[Ed25519 Crypto]
Routes --> Storage[File Storage]
Routes --> Content[Content Management]
Routes --> Node[Node Communication]
Routes --> System[System Management]
FastAPI --> DB[(PostgreSQL)]
FastAPI --> Redis[(Redis Cache)]
FastAPI --> MyNetwork[MY Network v3.0]
```
### 📁 FastAPI application layout:
```
app/
├── fastapi_main.py # Main FastAPI application
├── api/
│ ├── fastapi_auth_routes.py # JWT authentication
│ ├── fastapi_content_routes.py # Content management
│ ├── fastapi_storage_routes.py # Chunked file uploads
│ ├── fastapi_node_routes.py # MY Network communication
│ ├── fastapi_system_routes.py # Health checks & metrics
│ └── fastapi_middleware.py # Security & rate limiting
├── core/
│ ├── security.py # JWT & authentication
│ ├── database.py # Async database connections
│ └── crypto/
│ └── ed25519_manager.py # Ed25519 signatures
└── models/ # SQLAlchemy models
```
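For orientation, a minimal sketch of the entry-point shape this layout implies (route path taken from this README; the real `app/fastapi_main.py` wires routers, middleware, and the database):

```python
from fastapi import FastAPI

app = FastAPI(title="MY Network v3.0", docs_url="/docs", redoc_url="/redoc")

@app.get("/api/system/health")
async def health() -> dict:
    # Served by uvicorn: `uvicorn app.fastapi_main:app --port 8000`
    return {"status": "ok"}
```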
---
## Run
```shell
cd sanic-telegram-bot
# edit .env file
# build media_converter git.projscale.dev/my-dev/converter-module
docker-compose up --build
```
## 🌐 FastAPI Endpoints
### 🔐 Authentication (Web2-Client Compatible)
```bash
# Telegram WebApp Authentication
POST /auth.twa
POST /auth.selectWallet
# Standard Authentication
POST /api/v1/auth/register
POST /api/v1/auth/login
POST /api/v1/auth/refresh
GET /api/v1/auth/me
```
### 📄 Content Management
```bash
# Content Operations
GET /content.view/{content_id}
POST /blockchain.sendNewContentMessage
POST /blockchain.sendPurchaseContentMessage
```
### 📁 File Storage (Chunked Uploads)
```bash
# File Upload with Progress Tracking
POST /api/storage
GET /upload/{upload_id}/status
DELETE /upload/{upload_id}
GET /api/v1/storage/quota
```
### 🌐 MY Network v3.0 (Node Communication)
```bash
# Ed25519 Signed Inter-Node Communication
POST /api/node/handshake
POST /api/node/content/sync
POST /api/node/network/ping
GET /api/node/network/status
POST /api/node/network/discover
```
### 📊 System & Monitoring
```bash
# Health Checks (Kubernetes Ready)
GET /api/system/health
GET /api/system/health/detailed
GET /api/system/ready
GET /api/system/live
# Monitoring & Metrics
GET /api/system/metrics # Prometheus format
GET /api/system/info
GET /api/system/stats
POST /api/system/maintenance
```
### 📚 API Documentation (Development Mode)
```bash
# Interactive Documentation
GET /docs # Swagger UI
GET /redoc # ReDoc
GET /openapi.json # OpenAPI schema
```
---
## Translations
### Adding new language
1. Update translations keys list from code
```shell
touch messages.pot
find app -name '*.py' -exec xgettext --keyword=translated -j -o messages.pot {} +
```
## 🚀 Launch and management
### 🔴 Starting the FastAPI application:
```bash
# Development mode
uvicorn app.fastapi_main:app --host 0.0.0.0 --port 8000 --reload
# Production mode
uvicorn app.fastapi_main:app --host 0.0.0.0 --port 8000 --workers 4
# Docker mode
docker-compose up -d --build
```
2. Move `messages.pot` to `locale/<lang>/LC_MESSAGES/<domain>.po`
3. Compile `.po` to `.mo`
```shell
msgfmt ru.po -o ru.mo
```
### 🎛️ Service management:
```bash
# Systemd service
systemctl start my-network
systemctl stop my-network
systemctl restart my-network
systemctl status my-network
# Docker containers
docker-compose -f /opt/my-network/my-network/docker-compose.yml logs -f
docker-compose -f /opt/my-network/my-network/docker-compose.yml ps
```
### 📡 System access:
| Service | URL | Description |
|---------|-----|-------------|
| **FastAPI API** | `http://localhost:8000` | Main API |
| **Web interface** | `http://localhost` | Nginx → Web2-Client |
| **API Docs** | `http://localhost:8000/docs` | Swagger UI (dev mode) |
| **Health Check** | `http://localhost:8000/api/system/health` | System status |
| **Metrics** | `http://localhost:8000/api/system/metrics` | Prometheus |
---
## 🔍 FastAPI Monitoring
### 📊 Health Checks:
```bash
# Basic health check
curl http://localhost:8000/api/system/health
# Detailed system diagnostics
curl http://localhost:8000/api/system/health/detailed
# Kubernetes probes
curl http://localhost:8000/api/system/ready
curl http://localhost:8000/api/system/live
```
### 📈 Metrics & Statistics:
```bash
# Prometheus metrics
curl http://localhost:8000/api/system/metrics
# System information
curl http://localhost:8000/api/system/info | jq
# Node status (MY Network)
curl http://localhost:8000/api/node/network/status | jq
# System statistics
curl http://localhost:8000/api/system/stats | jq
```
### 🔐 Authentication Testing:
```bash
# Test Telegram WebApp auth
curl -X POST "http://localhost:8000/auth.twa" \
-H "Content-Type: application/json" \
-d '{"twa_data": "test_data", "ton_proof": null}'
# Test protected endpoint with JWT
curl -H "Authorization: Bearer YOUR_JWT_TOKEN" \
http://localhost:8000/api/v1/auth/me
```
---
## Log description
### Sources
1. [SQL] MariaDB
2. [User, options \] User log
3. [Bot, options \] Telegram bot
## 🏗️ MY Network v3.0 Features
### ✨ Decentralized architecture:
- ✅ **No Consensus** - each node makes decisions independently
- ✅ **Peer-to-Peer** - direct signed connections between nodes
- ✅ **Ed25519 Signatures** - cryptographic verification of all messages
- ✅ **Instant Broadcast** - instant relay without decryption
- ✅ **Content Sync** - automatic synchronization between nodes
### 🔒 FastAPI Security Features:
- ✅ **JWT Authentication** - access & refresh tokens
- ✅ **Rate Limiting** - Redis-based DDoS protection
- ✅ **Input Validation** - Pydantic schemas for all endpoints
- ✅ **Security Headers** - automatic security headers
- ✅ **CORS Configuration** - correct setup for web2-client
### 📁 Enhanced File Handling:
- ✅ **Chunked Uploads** - support for files up to 10 GB (see the client sketch below)
- ✅ **Progress Tracking** - real-time progress tracking
- ✅ **Resume Support** - resuming interrupted uploads
- ✅ **Base64 Compatibility** - compatibility with the web2-client format
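An illustrative chunked-upload client against `POST /api/storage` (the field names and chunk protocol here are assumptions based on this README, not the exact wire format):

```python
import requests

CHUNK = 1024 * 1024  # 1 MiB per request

def upload(path: str, token: str, url: str = "http://localhost:8000/api/storage") -> None:
    headers = {"Authorization": f"Bearer {token}"}
    with open(path, "rb") as f:
        index = 0
        while chunk := f.read(CHUNK):
            resp = requests.post(
                url,
                headers=headers,
                files={"chunk": ("blob", chunk)},          # assumed multipart field
                data={"filename": path, "chunk_index": index},
                timeout=30,
            )
            resp.raise_for_status()  # surface failed chunks for resume logic
            index += 1
```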
---
## 🔧 Configuration
### ⚙️ Environment Variables (.env):
```bash
# FastAPI Configuration
UVICORN_HOST=0.0.0.0
UVICORN_PORT=8000
FASTAPI_HOST=0.0.0.0
FASTAPI_PORT=8000
# Database
DATABASE_URL=postgresql://user:pass@postgres:5432/mynetwork
# Redis Cache
REDIS_URL=redis://redis:6379/0
# Security
SECRET_KEY=your-secret-key
JWT_SECRET_KEY=your-jwt-secret
# MY Network v3.0
NODE_ID=auto-generated
NODE_TYPE=bootstrap
NETWORK_MODE=main-node
```
### 🐳 Docker Configuration:
```yaml
# docker-compose.yml
services:
app:
build: .
ports:
- "8000:8000"
command: ["uvicorn", "app.fastapi_main:app", "--host", "0.0.0.0", "--port", "8000"]
environment:
- DATABASE_URL=postgresql://myuser:password@postgres:5432/mynetwork
- REDIS_URL=redis://redis:6379/0
```
---
## 🆘 FastAPI Troubleshooting
### 🔧 Common problems:
**1. FastAPI does not start:**
```bash
# Check the dependencies
pip install -r requirements.txt
# Check the configuration
python -c "from app.fastapi_main import app; print('FastAPI OK')"
# Run with debug logging
uvicorn app.fastapi_main:app --host 0.0.0.0 --port 8000 --log-level debug
```
**2. Web2-Client cannot authenticate:**
```bash
# Check the JWT endpoint
curl -X POST "http://localhost:8000/auth.twa" \
-H "Content-Type: application/json" \
-d '{"twa_data": "test", "ton_proof": null}'
# Should return a JWT token
```
**3. Chunked upload does not work:**
```bash
# Check the Redis connection
redis-cli ping
# Check the storage endpoint
curl -X POST "http://localhost:8000/api/storage" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**4. Health check failed:**
```bash
# Check all components
curl http://localhost:8000/api/system/health/detailed
# Check the database
docker-compose exec postgres pg_isready
# Check Redis
docker-compose exec redis redis-cli ping
```
### 📊 Debug Information:
```bash
# FastAPI application logs
docker-compose logs app
# System metrics
curl http://localhost:8000/api/system/metrics
# Database connection test
docker-compose exec app python -c "
from app.core.database import db_manager
import asyncio
asyncio.run(db_manager.test_connection())
"
```
### 🔄 Migration from Sanic:
```bash
# If you are upgrading from the Sanic version:
# 1. Backup data
docker-compose exec postgres pg_dump mynetwork > backup.sql
# 2. Stop old version
systemctl stop my-network
# 3. Update codebase
git pull origin main
# 4. Install FastAPI dependencies
pip install -r requirements.txt
# 5. Start FastAPI version
uvicorn app.fastapi_main:app --host 0.0.0.0 --port 8000
```
---
## 📖 Documentation
### 📚 FastAPI Documentation:
- **[MIGRATION_COMPLETION_REPORT.md](MIGRATION_COMPLETION_REPORT.md)** - Full migration report
- **[RELEASE_NOTES.md](RELEASE_NOTES.md)** - What's new in the FastAPI version
- **[FASTAPI_MIGRATION_IMPLEMENTATION_REPORT.md](docs/FASTAPI_MIGRATION_IMPLEMENTATION_REPORT.md)** - Technical details
- **[COMPATIBILITY_FIXES_SUMMARY.md](COMPATIBILITY_FIXES_SUMMARY.md)** - Compatibility fixes
### 🔗 Useful links:
- **FastAPI Documentation**: https://fastapi.tiangolo.com/
- **Uvicorn Documentation**: https://www.uvicorn.org/
- **Pydantic Documentation**: https://pydantic-docs.helpmanual.io/
- **MY Network Repository**: https://git.projscale.dev/my-dev/uploader-bot
---
## 🎯 Production Deployment
### 🚀 Production Checklist:
- [ ] **Environment**: Set `DEBUG=false` in production
- [ ] **Database**: Use real PostgreSQL (not SQLite)
- [ ] **Redis**: Use real Redis instance (not MockRedis)
- [ ] **SSL**: Configure SSL certificates with Let's Encrypt
- [ ] **Security**: Generate strong `SECRET_KEY` and `JWT_SECRET_KEY`
- [ ] **Monitoring**: Set up Prometheus metrics collection
- [ ] **Backups**: Configure database backup procedures
- [ ] **Firewall**: Configure UFW/iptables for security
### 🌐 Production Scripts:
```bash
# Full production deployment
./deploy_production_my_network.sh
# Universal installer for any server
./universal_installer.sh
# MY Network v3.0 installer
./start.sh
```
### 📊 Production Monitoring:
```bash
# Health monitoring endpoint
curl https://your-domain.com/api/system/health
# Prometheus metrics for monitoring stack
curl https://your-domain.com/api/system/metrics
# System statistics
curl https://your-domain.com/api/system/stats
```
---
## 📞 Support & Community
### 🆘 Getting Help:
- **Interactive API Docs**: Visit `/docs` on your running instance
- **Health Diagnostics**: Use `/api/system/health/detailed` for system status
- **Application Logs**: Check Docker logs with `docker-compose logs -f`
### 🐛 Reporting Issues:
- **Repository**: [MY Network v3.0 Issues](https://git.projscale.dev/my-dev/uploader-bot/issues)
- **Documentation**: Check `/docs` folder for detailed guides
- **Performance**: Use `/api/system/metrics` for performance data
### 🤝 Contributing:
- **FastAPI Improvements**: Submit PRs for FastAPI enhancements
- **MY Network Features**: Contribute to decentralized features
- **Documentation**: Help improve documentation and guides
---
## 📝 License
MY Network v3.0 with FastAPI - Open Source Project
---
**🚀 MY Network v3.0 with FastAPI - a fast, secure, and modern platform for decentralized content!**
*Built with ❤️ using FastAPI, Modern Python, and Decentralized Technologies*


@@ -1,35 +0,0 @@
[alembic]
script_location = alembic
sqlalchemy.url = ${DATABASE_URL}
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s


@@ -1,22 +1,36 @@
"""Alembic environment configuration for async database migrations."""
import asyncio
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from typing import Any
from alembic import context
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
# Import your models here
from app.core.models.base import Base
from app.core.models.user import User, APIKey, UserSession
from app.core.models.content import Content, ContentVersion, FileUpload, UserSubscription
from app.core.models.blockchain import Wallet, Transaction, BlockchainNFT, BlockchainDeFiPosition, BlockchainStaking, BlockchainTokenBalance
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
database_url = os.environ.get("DATABASE_URL")
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
from app.core.models import AlchemyBase
target_metadata = AlchemyBase.metadata
# Set the target metadata for autogenerate support
target_metadata = Base.metadata
# Configure database URL from environment variable
database_url = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:password@localhost:5432/myuploader")
config.set_main_option("sqlalchemy.url", database_url)
def run_migrations_offline() -> None:
@@ -24,12 +38,11 @@ def run_migrations_offline() -> None:
    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.
    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
@@ -37,32 +50,53 @@ def run_migrations_offline() -> None:
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
        include_schemas=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.
def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
        include_schemas=True,
        render_as_batch=True,  # For better SQLite compatibility if needed
    )
    In this scenario we need to create an Engine
    and associate a connection with the context.
    with context.begin_transaction():
        context.run_migrations()
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
async def run_async_migrations() -> None:
"""Run migrations in async mode."""
configuration = config.get_section(config.config_ini_section, {})
# Override the database URL if it's set in environment
if database_url:
configuration["sqlalchemy.url"] = database_url
connectable = async_engine_from_config(
configuration,
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    with context.begin_transaction():
        context.run_migrations()
    await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())
if context.is_offline_mode():


@@ -5,22 +5,22 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
    """Upgrade database schema."""
    ${upgrades if upgrades else "pass"}
def downgrade() -> None:
    """Downgrade database schema."""
    ${downgrades if downgrades else "pass"}


@@ -0,0 +1,382 @@
"""Initial database tables
Revision ID: 001
Revises:
Create Date: 2025-01-02 16:51:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create initial database tables."""
    # Create users table
    op.create_table(
        'users',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('username', sa.String(50), nullable=False, unique=True),
        sa.Column('email', sa.String(255), nullable=False, unique=True),
        sa.Column('password_hash', sa.String(255), nullable=False),
        sa.Column('first_name', sa.String(100)),
        sa.Column('last_name', sa.String(100)),
        sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
        sa.Column('is_verified', sa.Boolean(), default=False, nullable=False),
        sa.Column('is_superuser', sa.Boolean(), default=False, nullable=False),
        sa.Column('avatar_url', sa.String(500)),
        sa.Column('bio', sa.Text()),
        sa.Column('last_login_at', sa.DateTime(timezone=True)),
        sa.Column('login_count', sa.Integer(), default=0),
        sa.Column('settings', postgresql.JSONB()),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # Create indexes for users
    op.create_index('ix_users_username', 'users', ['username'])
    op.create_index('ix_users_email', 'users', ['email'])
    op.create_index('ix_users_created_at', 'users', ['created_at'])
    op.create_index('ix_users_is_active', 'users', ['is_active'])
    # Create API keys table
    op.create_table(
        'api_keys',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('name', sa.String(100), nullable=False),
        sa.Column('key_hash', sa.String(255), nullable=False, unique=True),
        sa.Column('key_prefix', sa.String(20), nullable=False),
        sa.Column('permissions', postgresql.JSONB(), default={}),
        sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
        sa.Column('expires_at', sa.DateTime(timezone=True)),
        sa.Column('last_used_at', sa.DateTime(timezone=True)),
        sa.Column('usage_count', sa.Integer(), default=0),
        sa.Column('rate_limit', sa.Integer(), default=1000),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # Create indexes for API keys
    op.create_index('ix_api_keys_user_id', 'api_keys', ['user_id'])
    op.create_index('ix_api_keys_key_hash', 'api_keys', ['key_hash'])
    op.create_index('ix_api_keys_is_active', 'api_keys', ['is_active'])
    # Create user sessions table
    op.create_table(
        'user_sessions',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('session_token', sa.String(255), nullable=False, unique=True),
        sa.Column('refresh_token', sa.String(255), nullable=False, unique=True),
        sa.Column('user_agent', sa.String(500)),
        sa.Column('ip_address', sa.String(45)),
        sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('last_activity_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # Create indexes for user sessions
    op.create_index('ix_user_sessions_user_id', 'user_sessions', ['user_id'])
    op.create_index('ix_user_sessions_session_token', 'user_sessions', ['session_token'])
    op.create_index('ix_user_sessions_is_active', 'user_sessions', ['is_active'])
    op.create_index('ix_user_sessions_expires_at', 'user_sessions', ['expires_at'])
    # Create content table
    op.create_table(
        'content',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('description', sa.Text()),
        sa.Column('content_type', sa.String(50), nullable=False),
        sa.Column('file_path', sa.String(500)),
        sa.Column('file_size', sa.BigInteger()),
        sa.Column('file_hash', sa.String(64)),
        sa.Column('mime_type', sa.String(100)),
        sa.Column('is_public', sa.Boolean(), default=False, nullable=False),
        sa.Column('is_featured', sa.Boolean(), default=False, nullable=False),
        sa.Column('view_count', sa.Integer(), default=0),
        sa.Column('download_count', sa.Integer(), default=0),
        sa.Column('like_count', sa.Integer(), default=0),
        sa.Column('metadata', postgresql.JSONB()),
        sa.Column('tags', postgresql.ARRAY(sa.String(50))),
        sa.Column('thumbnail_url', sa.String(500)),
sa.Column('preview_url', sa.String(500)),
sa.Column('status', sa.String(20), default='draft', nullable=False),
sa.Column('published_at', sa.DateTime(timezone=True)),
sa.Column('expires_at', sa.DateTime(timezone=True)),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for content
op.create_index('ix_content_user_id', 'content', ['user_id'])
op.create_index('ix_content_content_type', 'content', ['content_type'])
op.create_index('ix_content_is_public', 'content', ['is_public'])
op.create_index('ix_content_status', 'content', ['status'])
op.create_index('ix_content_created_at', 'content', ['created_at'])
op.create_index('ix_content_published_at', 'content', ['published_at'])
op.create_index('ix_content_file_hash', 'content', ['file_hash'])
op.create_index('ix_content_tags', 'content', ['tags'], postgresql_using='gin')
# Create content versions table
op.create_table(
'content_versions',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('content_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('content.id', ondelete='CASCADE'), nullable=False),
sa.Column('version_number', sa.Integer(), nullable=False),
sa.Column('title', sa.String(255), nullable=False),
sa.Column('description', sa.Text()),
sa.Column('file_path', sa.String(500)),
sa.Column('file_size', sa.BigInteger()),
sa.Column('file_hash', sa.String(64)),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('change_summary', sa.Text()),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for content versions
op.create_index('ix_content_versions_content_id', 'content_versions', ['content_id'])
op.create_index('ix_content_versions_version_number', 'content_versions', ['version_number'])
op.create_index('ix_content_versions_created_at', 'content_versions', ['created_at'])
# Create file uploads table
op.create_table(
'file_uploads',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
sa.Column('filename', sa.String(255), nullable=False),
sa.Column('original_filename', sa.String(255), nullable=False),
sa.Column('file_path', sa.String(500)),
sa.Column('file_size', sa.BigInteger(), nullable=False),
sa.Column('file_hash', sa.String(64)),
sa.Column('mime_type', sa.String(100)),
sa.Column('chunk_size', sa.Integer()),
sa.Column('total_chunks', sa.Integer()),
sa.Column('uploaded_chunks', sa.Integer(), default=0),
sa.Column('upload_session_id', sa.String(100)),
sa.Column('status', sa.String(20), default='pending', nullable=False),
sa.Column('processed', sa.Boolean(), default=False, nullable=False),
sa.Column('processing_started_at', sa.DateTime(timezone=True)),
sa.Column('processing_completed_at', sa.DateTime(timezone=True)),
sa.Column('error_message', sa.Text()),
sa.Column('retry_count', sa.Integer(), default=0),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('expires_at', sa.DateTime(timezone=True)),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for file uploads
op.create_index('ix_file_uploads_user_id', 'file_uploads', ['user_id'])
op.create_index('ix_file_uploads_status', 'file_uploads', ['status'])
op.create_index('ix_file_uploads_processed', 'file_uploads', ['processed'])
op.create_index('ix_file_uploads_upload_session_id', 'file_uploads', ['upload_session_id'])
op.create_index('ix_file_uploads_file_hash', 'file_uploads', ['file_hash'])
op.create_index('ix_file_uploads_expires_at', 'file_uploads', ['expires_at'])
# Create user subscriptions table
op.create_table(
'user_subscriptions',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
sa.Column('plan_name', sa.String(50), nullable=False),
sa.Column('status', sa.String(20), default='active', nullable=False),
sa.Column('storage_limit', sa.BigInteger(), nullable=False),
sa.Column('bandwidth_limit', sa.BigInteger(), nullable=False),
sa.Column('file_count_limit', sa.Integer(), nullable=False),
sa.Column('features', postgresql.JSONB()),
sa.Column('price', sa.Numeric(10, 2)),
sa.Column('currency', sa.String(3), default='USD'),
sa.Column('billing_cycle', sa.String(20), default='monthly'),
sa.Column('starts_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True)),
sa.Column('auto_renew', sa.Boolean(), default=True, nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for user subscriptions
op.create_index('ix_user_subscriptions_user_id', 'user_subscriptions', ['user_id'])
op.create_index('ix_user_subscriptions_status', 'user_subscriptions', ['status'])
op.create_index('ix_user_subscriptions_expires_at', 'user_subscriptions', ['expires_at'])
# Create wallets table
op.create_table(
'wallets',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
sa.Column('address', sa.String(100), nullable=False, unique=True),
sa.Column('network', sa.String(20), default='mainnet', nullable=False),
sa.Column('wallet_type', sa.String(20), default='ton', nullable=False),
sa.Column('balance', sa.Numeric(20, 8), default=0),
sa.Column('public_key', sa.String(200)),
sa.Column('encrypted_private_key', sa.Text()),
sa.Column('derivation_path', sa.String(100)),
sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
sa.Column('is_primary', sa.Boolean(), default=False, nullable=False),
sa.Column('last_sync_at', sa.DateTime(timezone=True)),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for wallets
op.create_index('ix_wallets_user_id', 'wallets', ['user_id'])
op.create_index('ix_wallets_address', 'wallets', ['address'])
op.create_index('ix_wallets_network', 'wallets', ['network'])
op.create_index('ix_wallets_is_active', 'wallets', ['is_active'])
# Create transactions table
op.create_table(
'transactions',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('wallet_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('wallets.id', ondelete='CASCADE'), nullable=False),
sa.Column('tx_hash', sa.String(100), unique=True),
sa.Column('from_address', sa.String(100), nullable=False),
sa.Column('to_address', sa.String(100), nullable=False),
sa.Column('amount', sa.Numeric(20, 8), nullable=False),
sa.Column('fee', sa.Numeric(20, 8)),
sa.Column('gas_limit', sa.BigInteger()),
sa.Column('gas_used', sa.BigInteger()),
sa.Column('gas_price', sa.Numeric(20, 8)),
sa.Column('nonce', sa.BigInteger()),
sa.Column('block_number', sa.BigInteger()),
sa.Column('block_hash', sa.String(100)),
sa.Column('transaction_index', sa.Integer()),
sa.Column('status', sa.String(20), default='pending', nullable=False),
sa.Column('transaction_type', sa.String(20), default='transfer', nullable=False),
sa.Column('confirmations', sa.Integer(), default=0),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for transactions
op.create_index('ix_transactions_wallet_id', 'transactions', ['wallet_id'])
op.create_index('ix_transactions_tx_hash', 'transactions', ['tx_hash'])
op.create_index('ix_transactions_from_address', 'transactions', ['from_address'])
op.create_index('ix_transactions_to_address', 'transactions', ['to_address'])
op.create_index('ix_transactions_status', 'transactions', ['status'])
op.create_index('ix_transactions_created_at', 'transactions', ['created_at'])
op.create_index('ix_transactions_block_number', 'transactions', ['block_number'])
# Create blockchain NFTs table
op.create_table(
'blockchain_nfts',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('wallet_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('wallets.id', ondelete='CASCADE'), nullable=False),
sa.Column('token_id', sa.String(100), nullable=False),
sa.Column('collection_address', sa.String(100), nullable=False),
sa.Column('owner_address', sa.String(100), nullable=False),
sa.Column('token_uri', sa.String(500)),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('name', sa.String(255)),
sa.Column('description', sa.Text()),
sa.Column('image_url', sa.String(500)),
sa.Column('attributes', postgresql.JSONB()),
sa.Column('rarity_score', sa.Numeric(10, 4)),
sa.Column('last_price', sa.Numeric(20, 8)),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create unique constraint for NFTs
op.create_unique_constraint('uq_blockchain_nfts_token_collection', 'blockchain_nfts', ['token_id', 'collection_address'])
# Create indexes for blockchain NFTs
op.create_index('ix_blockchain_nfts_wallet_id', 'blockchain_nfts', ['wallet_id'])
op.create_index('ix_blockchain_nfts_collection_address', 'blockchain_nfts', ['collection_address'])
op.create_index('ix_blockchain_nfts_owner_address', 'blockchain_nfts', ['owner_address'])
# Create blockchain token balances table
op.create_table(
'blockchain_token_balances',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('wallet_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('wallets.id', ondelete='CASCADE'), nullable=False),
sa.Column('token_address', sa.String(100), nullable=False),
sa.Column('token_name', sa.String(100)),
sa.Column('token_symbol', sa.String(20)),
sa.Column('balance', sa.Numeric(30, 18), default=0, nullable=False),
sa.Column('decimals', sa.Integer(), default=18),
sa.Column('usd_value', sa.Numeric(20, 8)),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create unique constraint for token balances
op.create_unique_constraint('uq_token_balances_wallet_token', 'blockchain_token_balances', ['wallet_id', 'token_address'])
# Create indexes for token balances
op.create_index('ix_blockchain_token_balances_wallet_id', 'blockchain_token_balances', ['wallet_id'])
op.create_index('ix_blockchain_token_balances_token_address', 'blockchain_token_balances', ['token_address'])
# Create blockchain DeFi positions table
op.create_table(
'blockchain_defi_positions',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('wallet_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('wallets.id', ondelete='CASCADE'), nullable=False),
sa.Column('protocol_name', sa.String(100), nullable=False),
sa.Column('position_type', sa.String(50), nullable=False),
sa.Column('pool_address', sa.String(100)),
sa.Column('token_symbols', postgresql.ARRAY(sa.String(20))),
sa.Column('balance', sa.Numeric(30, 18), default=0),
sa.Column('usd_value', sa.Numeric(20, 8)),
sa.Column('yield_rate', sa.Numeric(10, 4)),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for DeFi positions
op.create_index('ix_blockchain_defi_positions_wallet_id', 'blockchain_defi_positions', ['wallet_id'])
op.create_index('ix_blockchain_defi_positions_protocol_name', 'blockchain_defi_positions', ['protocol_name'])
# Create blockchain staking table
op.create_table(
'blockchain_staking',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('wallet_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('wallets.id', ondelete='CASCADE'), nullable=False),
sa.Column('validator_address', sa.String(100), nullable=False),
sa.Column('staked_amount', sa.Numeric(20, 8), nullable=False),
sa.Column('rewards_earned', sa.Numeric(20, 8), default=0),
sa.Column('status', sa.String(20), default='active', nullable=False),
sa.Column('delegation_time', sa.DateTime(timezone=True), nullable=False),
sa.Column('unlock_time', sa.DateTime(timezone=True)),
sa.Column('apy', sa.Numeric(10, 4)),
sa.Column('metadata', postgresql.JSONB()),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
)
# Create indexes for staking
op.create_index('ix_blockchain_staking_wallet_id', 'blockchain_staking', ['wallet_id'])
op.create_index('ix_blockchain_staking_validator_address', 'blockchain_staking', ['validator_address'])
op.create_index('ix_blockchain_staking_status', 'blockchain_staking', ['status'])
def downgrade() -> None:
"""Drop all database tables."""
# Drop tables in reverse order to avoid foreign key constraints
op.drop_table('blockchain_staking')
op.drop_table('blockchain_defi_positions')
op.drop_table('blockchain_token_balances')
op.drop_table('blockchain_nfts')
op.drop_table('transactions')
op.drop_table('wallets')
op.drop_table('user_subscriptions')
op.drop_table('file_uploads')
op.drop_table('content_versions')
op.drop_table('content')
op.drop_table('user_sessions')
op.drop_table('api_keys')
op.drop_table('users')
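One detail worth calling out in upgrade() above: the ARRAY column content.tags is paired with a GIN index (ix_content_tags), which is what makes tag containment filters cheap. A sketch of the query shape that index serves, against the schema created above (the query itself is illustrative, not from the diff):

# Illustrative only: tag filtering that the GIN index on content.tags serves.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

content = sa.table(
    "content",
    sa.column("id", postgresql.UUID(as_uuid=True)),
    sa.column("tags", postgresql.ARRAY(sa.String(50))),
)

# ARRAY.contains() compiles to the PostgreSQL @> operator,
# which can use the GIN index instead of a sequential scan.
stmt = sa.select(content.c.id).where(
    content.c.tags.contains(["blockchain", "tutorial"])
)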

@@ -1,26 +0,0 @@
"""add artist column to encrypted content
Revision ID: b1f2d3c4a5b6
Revises: a7c1357e8d15
Create Date: 2024-06-05 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b1f2d3c4a5b6'
down_revision: Union[str, None] = 'a7c1357e8d15'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column('encrypted_contents', sa.Column('artist', sa.String(length=512), nullable=True))
def downgrade() -> None:
op.drop_column('encrypted_contents', 'artist')

@@ -1,38 +0,0 @@
"""expand telegram_id precision on stars invoices
Revision ID: c2d4e6f8a1b2
Revises: b1f2d3c4a5b6
Create Date: 2025-10-17 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c2d4e6f8a1b2'
down_revision: Union[str, None] = 'b1f2d3c4a5b6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.alter_column(
'stars_invoices',
'telegram_id',
existing_type=sa.Integer(),
type_=sa.BigInteger(),
existing_nullable=True,
)
def downgrade() -> None:
op.alter_column(
'stars_invoices',
'telegram_id',
existing_type=sa.BigInteger(),
type_=sa.Integer(),
existing_nullable=True,
)
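A one-line rationale, since the migration message only hints at it: Telegram account IDs have outgrown the signed 32-bit range, so an INTEGER column eventually rejects newer accounts. Illustration (the concrete ID below is made up):

# Why Integer -> BigInteger: modern Telegram IDs exceed the int32 range.
INT32_MAX = 2**31 - 1                 # 2_147_483_647
example_telegram_id = 7_283_441_902   # hypothetical modern account ID
assert example_telegram_id > INT32_MAX  # would overflow sa.Integer()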

@@ -1,70 +0,0 @@
"""create dht_records and rdap_cache tables
Revision ID: d3e5f7a9c0d1
Revises: c2d4e6f8a1b2
Create Date: 2025-10-22 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'd3e5f7a9c0d1'
down_revision: Union[str, None] = 'c2d4e6f8a1b2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
bind = op.get_bind()
inspector = sa.inspect(bind)
# dht_records
if not inspector.has_table('dht_records'):
op.create_table(
'dht_records',
sa.Column('fingerprint', sa.String(length=128), primary_key=True),
sa.Column('key', sa.String(length=512), nullable=False),
sa.Column('schema_version', sa.String(length=16), nullable=False, server_default='v1'),
sa.Column('logical_counter', sa.Integer(), nullable=False, server_default='0'),
sa.Column('timestamp', sa.Float(), nullable=False, server_default='0'),
sa.Column('node_id', sa.String(length=128), nullable=False),
sa.Column('signature', sa.String(length=512), nullable=True),
sa.Column('value', sa.JSON(), nullable=False, server_default=sa.text("'{}'::jsonb")),
sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
)
# ensure index exists (but don't fail if it already exists)
try:
existing_indexes = {idx['name'] for idx in inspector.get_indexes('dht_records')}
except Exception:
existing_indexes = set()
if 'ix_dht_records_key' not in existing_indexes:
op.create_index('ix_dht_records_key', 'dht_records', ['key'])
# rdap_cache
if not inspector.has_table('rdap_cache'):
op.create_table(
'rdap_cache',
sa.Column('ip', sa.String(length=64), primary_key=True),
sa.Column('asn', sa.Integer(), nullable=True),
sa.Column('source', sa.String(length=64), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
)
def downgrade() -> None:
try:
op.drop_table('rdap_cache')
except Exception:
pass
try:
op.drop_index('ix_dht_records_key', table_name='dht_records')
except Exception:
pass
try:
op.drop_table('dht_records')
except Exception:
pass

app/__init__.py Normal file
@@ -0,0 +1,8 @@
"""
MY Uploader Bot - Distributed Content Protocol
Main application package
"""
__version__ = "2.0.0"
__author__ = "MY Network Team"
__description__ = "Distributed Content Protocol v2.0"

@@ -1,209 +1,30 @@
import asyncio
import sys
import os
import time
import traceback
from asyncio import sleep
from datetime import datetime

startup_target = '__main__'
try:
    startup_target = sys.argv[1]
except BaseException:
    pass
from app.core._utils.create_maria_tables import create_db_tables
from app.core.storage import engine
if startup_target != '__main__':
# Background services get a short delay before startup
time.sleep(7)
from app.core.logger import make_log
if int(os.getenv("SANIC_MAINTENANCE", '0')) == 1:
make_log("Global", "Application is in maintenance mode")
while True:
time.sleep(1)
def init_db_schema_sync() -> None:
"""Initialise all SQLAlchemy models in the database before services start.
This ensures that every table defined on AlchemyBase.metadata (including
newer ones like DHT and service_config) exists before any component
accesses the database.
"""
try:
from sqlalchemy import create_engine
from app.core.models import AlchemyBase # imports all models and populates metadata
db_url = os.environ.get('DATABASE_URL')
if not db_url:
raise RuntimeError('DATABASE_URL is not set')
# Normalise DSN to sync driver for schema creation
if '+asyncpg' in db_url:
db_url_sync = db_url.replace('+asyncpg', '+psycopg2')
else:
db_url_sync = db_url
sync_engine = create_engine(db_url_sync, pool_pre_ping=True)
AlchemyBase.metadata.create_all(sync_engine)
except Exception as e:
make_log('Startup', f'DB sync init failed: {e}', level='error')
async def queue_daemon(app):
await sleep(3)
while True:
delayed_list = {k: v for k, v in app.ctx.memory._delayed_queue.items()}
for _execute_ts in delayed_list:
if _execute_ts <= datetime.now().timestamp():
del app.ctx.memory._delayed_queue[_execute_ts]
app.ctx.memory._execute_queue.append(delayed_list[_execute_ts])
await sleep(.7)
async def execute_queue(app):
telegram_bot_username = (await app.ctx.memory._telegram_bot.get_me()).username
client_telegram_bot_username = (await app.ctx.memory._client_telegram_bot.get_me()).username
make_log(None, f"Application normally started. HTTP port: {SANIC_PORT}")
make_log(None, f"Telegram bot: https://t.me/{telegram_bot_username}")
make_log(None, f"Client Telegram bot: https://t.me/{client_telegram_bot_username}")
try:
_db_host = DATABASE_URL.split('@')[1].split('/')[0].replace('/', '')
except Exception:
_db_host = 'postgres://'
make_log(None, f"PostgreSQL host: {_db_host}")
make_log(None, f"API host: {PROJECT_HOST}")
while True:
try:
_cmd = app.ctx.memory._execute_queue.pop(0)
except IndexError:
await sleep(.05)
continue
_fn = _cmd.pop(0)
assert _fn
_args = _cmd.pop(0)
assert type(_args) is tuple
try:
_kwargs = _cmd.pop(0)
assert type(_kwargs) is dict
except IndexError:
_kwargs = {}
try:
make_log("Queue.execute", f"{_fn} {_args} {_kwargs}", level='debug')
await _fn(*_args, **_kwargs)
except BaseException as e:
make_log("Queue.execute", f"{_fn} {_args} {_kwargs} => Error: {e}" + '\n' + str(traceback.format_exc()))
if __name__ == '__main__':
# Ensure DB schema is fully initialised for all models
init_db_schema_sync()
from app.core.models import Memory
main_memory = Memory()
if startup_target == '__main__':
# Defer heavy imports to avoid side effects in background services
# Mark this process as the primary node for seeding/config init
os.environ.setdefault('NODE_ROLE', 'primary')
from app.api import app
# Delay aiogram dispatcher creation until loop is running
from app.core._config import SANIC_PORT, PROJECT_HOST, DATABASE_URL
from app.core.network.nodes import network_handshake_daemon, bootstrap_once_and_exit_if_failed
from app.core.network.maintenance import replication_daemon, heartbeat_daemon, dht_gossip_daemon
app.ctx.memory = main_memory
app.ctx.memory._app = app
# Ensure DB schema exists using the same event loop as Sanic (idempotent)
app.add_task(create_db_tables(engine))
app.add_task(execute_queue(app))
app.add_task(queue_daemon(app))
# Start bots after loop is ready
async def _start_bots():
try:
from app.bot import create_dispatcher as create_uploader_dp
from app.client_bot import create_dispatcher as create_client_dp
uploader_bot_dp = create_uploader_dp()
client_bot_dp = create_client_dp()
for _target in [uploader_bot_dp, client_bot_dp]:
_target._s_memory = app.ctx.memory
await asyncio.gather(
uploader_bot_dp.start_polling(app.ctx.memory._telegram_bot),
client_bot_dp.start_polling(app.ctx.memory._client_telegram_bot),
)
except Exception as e:
make_log('Bots', f'Failed to start bots: {e}', level='error')
app.add_task(_start_bots())
# Start network handshake daemon and bootstrap step
app.add_task(network_handshake_daemon(app))
app.add_task(bootstrap_once_and_exit_if_failed())
app.add_task(replication_daemon(app))
app.add_task(heartbeat_daemon(app))
app.add_task(dht_gossip_daemon(app))
app.run(host='0.0.0.0', port=SANIC_PORT)
else:
time.sleep(2)
startup_fn = None
if startup_target == 'indexer':
from app.core.background.indexer_service import main_fn as target_fn
time.sleep(1)
elif startup_target == 'uploader':
from app.core.background.uploader_service import main_fn as target_fn
time.sleep(3)
elif startup_target == 'ton_daemon':
from app.core.background.ton_service import main_fn as target_fn
time.sleep(5)
elif startup_target == 'license_index':
from app.core.background.license_service import main_fn as target_fn
time.sleep(7)
elif startup_target == 'convert_process':
from app.core.background.convert_service import main_fn as target_fn
time.sleep(9)
elif startup_target == 'convert_v3':
from app.core.background.convert_v3_service import main_fn as target_fn
time.sleep(9)
elif startup_target == 'index_scout_v3':
from app.core.background.index_scout_v3 import main_fn as target_fn
time.sleep(7)
elif startup_target == 'derivative_janitor':
from app.core.background.derivative_cache_janitor import main_fn as target_fn
time.sleep(5)
elif startup_target == 'events_sync':
from app.core.background.event_sync_service import main_fn as target_fn
time.sleep(5)
startup_fn = startup_fn or target_fn
assert startup_fn
async def wrapped_startup_fn(*args):
try:
await startup_fn(*args)
except BaseException as e:
make_log(startup_target[0].upper() + startup_target[1:], f"Error: {e}" + '\n' + str(traceback.format_exc()),
level='error')
sys.exit(1)
try:
loop = asyncio.get_event_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# Background services no longer perform schema initialization
loop.run_until_complete(wrapped_startup_fn(main_memory))
except BaseException as e:
make_log(startup_target[0].upper() + startup_target[1:], f"Error: {e}" + '\n' + str(traceback.format_exc()),
level='error')
sys.exit(0)
finally:
loop.close()
#!/usr/bin/env python3
"""
MY Network App Entry Point
Entry point for launching via python -m app
"""
import sys
import os
from pathlib import Path

# Add the project root directory to the import path
root_dir = Path(__file__).parent.parent
sys.path.insert(0, str(root_dir))

# Import and run main from start_my_network.py
try:
    from start_my_network import main

    if __name__ == "__main__":
        print("🚀 Starting MY Network via app.__main__.py")
        main()
except ImportError as e:
    print(f"❌ Error importing start_my_network: {e}")
    print("📂 Current working directory:", os.getcwd())
    print("🐍 Python path:", sys.path)
    sys.exit(1)
except Exception as e:
    print(f"❌ Fatal error: {e}")
    sys.exit(1)
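The queue contract implicit in the removed execute_queue/queue_daemon pair is worth spelling out: entries are lists shaped [coroutine_fn, args_tuple, optional_kwargs_dict], and delayed entries are keyed by their execution timestamp. A hedged producer sketch (the Memory internals are inferred from the pops above; no such helpers appear in the diff):

# Sketch: producing work for the execute_queue/queue_daemon pair above.
# The [fn, (args...), {kwargs}] shape mirrors execute_queue's pops.
from datetime import datetime, timedelta

def enqueue(memory, fn, *args, **kwargs) -> None:
    memory._execute_queue.append([fn, args, kwargs])

def enqueue_delayed(memory, delay_s: float, fn, *args, **kwargs) -> None:
    # queue_daemon moves the entry once its timestamp key has passed
    execute_ts = (datetime.now() + timedelta(seconds=delay_s)).timestamp()
    memory._delayed_queue[execute_ts] = [fn, args, kwargs]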

@@ -1,198 +0,0 @@
import traceback
from sanic import Sanic, response
from uuid import uuid4
import traceback as _traceback
from app.core.logger import make_log
app = Sanic(__name__)
from app.api.middleware import attach_user_to_request, close_db_session, close_request_handler
app.register_middleware(attach_user_to_request, "request")
app.register_middleware(close_db_session, "response")
from app.api.routes._index import s_index, s_favicon
from app.api.routes._system import s_api_v1_node, s_api_system_version, s_api_system_send_status, s_api_v1_node_friendly
from app.api.routes.network import (
s_api_v1_network_info,
s_api_v1_network_nodes,
s_api_v1_network_handshake,
)
from app.api.routes.network_events import s_api_v1_network_events
from app.api.routes.auth import s_api_v1_auth_twa, s_api_v1_auth_select_wallet, s_api_v1_auth_me
from app.api.routes.statics import s_api_tonconnect_manifest, s_api_platform_metadata
from app.api.routes.node_storage import s_api_v1_storage_post, s_api_v1_storage_get, \
s_api_v1_storage_decode_cid
from app.api.routes.progressive_storage import s_api_v1_5_storage_get, s_api_v1_5_storage_post, s_api_v1_storage_fetch, s_api_v1_storage_proxy
from app.api.routes.upload_tus import s_api_v1_upload_tus_hook
from app.api.routes.account import s_api_v1_account_get
from app.api.routes._blockchain import s_api_v1_blockchain_send_new_content_message, \
s_api_v1_blockchain_send_purchase_content_message
from app.api.routes.content import s_api_v1_content_list, s_api_v1_content_view, s_api_v1_content_friendly_list, s_api_v1_5_content_list
from app.api.routes.content_index import s_api_v1_content_index, s_api_v1_content_delta
from app.api.routes.derivatives import s_api_v1_content_derivatives
from app.api.routes.admin import (
s_api_v1_admin_blockchain,
s_api_v1_admin_cache_cleanup,
s_api_v1_admin_cache_setlimits,
s_api_v1_admin_events,
s_api_v1_admin_licenses,
s_api_v1_admin_login,
s_api_v1_admin_logout,
s_api_v1_admin_users_setadmin,
s_api_v1_admin_node_setrole,
s_api_v1_admin_nodes,
s_api_v1_admin_overview,
s_api_v1_admin_stars,
s_api_v1_admin_status,
s_api_v1_admin_storage,
s_api_v1_admin_sync_setlimits,
s_api_v1_admin_system,
s_api_v1_admin_uploads,
s_api_v1_admin_users,
s_api_v1_admin_network,
s_api_v1_admin_network_config,
s_api_v1_admin_network_config_set,
)
from app.api.routes.tonconnect import s_api_v1_tonconnect_new, s_api_v1_tonconnect_logout
from app.api.routes.keys import s_api_v1_keys_request
from app.api.routes.sync import s_api_v1_sync_pin, s_api_v1_sync_status
from app.api.routes.upload_status import s_api_v1_upload_status
from app.api.routes.metrics import s_api_metrics
from app.api.routes.dht import s_api_v1_dht_get, s_api_v1_dht_put
app.add_route(s_index, "/", methods=["GET", "OPTIONS"])
app.add_route(s_favicon, "/favicon.ico", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_node, "/api/v1/node", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_node_friendly, "/api/v1/nodeFriendly", methods=["GET", "OPTIONS"])
app.add_route(s_api_system_version, "/api/system.version", methods=["GET", "OPTIONS"])
app.add_route(s_api_system_send_status, "/api/system.sendStatus", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_network_info, "/api/v1/network.info", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_network_nodes, "/api/v1/network.nodes", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_network_handshake, "/api/v1/network.handshake", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_network_events, "/api/v1/network.events", methods=["GET", "OPTIONS"])
app.add_route(s_api_tonconnect_manifest, "/api/tonconnect-manifest.json", methods=["GET", "OPTIONS"])
app.add_route(s_api_platform_metadata, "/api/platform-metadata.json", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_auth_twa, "/api/v1/auth.twa", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_auth_me, "/api/v1/auth.me", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_auth_select_wallet, "/api/v1/auth.selectWallet", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_tonconnect_new, "/api/v1/tonconnect.new", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_tonconnect_logout, "/api/v1/tonconnect.logout", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_5_storage_post, "/api/v1.5/storage", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_5_storage_get, "/api/v1.5/storage/<file_hash>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_storage_fetch, "/api/v1/storage.fetch/<file_hash>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_storage_proxy, "/api/v1/storage.proxy/<file_hash>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_storage_post, "/api/v1/storage", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_storage_get, "/api/v1/storage/<file_hash>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_storage_decode_cid, "/api/v1/storage.decodeContentId/<content_id>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_account_get, "/api/v1/account", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_blockchain_send_new_content_message, "/api/v1/blockchain.sendNewContentMessage", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_blockchain_send_purchase_content_message, "/api/v1/blockchain.sendPurchaseContentMessage", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_content_list, "/api/v1/content.list", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_content_view, "/api/v1/content.view/<content_address>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_content_friendly_list, "/api/v1/content.friendlyList", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_5_content_list, "/api/v1.5/content.list", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_content_index, "/api/v1/content.index", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_content_delta, "/api/v1/content.delta", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_content_derivatives, "/api/v1/content.derivatives", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_login, "/api/v1/admin.login", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_logout, "/api/v1/admin.logout", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_overview, "/api/v1/admin.overview", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_storage, "/api/v1/admin.storage", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_uploads, "/api/v1/admin.uploads", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_users, "/api/v1/admin.users", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_users_setadmin, "/api/v1/admin.users.setAdmin", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_licenses, "/api/v1/admin.licenses", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_stars, "/api/v1/admin.stars", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_events, "/api/v1/admin.events", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_system, "/api/v1/admin.system", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_blockchain, "/api/v1/admin.blockchain", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_node_setrole, "/api/v1/admin.node.setRole", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_nodes, "/api/v1/admin.nodes", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_status, "/api/v1/admin.status", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_cache_setlimits, "/api/v1/admin.cache.setLimits", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_cache_cleanup, "/api/v1/admin.cache.cleanup", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_sync_setlimits, "/api/v1/admin.sync.setLimits", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_admin_network, "/api/v1/admin.network", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_network_config, "/api/v1/admin.network.config", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_admin_network_config_set, "/api/v1/admin.network.config.set", methods=["POST", "OPTIONS"])
# tusd HTTP hooks
app.add_route(s_api_v1_upload_tus_hook, "/api/v1/upload.tus-hook", methods=["POST", "OPTIONS"])
# Keys auto-grant
app.add_route(s_api_v1_keys_request, "/api/v1/keys.request", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_sync_pin, "/api/v1/sync.pin", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_sync_status, "/api/v1/sync.status", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_upload_status, "/api/v1/upload.status/<upload_id>", methods=["GET", "OPTIONS"])
app.add_route(s_api_metrics, "/metrics", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_dht_get, "/api/v1/dht.get", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_dht_put, "/api/v1/dht.put", methods=["POST", "OPTIONS"])
@app.exception(BaseException)
async def s_handle_exception(request, exception):
# Correlate error to request
session_id = getattr(request.ctx, 'session_id', None) or uuid4().hex[:16]
error_id = uuid4().hex[:8]
status = 500
code = type(exception).__name__
message = "Internal HTTP Error"
try:
raise exception
except AssertionError as e:
status = 400
code = 'AssertionError'
message = str(e) or 'Bad Request'
except BaseException as e:
# keep default 500, but expose exception message to aid debugging
message = str(e) or message
# Build structured log with full context and traceback
try:
tb = _traceback.format_exc()
user_id = getattr(getattr(request.ctx, 'user', None), 'id', None)
log_ctx = {
'sid': session_id,
'eid': error_id,
'path': request.path,
'method': request.method,
'query': dict(request.args) if hasattr(request, 'args') else {},
'user_id': user_id,
'remote': (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip),
'code': code,
'message': message,
'traceback': tb,
}
make_log('http_exception', 'API exception', level='error', **log_ctx)
except BaseException:
pass
# Return enriched error response for the client
payload = {
'error': True,
'code': code,
'message': message,
'session_id': session_id,
'error_id': error_id,
'path': request.path,
'method': request.method,
}
response_buffer = response.json(payload, status=status)
response_buffer = await close_db_session(request, response_buffer)
return response_buffer
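A note on the handler above: mapping AssertionError to HTTP 400 means route code can validate inputs with bare asserts. A hedged sketch of a handler relying on that contract (the route itself is invented for illustration):

# Illustrative handler: s_handle_exception turns the failed assert
# into a 400 response with code='AssertionError' and this message.
from sanic import response

async def s_api_v1_example(request):
    value = request.args.get("value")
    assert value, "value query parameter is required"
    return response.json({"ok": True, "value": value})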

app/api/__main__.py Normal file
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
"""
MY Network API Server Entry Point
"""
import asyncio
import uvloop
from app.api import app, logger
from app.core.config import settings
def main():
"""Start MY Network API server"""
try:
# Use uvloop for better async performance
uvloop.install()
logger.info("Starting MY Network API Server...")
# Start server in single process mode to avoid worker conflicts
app.run(
host="0.0.0.0",
port=settings.SANIC_PORT,
debug=settings.DEBUG,
auto_reload=False,
single_process=True
)
except KeyboardInterrupt:
logger.info("Server stopped by user")
except Exception as e:
logger.error(f"Server startup failed: {e}")
raise
if __name__ == "__main__":
main()

app/api/docs.py Normal file
@@ -0,0 +1,416 @@
"""OpenAPI documentation configuration for my-uploader-bot API."""
from typing import Dict, Any
# API metadata
API_TITLE = "My Uploader Bot API"
API_VERSION = "2.0.0"
API_DESCRIPTION = """
# My Uploader Bot API
A comprehensive file upload and management system with blockchain integration.
## Features
- **File Upload & Management**: Chunked uploads, multiple storage backends, file processing
- **User Authentication**: JWT tokens, API keys, sessions management
- **Blockchain Integration**: TON blockchain wallet management, transactions
- **Content Management**: Version control, metadata, search functionality
- **Security**: Rate limiting, CORS, input validation, file encryption
- **Monitoring**: Prometheus metrics, structured logging, health checks
## Authentication
The API supports multiple authentication methods:
1. **JWT Bearer Token**: Use `Authorization: Bearer <token>` header
2. **API Key**: Use `X-API-Key: <api_key>` header
3. **Session Cookie**: Browser-based authentication
## Rate Limiting
API endpoints are rate-limited based on user tier:
- Free tier: 100 requests per hour
- Premium tier: 1000 requests per hour
- Enterprise tier: 10000 requests per hour
## File Upload Process
1. **Initiate Upload**: POST `/api/v1/storage/upload/initiate` with file metadata
2. **Upload Chunks**: POST `/api/v1/storage/upload/chunk` for each chunk
3. **Complete Upload**: POST `/api/v1/storage/upload/complete` to finalize
4. **Processing**: File is automatically processed in the background
## Error Handling
All errors follow RFC 7807 Problem Details format:
```json
{
"type": "https://api.myuploader.com/errors/validation",
"title": "Validation Error",
"status": 422,
"detail": "The request body contains invalid data",
"instance": "/api/v1/content/upload",
"errors": [
{
"field": "file_size",
"message": "File size exceeds maximum limit"
}
]
}
```
## Webhook Events
The API can send webhook notifications for:
- File upload completion
- Processing status updates
- Blockchain transaction confirmations
- User subscription changes
## SDKs and Examples
- Python SDK: `pip install myuploader-python`
- JavaScript SDK: `npm install @myuploader/js-sdk`
- Examples: https://github.com/myuploader/examples
## Support
- Documentation: https://docs.myuploader.com
- Support: support@myuploader.com
- Status: https://status.myuploader.com
"""
# OpenAPI tags
TAGS_METADATA = [
{
"name": "Authentication",
"description": "User authentication and session management endpoints",
},
{
"name": "Users",
"description": "User profile and account management",
},
{
"name": "Content",
"description": "Content management, search, and metadata operations",
},
{
"name": "Storage",
"description": "File upload, download, and storage operations",
},
{
"name": "Blockchain",
"description": "TON blockchain wallet and transaction management",
},
{
"name": "System",
"description": "System health, metrics, and administrative endpoints",
},
]
# Response examples
RESPONSE_EXAMPLES = {
"user_profile": {
"summary": "User profile example",
"value": {
"id": "123e4567-e89b-12d3-a456-426614174000",
"username": "john_doe",
"email": "john@example.com",
"first_name": "John",
"last_name": "Doe",
"is_active": True,
"is_verified": True,
"avatar_url": "https://cdn.myuploader.com/avatars/john_doe.jpg",
"bio": "Software developer and blockchain enthusiast",
"created_at": "2024-01-01T00:00:00Z",
"updated_at": "2024-01-01T00:00:00Z"
}
},
"content_item": {
"summary": "Content item example",
"value": {
"id": "123e4567-e89b-12d3-a456-426614174001",
"title": "My Awesome Video",
"description": "A great video about blockchain development",
"content_type": "video",
"file_path": "uploads/user123/video_2024_01_01.mp4",
"file_size": 104857600,
"mime_type": "video/mp4",
"is_public": True,
"view_count": 1250,
"download_count": 95,
"like_count": 42,
"tags": ["blockchain", "tutorial", "development"],
"thumbnail_url": "https://cdn.myuploader.com/thumbnails/video_thumb.jpg",
"status": "published",
"created_at": "2024-01-01T00:00:00Z",
"updated_at": "2024-01-01T00:00:00Z"
}
},
"upload_session": {
"summary": "Upload session example",
"value": {
"session_id": "upload_123e4567-e89b-12d3-a456-426614174002",
"filename": "large_video.mp4",
"file_size": 1073741824,
"chunk_size": 1048576,
"total_chunks": 1024,
"uploaded_chunks": 512,
"status": "uploading",
"progress": 50.0,
"expires_at": "2024-01-01T01:00:00Z",
"upload_urls": [
"https://api.myuploader.com/api/v1/storage/upload/chunk"
]
}
},
"wallet_info": {
"summary": "Wallet information example",
"value": {
"id": "123e4567-e89b-12d3-a456-426614174003",
"address": "EQD6M8aVGx1fF6Z5q5q5q5q5q5q5q5q5q5q5q5q5q5q5q5q5q",
"network": "mainnet",
"balance": "10.50000000",
"is_active": True,
"is_primary": True,
"created_at": "2024-01-01T00:00:00Z",
"transactions": [
{
"tx_hash": "abc123def456ghi789jkl012mno345pqr678stu901vwx234yz",
"amount": "5.00000000",
"status": "confirmed",
"created_at": "2024-01-01T00:30:00Z"
}
]
}
},
"error_validation": {
"summary": "Validation error example",
"value": {
"type": "https://api.myuploader.com/errors/validation",
"title": "Validation Error",
"status": 422,
"detail": "The request contains invalid data",
"instance": "/api/v1/content/upload",
"errors": [
{
"field": "file_size",
"message": "File size must be less than 100MB"
},
{
"field": "content_type",
"message": "Content type is required"
}
]
}
},
"error_auth": {
"summary": "Authentication error example",
"value": {
"type": "https://api.myuploader.com/errors/authentication",
"title": "Authentication Required",
"status": 401,
"detail": "Valid authentication credentials are required",
"instance": "/api/v1/content/private"
}
},
"error_forbidden": {
"summary": "Permission error example",
"value": {
"type": "https://api.myuploader.com/errors/forbidden",
"title": "Insufficient Permissions",
"status": 403,
"detail": "You don't have permission to access this resource",
"instance": "/api/v1/admin/users"
}
},
"error_not_found": {
"summary": "Not found error example",
"value": {
"type": "https://api.myuploader.com/errors/not-found",
"title": "Resource Not Found",
"status": 404,
"detail": "The requested resource was not found",
"instance": "/api/v1/content/nonexistent-id"
}
},
"error_rate_limit": {
"summary": "Rate limit error example",
"value": {
"type": "https://api.myuploader.com/errors/rate-limit",
"title": "Rate Limit Exceeded",
"status": 429,
"detail": "Too many requests. Please try again later",
"instance": "/api/v1/content/search",
"retry_after": 60
}
}
}
# Security schemes
SECURITY_SCHEMES = {
"BearerAuth": {
"type": "http",
"scheme": "bearer",
"bearerFormat": "JWT",
"description": "JWT token authentication. Get token from /api/v1/auth/login"
},
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-API-Key",
"description": "API key authentication. Get API key from user dashboard"
},
"CookieAuth": {
"type": "apiKey",
"in": "cookie",
"name": "session",
"description": "Session cookie authentication"
}
}
# OpenAPI configuration
def get_openapi_config() -> Dict[str, Any]:
"""Get OpenAPI configuration."""
return {
"title": API_TITLE,
"version": API_VERSION,
"description": API_DESCRIPTION,
"terms_of_service": "https://myuploader.com/terms",
"contact": {
"name": "My Uploader Bot Support",
"url": "https://myuploader.com/support",
"email": "support@myuploader.com"
},
"license": {
"name": "MIT License",
"url": "https://opensource.org/licenses/MIT"
},
"servers": [
{
"url": "https://api.myuploader.com",
"description": "Production server"
},
{
"url": "https://staging-api.myuploader.com",
"description": "Staging server"
},
{
"url": "http://localhost:8000",
"description": "Development server"
}
],
"tags": TAGS_METADATA,
"components": {
"securitySchemes": SECURITY_SCHEMES,
"examples": RESPONSE_EXAMPLES,
"responses": {
"ValidationError": {
"description": "Validation error response",
"content": {
"application/json": {
"example": RESPONSE_EXAMPLES["error_validation"]["value"]
}
}
},
"AuthError": {
"description": "Authentication error response",
"content": {
"application/json": {
"example": RESPONSE_EXAMPLES["error_auth"]["value"]
}
}
},
"ForbiddenError": {
"description": "Permission error response",
"content": {
"application/json": {
"example": RESPONSE_EXAMPLES["error_forbidden"]["value"]
}
}
},
"NotFoundError": {
"description": "Not found error response",
"content": {
"application/json": {
"example": RESPONSE_EXAMPLES["error_not_found"]["value"]
}
}
},
"RateLimitError": {
"description": "Rate limit error response",
"content": {
"application/json": {
"example": RESPONSE_EXAMPLES["error_rate_limit"]["value"]
}
}
}
}
},
"security": [
{"BearerAuth": []},
{"ApiKeyAuth": []},
{"CookieAuth": []}
]
}
# Custom OpenAPI schema
CUSTOM_OPENAPI_SCHEMA = {
"x-logo": {
"url": "https://myuploader.com/logo.png",
"altText": "My Uploader Bot Logo"
},
"x-code-samples": [
{
"lang": "Python",
"source": """
import requests
# Upload a file
response = requests.post(
'https://api.myuploader.com/api/v1/storage/upload/initiate',
headers={'Authorization': 'Bearer <your_token>'},
json={
'filename': 'example.jpg',
'file_size': 1024000,
'content_type': 'image'
}
)
"""
},
{
"lang": "JavaScript",
"source": """
// Upload a file
const response = await fetch('https://api.myuploader.com/api/v1/storage/upload/initiate', {
method: 'POST',
headers: {
'Authorization': 'Bearer <your_token>',
'Content-Type': 'application/json'
},
body: JSON.stringify({
filename: 'example.jpg',
file_size: 1024000,
content_type: 'image'
})
});
"""
},
{
"lang": "cURL",
"source": """
curl -X POST https://api.myuploader.com/api/v1/storage/upload/initiate \\
-H "Authorization: Bearer <your_token>" \\
-H "Content-Type: application/json" \\
-d '{
"filename": "example.jpg",
"file_size": 1024000,
"content_type": "image"
}'
"""
}
]
}
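Since API_DESCRIPTION documents the initiate → chunk → complete flow only in prose, here is a hedged client sketch of it (endpoint paths and session_id come from the examples above; chunk_index and the multipart field name are assumptions):

# Hedged sketch of the documented three-step upload flow.
import requests

BASE = "https://api.myuploader.com"
HEADERS = {"Authorization": "Bearer <your_token>"}
CHUNK_SIZE = 1_048_576  # 1 MiB, matching the upload_session example

def upload(path: str) -> None:
    with open(path, "rb") as fh:
        data = fh.read()
    init = requests.post(
        f"{BASE}/api/v1/storage/upload/initiate",
        headers=HEADERS,
        json={"filename": path, "file_size": len(data), "content_type": "image"},
    ).json()
    session_id = init["session_id"]  # field name from the upload_session example
    for index in range(0, len(data), CHUNK_SIZE):
        requests.post(
            f"{BASE}/api/v1/storage/upload/chunk",
            headers=HEADERS,
            files={"chunk": data[index:index + CHUNK_SIZE]},
            # chunk_index is an assumed parameter name, not from the docs
            data={"session_id": session_id, "chunk_index": index // CHUNK_SIZE},
        )
    requests.post(
        f"{BASE}/api/v1/storage/upload/complete",
        headers=HEADERS,
        json={"session_id": session_id},
    )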

@@ -0,0 +1,612 @@
"""
FastAPI маршруты для аутентификации с поддержкой TON Connect и Telegram WebApp
Полная совместимость с web2-client требованиями
"""
import asyncio
import json
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import UUID, uuid4
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse
from sqlalchemy import select, update, and_, or_
from sqlalchemy.orm import selectinload
from pydantic import BaseModel, Field
from app.core.config import get_settings
from app.core.database import db_manager, get_cache_manager
from app.core.logging import get_logger
from app.core.models.user import User, UserSession, UserRole
from app.core.security import (
hash_password, verify_password, generate_access_token,
verify_access_token, generate_refresh_token, generate_api_key,
sanitize_input, generate_csrf_token
)
from app.api.fastapi_middleware import get_current_user, require_auth
# Initialize router for auth endpoints
router = APIRouter(prefix="", tags=["auth"])
logger = get_logger(__name__)
settings = get_settings()
# Pydantic models for request validation
class TWAAuthRequest(BaseModel):
    """Telegram WebApp authentication request"""
    twa_data: str
    ton_proof: Optional[Dict[str, Any]] = None

class TWAAuthResponse(BaseModel):
    """Authentication response"""
    connected_wallet: Optional[Dict[str, Any]] = None
    auth_v1_token: str

class SelectWalletRequest(BaseModel):
    """Wallet selection request"""
    wallet_address: str

class UserRegistrationRequest(BaseModel):
    """User registration request"""
    username: str = Field(..., min_length=3, max_length=50)
    email: str = Field(..., pattern=r'^[^@]+@[^@]+\.[^@]+$')
    password: str = Field(..., min_length=8)
    full_name: Optional[str] = Field(None, max_length=100)

class UserLoginRequest(BaseModel):
    """User login request"""
    username: str
    password: str
    remember_me: bool = False

class RefreshTokenRequest(BaseModel):
    """Token refresh request"""
    refresh_token: str
@router.post("/auth.twa", response_model=TWAAuthResponse)
async def auth_twa(request: Request, auth_data: TWAAuthRequest):
"""
Аутентификация через Telegram WebApp с поддержкой TON proof
Критически важный эндпоинт для web2-client
"""
try:
client_ip = request.client.host
await logger.ainfo("TWA auth started", step="begin", twa_data_length=len(auth_data.twa_data))
        # Base authentication path without TON proof
if not auth_data.ton_proof:
await logger.ainfo("TWA auth: no TON proof path", step="no_ton_proof")
            # Validate TWA data
            if not auth_data.twa_data:
                raise HTTPException(status_code=400, detail="TWA data required")
            # Telegram WebApp data validation should happen here;
            # for the demo we return a basic token
await logger.ainfo("TWA auth: calling _process_twa_data", step="processing_twa")
user_data = await _process_twa_data(auth_data.twa_data)
await logger.ainfo("TWA auth: _process_twa_data completed", step="twa_processed", user_data=user_data)
            # Generate the token
try:
expires_minutes = int(getattr(settings, 'ACCESS_TOKEN_EXPIRE_MINUTES', 30))
expires_in_seconds = expires_minutes * 60
except (ValueError, TypeError):
expires_in_seconds = 30 * 60 # fallback to 30 minutes
auth_token = generate_access_token(
{"user_id": user_data["user_id"], "username": user_data["username"]},
expires_in=expires_in_seconds
)
await logger.ainfo(
"TWA authentication successful",
user_id=user_data["user_id"],
ip=client_ip,
has_ton_proof=False
)
return TWAAuthResponse(
connected_wallet=None,
auth_v1_token=auth_token
)
        # Authentication with TON proof
        else:
            # Validate TWA data
            user_data = await _process_twa_data(auth_data.twa_data)
            # Process the TON proof
            ton_proof_data = auth_data.ton_proof
            account = ton_proof_data.get("account")
            proof = ton_proof_data.get("ton_proof")
            if not account or not proof:
                raise HTTPException(status_code=400, detail="Invalid TON proof format")
            # Validate the TON proof (a real cryptographic check belongs here)
            is_valid_proof = await _validate_ton_proof(proof, account, auth_data.twa_data)
            if not is_valid_proof:
                raise HTTPException(status_code=400, detail="Invalid TON proof")
            # Generate a token with the verified wallet
auth_token = generate_access_token(
{
"user_id": user_data["user_id"],
"username": user_data["username"],
"wallet_verified": True,
"wallet_address": account.get("address")
},
expires_in=settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
)
            # Build the connected-wallet info
            connected_wallet = {
                "version": account.get("chain", "unknown"),
                "address": account.get("address"),
                "ton_balance": "0"  # A real balance lookup belongs here
            }
}
await logger.ainfo(
"TWA authentication with TON proof successful",
user_id=user_data["user_id"],
wallet_address=account.get("address"),
ip=client_ip
)
return TWAAuthResponse(
connected_wallet=connected_wallet,
auth_v1_token=auth_token
)
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"TWA authentication failed",
error=str(e),
ip=client_ip
)
raise HTTPException(status_code=500, detail="Authentication failed")
@router.post("/auth.selectWallet")
async def auth_select_wallet(
request: Request,
wallet_data: SelectWalletRequest,
current_user: User = Depends(require_auth)
):
"""
Выбор кошелька для аутентифицированного пользователя
Критически важный эндпоинт для web2-client
"""
try:
wallet_address = wallet_data.wallet_address
        # Validate the wallet address
if not wallet_address or len(wallet_address) < 10:
raise HTTPException(status_code=400, detail="Invalid wallet address")
        # Check that the wallet exists on the TON network
is_valid_wallet = await _validate_ton_wallet(wallet_address)
if not is_valid_wallet:
            # Return 404 if the wallet is not found or invalid
raise HTTPException(status_code=404, detail="Wallet not found or invalid")
        # Update the user's wallet info
async with db_manager.get_session() as session:
user_stmt = select(User).where(User.id == current_user.id)
user_result = await session.execute(user_stmt)
user = user_result.scalar_one_or_none()
if not user:
raise HTTPException(status_code=404, detail="User not found")
            # Update the wallet address
user.wallet_address = wallet_address
user.wallet_connected_at = datetime.utcnow()
await session.commit()
await logger.ainfo(
"Wallet selected successfully",
user_id=str(current_user.id),
wallet_address=wallet_address
)
return {
"message": "Wallet selected successfully",
"wallet_address": wallet_address,
"selected_at": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Wallet selection failed",
user_id=str(current_user.id),
wallet_address=wallet_data.wallet_address,
error=str(e)
)
raise HTTPException(status_code=500, detail="Wallet selection failed")
@router.post("/api/v1/auth/register")
async def register_user(request: Request, user_data: UserRegistrationRequest):
"""
Регистрация нового пользователя (дополнительный эндпоинт)
"""
try:
client_ip = request.client.host
        # Rate-limit check (enforced via middleware)
cache_manager = await get_cache_manager()
ip_reg_key = f"registration_ip:{client_ip}"
ip_registrations = await cache_manager.get(ip_reg_key, default=0)
if ip_registrations >= 3: # Max 3 registrations per IP per day
raise HTTPException(status_code=429, detail="Too many registrations from this IP")
async with db_manager.get_session() as session:
# Check if username already exists
username_stmt = select(User).where(User.username == user_data.username)
username_result = await session.execute(username_stmt)
if username_result.scalar_one_or_none():
raise HTTPException(status_code=400, detail="Username already exists")
# Check if email already exists
email_stmt = select(User).where(User.email == user_data.email)
email_result = await session.execute(email_stmt)
if email_result.scalar_one_or_none():
raise HTTPException(status_code=400, detail="Email already registered")
# Hash password
password_hash = hash_password(user_data.password)
# Create user
new_user = User(
id=uuid4(),
username=sanitize_input(user_data.username),
email=sanitize_input(user_data.email),
password_hash=password_hash,
full_name=sanitize_input(user_data.full_name or ""),
is_active=True,
email_verified=False,
registration_ip=client_ip,
last_login_ip=client_ip,
settings={"theme": "light", "notifications": True}
)
session.add(new_user)
await session.commit()
await session.refresh(new_user)
# Update IP registration counter
await cache_manager.increment(ip_reg_key, ttl=86400)
# Generate tokens
access_token = generate_access_token(
{"user_id": str(new_user.id), "username": user_data.username},
expires_in=settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
)
refresh_token = generate_refresh_token(new_user.id)
await logger.ainfo(
"User registered successfully",
user_id=str(new_user.id),
username=user_data.username,
email=user_data.email,
ip=client_ip
)
return {
"message": "Registration successful",
"user": {
"id": str(new_user.id),
"username": user_data.username,
"email": user_data.email,
"full_name": user_data.full_name,
"created_at": new_user.created_at.isoformat()
},
"tokens": {
"access_token": access_token,
"refresh_token": refresh_token,
"token_type": "Bearer",
"expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
}
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"User registration failed",
username=user_data.username,
email=user_data.email,
error=str(e)
)
raise HTTPException(status_code=500, detail="Registration failed")
@router.post("/api/v1/auth/login")
async def login_user(request: Request, login_data: UserLoginRequest):
"""
    User login issuing JWT tokens.
"""
try:
client_ip = request.client.host
# Check login rate limiting
cache_manager = await get_cache_manager()
login_key = f"login_attempts:{login_data.username}:{client_ip}"
attempts = await cache_manager.get(login_key, default=0)
if attempts >= 5: # Max 5 failed attempts
raise HTTPException(status_code=429, detail="Too many login attempts")
async with db_manager.get_session() as session:
# Find user by username or email
user_stmt = select(User).where(
or_(User.username == login_data.username, User.email == login_data.username)
).options(selectinload(User.roles))
user_result = await session.execute(user_stmt)
user = user_result.scalar_one_or_none()
if not user or not verify_password(login_data.password, user.password_hash):
# Increment failed attempts
await cache_manager.increment(login_key, ttl=900) # 15 minutes
await logger.awarning(
"Failed login attempt",
username=login_data.username,
ip=client_ip,
attempts=attempts + 1
)
raise HTTPException(status_code=401, detail="Invalid credentials")
if not user.is_active:
raise HTTPException(status_code=403, detail="Account deactivated")
# Successful login - clear failed attempts
await cache_manager.delete(login_key)
# Update user login info
user.last_login_at = datetime.utcnow()
user.last_login_ip = client_ip
user.login_count = (user.login_count or 0) + 1
await session.commit()
# Generate tokens
user_permissions = []
for role in user.roles:
user_permissions.extend(role.permissions)
expires_in = settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
if login_data.remember_me:
expires_in *= 24 # 24x longer for remember me
access_token = generate_access_token(
{
"user_id": str(user.id),
"username": user.username,
"permissions": list(set(user_permissions))
},
expires_in=expires_in
)
refresh_token = generate_refresh_token(user.id)
await logger.ainfo(
"User logged in successfully",
user_id=str(user.id),
username=user.username,
ip=client_ip,
remember_me=login_data.remember_me
)
return {
"message": "Login successful",
"user": {
"id": str(user.id),
"username": user.username,
"email": user.email,
"full_name": user.full_name,
"last_login": user.last_login_at.isoformat() if user.last_login_at else None,
"permissions": user_permissions
},
"tokens": {
"access_token": access_token,
"refresh_token": refresh_token,
"token_type": "Bearer",
"expires_in": expires_in
}
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Login failed",
username=login_data.username,
error=str(e)
)
raise HTTPException(status_code=500, detail="Login failed")
@router.post("/api/v1/auth/refresh")
async def refresh_tokens(request: Request, refresh_data: RefreshTokenRequest):
"""
    Refresh the access token using a refresh token.
"""
try:
# Verify refresh token
payload = verify_access_token(refresh_data.refresh_token, token_type="refresh")
if not payload:
raise HTTPException(status_code=401, detail="Invalid refresh token")
user_id = UUID(payload["user_id"])
async with db_manager.get_session() as session:
# Get user with permissions
user_stmt = select(User).where(User.id == user_id).options(selectinload(User.roles))
user_result = await session.execute(user_stmt)
user = user_result.scalar_one_or_none()
if not user or not user.is_active:
raise HTTPException(status_code=401, detail="User not found or inactive")
# Generate new tokens (token rotation)
user_permissions = []
for role in user.roles:
user_permissions.extend(role.permissions)
new_access_token = generate_access_token(
{
"user_id": str(user.id),
"username": user.username,
"permissions": list(set(user_permissions))
},
expires_in=settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
)
new_refresh_token = generate_refresh_token(user.id)
await logger.adebug(
"Tokens refreshed",
user_id=str(user_id)
)
return {
"tokens": {
"access_token": new_access_token,
"refresh_token": new_refresh_token,
"token_type": "Bearer",
"expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
}
}
except HTTPException:
raise
except Exception as e:
await logger.aerror("Token refresh failed", error=str(e))
raise HTTPException(status_code=500, detail="Token refresh failed")
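# --- Illustrative client flow for the endpoints above (paths as defined in
# --- this module; token fields as returned above) ---
# 1. POST /api/v1/auth/login    {"username": "...", "password": "..."}
# 2. Call protected endpoints with header:  Authorization: Bearer <access_token>
# 3. POST /api/v1/auth/refresh  {"refresh_token": "<refresh_token>"}
#    -> returns a rotated token pair; treat the old refresh token as spent,
#       since this endpoint rotates both tokens.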
@router.get("/api/v1/auth/me")
async def get_current_user_info(current_user: User = Depends(require_auth)):
"""
    Get information about the current user.
"""
try:
async with db_manager.get_session() as session:
# Get user with full details
user_stmt = select(User).where(User.id == current_user.id).options(
selectinload(User.roles),
selectinload(User.api_keys)
)
user_result = await session.execute(user_stmt)
full_user = user_result.scalar_one_or_none()
if not full_user:
raise HTTPException(status_code=404, detail="User not found")
# Get user permissions
permissions = []
roles = []
for role in full_user.roles:
roles.append({
"name": role.name,
"description": role.description
})
permissions.extend(role.permissions)
return {
"user": {
"id": str(full_user.id),
"username": full_user.username,
"email": full_user.email,
"full_name": full_user.full_name,
"bio": full_user.bio,
"avatar_url": full_user.avatar_url,
"is_active": full_user.is_active,
"email_verified": full_user.email_verified,
"created_at": full_user.created_at.isoformat(),
"last_login_at": full_user.last_login_at.isoformat() if full_user.last_login_at else None,
"login_count": full_user.login_count,
"settings": full_user.settings
},
"roles": roles,
"permissions": list(set(permissions))
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Failed to get current user",
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to get user information")
# Helper functions
async def _process_twa_data(twa_data: str) -> Dict[str, Any]:
    """Process Telegram WebApp (TWA) init data."""
    await logger.ainfo("_process_twa_data started", twa_data_length=len(twa_data))
    # Real TWA data validation belongs here (see the sketch below);
    # for the demo we return placeholder data
    result = {
        "user_id": str(uuid4()),
        "username": "twa_user",
        "first_name": "TWA",
        "last_name": "User"
    }
    await logger.ainfo("_process_twa_data completed", result=result)
    return result
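# --- Illustrative sketch only (not wired into the routes above). Real TWA
# --- validation follows Telegram's documented scheme: initData is a query
# --- string whose "hash" field must equal an HMAC-SHA256 over the sorted
# --- remaining "key=value" pairs, keyed by HMAC-SHA256("WebAppData", bot
# --- token). `bot_token` is an assumed argument; this module defines none.
def _verify_twa_init_data_sketch(init_data: str, bot_token: str) -> bool:
    import hashlib
    import hmac
    from urllib.parse import parse_qsl
    fields = dict(parse_qsl(init_data, keep_blank_values=True))
    received_hash = fields.pop("hash", "")
    # Data-check string: "key=value" lines sorted by key, joined with "\n"
    check_string = "\n".join(f"{k}={v}" for k, v in sorted(fields.items()))
    secret = hmac.new(b"WebAppData", bot_token.encode(), hashlib.sha256).digest()
    expected = hmac.new(secret, check_string.encode(), hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, received_hash)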
async def _validate_ton_proof(proof: Dict[str, Any], account: Dict[str, Any], twa_data: str) -> bool:
    """Validate a TON proof."""
    # Real TON proof validation belongs here;
    # for the demo only basic checks are performed
    try:
        # Basic sanity checks
        if not proof.get("timestamp") or not proof.get("domain"):
            return False
        if not account.get("address") or not account.get("chain"):
            return False
        # A cryptographic signature check belongs here (see the note below)
        return True
    except Exception as e:
        logger.error(f"TON proof validation error: {e}")
        return False
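# Note: a full ton_proof check (per the TON Connect spec) verifies an Ed25519
# signature made by the wallet's key over a message assembled from the
# "ton-proof-item-v2/" prefix, the raw account address, the app domain, the
# timestamp and the payload; the exact byte layout is defined by the spec and
# is not reproduced in this demo.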
async def _validate_ton_wallet(wallet_address: str) -> bool:
    """Validate a TON wallet address."""
    # A real check that the wallet exists on the TON network belongs here;
    # for the demo any address passing the format check is accepted
    try:
        # Basic address-format check
        if len(wallet_address) < 40:
            return False
        # A query to a TON API belongs here (see the sketch below)
        return True
    except Exception as e:
        logger.error(f"TON wallet validation error: {e}")
        return False
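# --- Illustrative sketch of an on-chain existence check (assumes the public
# --- toncenter HTTP API and that aiohttp is available; the endpoint name and
# --- response shape should be verified against the API actually used) ---
async def _check_wallet_on_chain_sketch(wallet_address: str) -> bool:
    import aiohttp
    url = "https://toncenter.com/api/v2/getAddressInformation"
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params={"address": wallet_address}) as resp:
            if resp.status != 200:
                return False
            data = await resp.json()
            # A deployed account reports state "active" (or "frozen");
            # "uninitialized" means nothing is deployed at the address
            state = data.get("result", {}).get("state")
            return state in ("active", "frozen")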

View File

@ -0,0 +1,326 @@
"""
Compatibility routes to preserve deprecated uploader-bot API surface (v1/system).
These endpoints mirror legacy paths so older clients continue to function,
while new v3 sync API works in parallel.
"""
import base64
import os
from hashlib import sha256  # needed by the upload handler below
from typing import Optional, List
import aiofiles
from fastapi import APIRouter, UploadFile, File, HTTPException, Query
from fastapi.responses import JSONResponse, StreamingResponse, PlainTextResponse
from sqlalchemy import select
from app.core.logging import get_logger
from app.core.config import get_settings
from app.core.database import db_manager
from app.core.models.content_models import StoredContent as Content
from app.core.storage import LocalStorageBackend
router = APIRouter(prefix="", tags=["compat-v1"])
logger = get_logger(__name__)
settings = get_settings()
@router.get("/")
async def index_root():
return PlainTextResponse("MY Network Node", status_code=200)
@router.get("/favicon.ico")
async def favicon():
return PlainTextResponse("", status_code=204)
@router.get("/api/system.version")
async def system_version():
codebase_hash = os.getenv("CODEBASE_HASH", "unknown")
codebase_branch = os.getenv("CODEBASE_BRANCH", os.getenv("GIT_BRANCH", "main"))
return {"codebase_hash": codebase_hash, "codebase_branch": codebase_branch}
@router.post("/api/system.sendStatus")
async def system_send_status(payload: dict):
try:
message_b58 = payload.get("message")
signature = payload.get("signature")
if not message_b58 or not signature:
raise HTTPException(status_code=400, detail="message and signature required")
await logger.ainfo("Compat system.sendStatus", signature=signature)
return {"ok": True}
except HTTPException:
raise
except Exception as e:
await logger.aerror("sendStatus failed", error=str(e))
raise HTTPException(status_code=500, detail="sendStatus failed")
@router.get("/api/tonconnect-manifest.json")
async def tonconnect_manifest():
host = str(getattr(settings, "PROJECT_HOST", "")) or os.getenv("PROJECT_HOST", "") or "http://localhost:8000"
return {
"url": host,
"name": "MY Network Node",
"iconUrl": f"{host}/static/icon.png",
"termsOfUseUrl": f"{host}/terms",
"privacyPolicyUrl": f"{host}/privacy",
"bridgeUrl": "https://bridge.tonapi.io/bridge",
"manifestVersion": 2
}
@router.get("/api/platform-metadata.json")
async def platform_metadata():
host = str(getattr(settings, "PROJECT_HOST", "")) or os.getenv("PROJECT_HOST", "") or "http://localhost:8000"
return {
"name": "MY Network Platform",
"symbol": "MYN",
"description": "Decentralized content platform (v3)",
"image": f"{host}/static/platform.png",
"external_url": host,
"version": "3.0.0"
}
@router.get("/api/v1/node")
async def v1_node():
from app.core.crypto import get_ed25519_manager
cm = get_ed25519_manager()
return {
"id": cm.node_id,
"node_address": "",
"master_address": "",
"indexer_height": 0,
"services": {}
}
@router.get("/api/v1/nodeFriendly")
async def v1_node_friendly():
from app.core.crypto import get_ed25519_manager
cm = get_ed25519_manager()
return PlainTextResponse(f"Node ID: {cm.node_id}\nIndexer height: 0\nServices: none\n")
@router.post("/api/v1/auth.twa")
async def v1_auth_twa(payload: dict):
user_ref = payload.get("user") or {}
token = base64.b64encode(f"twa:{user_ref}".encode()).decode()
return {"token": token}
@router.get("/api/v1/auth.me")
async def v1_auth_me():
return {"user": None, "status": "guest"}
@router.post("/api/v1/auth.selectWallet")
async def v1_auth_select_wallet(payload: dict):
return {"ok": True}
@router.get("/api/v1/tonconnect.new")
async def v1_tonconnect_new():
return {"ok": True}
@router.post("/api/v1/tonconnect.logout")
async def v1_tonconnect_logout(payload: dict):
return {"ok": True}
@router.post("/api/v1.5/storage")
async def v1_5_storage_upload(file: UploadFile = File(...)):
return await v1_storage_upload(file)
@router.get("/api/v1.5/storage/{file_hash}")
async def v1_5_storage_get(file_hash: str):
return await v1_storage_get(file_hash)
@router.post("/api/v1/storage")
async def v1_storage_upload(file: UploadFile = File(...)):
try:
data = await file.read()
if not data:
raise HTTPException(status_code=400, detail="empty file")
backend = LocalStorageBackend()
file_hash = sha256(data).hexdigest()
file_path = os.path.join(backend.files_path, file_hash)
async with aiofiles.open(file_path, 'wb') as f:
await f.write(data)
async with db_manager.get_session() as session:
existing = await session.execute(select(Content).where(Content.hash == file_hash))
if existing.scalars().first() is None:
content = Content(
hash=file_hash,
filename=file.filename or file_hash,
file_size=len(data),
mime_type=file.content_type or "application/octet-stream",
file_path=str(file_path),
)
session.add(content)
await session.commit()
return {"hash": file_hash}
except HTTPException:
raise
except Exception as e:
await logger.aerror("v1 upload failed", error=str(e))
raise HTTPException(status_code=500, detail="upload failed")
@router.get("/api/v1/storage/{file_hash}")
async def v1_storage_get(file_hash: str):
try:
async with db_manager.get_session() as session:
result = await session.execute(select(Content).where(Content.hash == file_hash))
content = result.scalars().first()
if not content or not content.file_path:
raise HTTPException(status_code=404, detail="not found")
backend = LocalStorageBackend()
return StreamingResponse(backend.get_file_stream(content.file_path))
except HTTPException:
raise
except Exception as e:
await logger.aerror("v1 storage get failed", error=str(e))
raise HTTPException(status_code=500, detail="failed")
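# --- Illustrative usage of the two storage endpoints above (assumes the node
# --- listens on localhost:8000; adjust host/port to your deployment) ---
# Upload:   curl -F "file=@track.mp3" http://localhost:8000/api/v1/storage
#           -> {"hash": "<sha256-hex>"}
# Download: curl http://localhost:8000/api/v1/storage/<sha256-hex> -o track.mp3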
@router.get("/api/v1/storage.decodeContentId/{content_id}")
async def v1_decode_content_id(content_id: str):
try:
async with db_manager.get_session() as session:
result = await session.execute(select(Content).where(Content.id == content_id))
content = result.scalars().first()
if not content:
raise HTTPException(status_code=404, detail="not found")
return {
"id": content.id,
"hash": content.hash,
"filename": content.filename,
"size": content.file_size,
"mime_type": content.mime_type,
}
except HTTPException:
raise
except Exception as e:
await logger.aerror("decodeContentId failed", error=str(e))
raise HTTPException(status_code=500, detail="failed")
@router.get("/api/v1/content.list")
async def v1_content_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
try:
async with db_manager.get_session() as session:
result = await session.execute(select(Content).offset(offset).limit(limit))
items: List[Content] = result.scalars().all()
return {
"items": [
{
"id": it.id,
"hash": it.hash,
"filename": it.filename,
"size": it.file_size,
"mime_type": it.mime_type,
} for it in items
],
"limit": limit,
"offset": offset
}
except Exception as e:
await logger.aerror("content.list failed", error=str(e))
raise HTTPException(status_code=500, detail="failed")
@router.get("/api/v1/content.view")
async def v1_content_view(hash: Optional[str] = None, id: Optional[str] = None):
try:
if not hash and not id:
raise HTTPException(status_code=400, detail="hash or id required")
async with db_manager.get_session() as session:
stmt = select(Content)
if hash:
stmt = stmt.where(Content.hash == hash)
if id:
stmt = stmt.where(Content.id == id)
result = await session.execute(stmt)
content = result.scalars().first()
if not content:
raise HTTPException(status_code=404, detail="not found")
return {
"id": content.id,
"hash": content.hash,
"filename": content.filename,
"size": content.file_size,
"mime_type": content.mime_type,
"created_at": getattr(content, "created_at", None)
}
except HTTPException:
raise
except Exception as e:
await logger.aerror("content.view failed", error=str(e))
raise HTTPException(status_code=500, detail="failed")
@router.get("/api/v1/content.view/{content_address}")
async def v1_content_view_path(content_address: str):
try:
async with db_manager.get_session() as session:
result = await session.execute(select(Content).where((Content.id == content_address) | (Content.hash == content_address)))
content = result.scalars().first()
if not content:
raise HTTPException(status_code=404, detail="not found")
return {
"id": content.id,
"hash": content.hash,
"filename": content.filename,
"size": content.file_size,
"mime_type": content.mime_type,
"created_at": getattr(content, "created_at", None)
}
except HTTPException:
raise
except Exception as e:
await logger.aerror("content.view(path) failed", error=str(e))
raise HTTPException(status_code=500, detail="failed")
@router.get("/api/v1/content.friendlyList")
async def v1_content_friendly_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
return await v1_content_list(limit, offset)
@router.get("/api/v1.5/content.list")
async def v1_5_content_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
return await v1_content_list(limit, offset)
@router.post("/api/v1/blockchain.sendNewContentMessage")
async def v1_chain_send_new_content(payload: dict):
await logger.ainfo("compat blockchain.sendNewContentMessage", payload=payload)
return {"ok": True}
@router.post("/api/v1/blockchain.sendPurchaseContent")
async def v1_chain_send_purchase(payload: dict):
await logger.ainfo("compat blockchain.sendPurchaseContent", payload=payload)
return {"ok": True}
@router.post("/api/v1/blockchain.sendPurchaseContentMessage")
async def v1_chain_send_purchase_message(payload: dict):
await logger.ainfo("compat blockchain.sendPurchaseContentMessage", payload=payload)
return {"ok": True}
@router.get("/api/v1/account")
async def v1_account():
return {"ok": True}

View File

@ -0,0 +1,479 @@
"""
FastAPI routes for content management.
Critical endpoints for web2-client compatibility.
"""
import asyncio
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import UUID, uuid4
from fastapi import APIRouter, HTTPException, Request, Depends, UploadFile, File
from fastapi.responses import JSONResponse, StreamingResponse
from sqlalchemy import select, update, delete, and_, or_, func
from sqlalchemy.orm import selectinload
from pydantic import BaseModel, Field
from app.core.config import get_settings
from app.core.database import db_manager, get_cache_manager
from app.core.logging import get_logger
from app.core.models.content_models import StoredContent as Content, UserContent as ContentMetadata
from app.core.models.user import User
from app.api.fastapi_middleware import get_current_user, require_auth
# Initialize router
router = APIRouter(prefix="", tags=["content"])
logger = get_logger(__name__)
settings = get_settings()
# Pydantic models
class ContentViewRequest(BaseModel):
"""Модель для просмотра контента (совместимость с web2-client)"""
pass
class NewContentRequest(BaseModel):
"""Модель для создания нового контента"""
title: str = Field(..., min_length=1, max_length=200)
content: str = Field(..., min_length=1)
image: str = Field(..., min_length=1)
description: str = Field(..., max_length=1000)
hashtags: List[str] = Field(default=[])
price: str = Field(..., min_length=1)
resaleLicensePrice: str = Field(default="0")
allowResale: bool = Field(default=False)
authors: List[str] = Field(default=[])
royaltyParams: List[Dict[str, Any]] = Field(default=[])
downloadable: bool = Field(default=True)
class PurchaseContentRequest(BaseModel):
"""Модель для покупки контента"""
content_address: str = Field(..., min_length=1)
license_type: str = Field(..., pattern="^(listen|resale)$")
class ContentResponse(BaseModel):
"""Модель ответа с информацией о контенте"""
address: str
amount: str
payload: str
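# On the client, a ContentResponse is expected to become a TON Connect
# transaction, roughly (client-side sketch, not part of this module):
#   tonConnectUI.sendTransaction({ messages: [{ address, amount, payload }] })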
@router.get("/content.view/{content_id}")
async def view_content(
content_id: str,
request: Request,
current_user: User = Depends(get_current_user)
):
"""
    View content. Critical endpoint for web2-client compatibility;
    equivalent of GET /content.view/{id} in web2-client/src/shared/services/content/index.ts.
"""
try:
        # Authorization check: web2-client sends its localStorage token in the
        # Authorization header, so reject when neither a header token nor an
        # authenticated user is present
        auth_token = request.headers.get('authorization')
        if not auth_token and not current_user:
            raise HTTPException(status_code=401, detail="Authentication required")
        # Validate content_id
try:
content_uuid = UUID(content_id)
except ValueError:
raise HTTPException(status_code=400, detail="Invalid content ID format")
        # Check the cache first
cache_manager = await get_cache_manager()
cache_key = f"content_view:{content_id}"
cached_content = await cache_manager.get(cache_key)
if cached_content:
await logger.ainfo(
"Content view (cached)",
content_id=content_id,
user_id=str(current_user.id) if current_user else "anonymous"
)
return cached_content
async with db_manager.get_session() as session:
            # Load the content together with its metadata
stmt = (
select(Content)
.options(
selectinload(Content.metadata),
selectinload(Content.access_controls)
)
.where(Content.id == content_uuid)
)
result = await session.execute(stmt)
content = result.scalar_one_or_none()
if not content:
raise HTTPException(status_code=404, detail="Content not found")
            # Access check
has_access = await _check_content_access(content, current_user, session)
if not has_access:
raise HTTPException(status_code=403, detail="Access denied")
            # Build the response (web2-client compatible shape)
content_data = {
"id": str(content.id),
"title": content.title,
"description": content.description,
"content_type": content.content_type,
"file_size": content.file_size,
"status": content.status,
"visibility": content.visibility,
"tags": content.tags or [],
"created_at": content.created_at.isoformat(),
"updated_at": content.updated_at.isoformat(),
"user_id": str(content.user_id),
"file_url": f"/api/v1/content/{content_id}/download",
"preview_url": f"/api/v1/content/{content_id}/preview",
"metadata": {
"duration": getattr(content, 'duration', None),
"bitrate": getattr(content, 'bitrate', None),
"format": content.content_type
}
}
            # Cache for 10 minutes
            await cache_manager.set(cache_key, content_data, ttl=600)
            # Update the view statistics
await _update_view_stats(content_id, current_user)
await logger.ainfo(
"Content viewed successfully",
content_id=content_id,
user_id=str(current_user.id) if current_user else "anonymous"
)
return content_data
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Content view failed",
content_id=content_id,
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to load content")
@router.post("/blockchain.sendNewContentMessage", response_model=ContentResponse)
async def send_new_content_message(
request: Request,
content_data: NewContentRequest,
current_user: User = Depends(require_auth)
):
"""
    Create new content. Critical endpoint for web2-client compatibility;
    equivalent of useCreateNewContent in web2-client.
"""
try:
await logger.ainfo("Content creation started", step="begin", user_id=str(current_user.id))
        # Check the user's daily content quota
await logger.ainfo("Getting cache manager", step="cache_init")
cache_manager = await get_cache_manager()
await logger.ainfo("Cache manager obtained", step="cache_ready")
quota_key = f"user:{current_user.id}:content_quota"
daily_content = await cache_manager.get(quota_key, default=0)
await logger.ainfo("Quota checked", step="quota_check", daily_content=daily_content)
if daily_content >= settings.MAX_CONTENT_PER_DAY:
raise HTTPException(status_code=429, detail="Daily content creation limit exceeded")
        # Validate the content data
        if not content_data.title or not content_data.content:
            raise HTTPException(status_code=400, detail="Title and content are required")
        # Validate the price
try:
price_nanotons = int(content_data.price)
if price_nanotons < 0:
raise ValueError("Price cannot be negative")
except ValueError:
raise HTTPException(status_code=400, detail="Invalid price format")
async with db_manager.get_session() as session:
            # Create the content record
new_content = Content(
id=uuid4(),
user_id=current_user.id,
title=content_data.title,
description=content_data.description,
content_type="application/json", # Для метаданных
status="pending",
visibility="public" if not content_data.price or price_nanotons == 0 else "premium",
tags=content_data.hashtags,
file_size=len(content_data.content.encode('utf-8'))
)
session.add(new_content)
await session.commit()
await session.refresh(new_content)
            # Create the metadata record
content_metadata = ContentMetadata(
content_id=new_content.id,
metadata_type="blockchain_content",
data={
"content": content_data.content,
"image": content_data.image,
"price": content_data.price,
"resaleLicensePrice": content_data.resaleLicensePrice,
"allowResale": content_data.allowResale,
"authors": content_data.authors,
"royaltyParams": content_data.royaltyParams,
"downloadable": content_data.downloadable
}
)
session.add(content_metadata)
await session.commit()
            # Update the quota counter
            await cache_manager.increment(quota_key, ttl=86400)
            # Generate the blockchain payload for TON
blockchain_payload = await _generate_blockchain_payload(
content_id=str(new_content.id),
price=content_data.price,
metadata=content_data.__dict__
)
await logger.ainfo(
"New content message created",
content_id=str(new_content.id),
user_id=str(current_user.id),
title=content_data.title,
price=content_data.price
)
            # Response in the shape expected by web2-client
return ContentResponse(
address=settings.TON_CONTRACT_ADDRESS or "EQC_CONTRACT_ADDRESS",
                amount=str(settings.TON_DEPLOY_FEE or "50000000"),  # 0.05 TON in nanotons
payload=blockchain_payload
)
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"New content creation failed",
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to create content")
@router.post("/blockchain.sendPurchaseContentMessage", response_model=ContentResponse)
async def send_purchase_content_message(
request: Request,
purchase_data: PurchaseContentRequest,
current_user: User = Depends(require_auth)
):
"""
    Purchase content. Critical endpoint for web2-client compatibility;
    equivalent of usePurchaseContent in web2-client.
"""
try:
content_address = purchase_data.content_address
license_type = purchase_data.license_type
        # Validate the content address
if not content_address:
raise HTTPException(status_code=400, detail="Content address is required")
        # Look up the content by address (or ID)
async with db_manager.get_session() as session:
            # Try to resolve the address as a UUID first
content = None
try:
content_uuid = UUID(content_address)
stmt = select(Content).where(Content.id == content_uuid)
result = await session.execute(stmt)
content = result.scalar_one_or_none()
except ValueError:
                # Not a UUID: fall back to the blockchain address
stmt = select(Content).where(Content.blockchain_address == content_address)
result = await session.execute(stmt)
content = result.scalar_one_or_none()
if not content:
raise HTTPException(status_code=404, detail="Content not found")
            # The buyer must not be the content owner
if content.user_id == current_user.id:
raise HTTPException(status_code=400, detail="Cannot purchase own content")
            # Load metadata to determine the price
metadata_stmt = select(ContentMetadata).where(
ContentMetadata.content_id == content.id,
ContentMetadata.metadata_type == "blockchain_content"
)
metadata_result = await session.execute(metadata_stmt)
metadata = metadata_result.scalar_one_or_none()
if not metadata:
raise HTTPException(status_code=404, detail="Content metadata not found")
            # Pick the price based on the license type
content_data = metadata.data
if license_type == "listen":
price = content_data.get("price", "0")
elif license_type == "resale":
price = content_data.get("resaleLicensePrice", "0")
if not content_data.get("allowResale", False):
raise HTTPException(status_code=400, detail="Resale not allowed for this content")
else:
raise HTTPException(status_code=400, detail="Invalid license type")
            # Validate the price
try:
price_nanotons = int(price)
if price_nanotons < 0:
raise ValueError("Invalid price")
except ValueError:
raise HTTPException(status_code=400, detail="Invalid content price")
            # Generate the blockchain payload for the purchase
purchase_payload = await _generate_purchase_payload(
content_id=str(content.id),
content_address=content_address,
license_type=license_type,
price=price,
buyer_id=str(current_user.id)
)
await logger.ainfo(
"Purchase content message created",
content_id=str(content.id),
content_address=content_address,
license_type=license_type,
price=price,
buyer_id=str(current_user.id)
)
            # Response in the shape expected by web2-client
return ContentResponse(
address=content_address,
amount=price,
payload=purchase_payload
)
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Purchase content failed",
content_address=purchase_data.content_address,
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to create purchase message")
# Helper functions
async def _check_content_access(content: Content, user: Optional[User], session) -> bool:
"""Проверка доступа к контенту"""
# Публичный контент доступен всем
if content.visibility == "public":
return True
# Владелец всегда имеет доступ
if user and content.user_id == user.id:
return True
# Премиум контент требует покупки
if content.visibility == "premium":
if not user:
return False
# Проверяем, покупал ли пользователь этот контент
# Здесь должна быть проверка в таблице покупок
return False
# Приватный контент доступен только владельцу
return False
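# --- Illustrative sketch of the missing purchase lookup (assumes a
# --- hypothetical Purchase model with user_id/content_id columns; the real
# --- purchases table in this project may differ):
# stmt = select(Purchase).where(
#     Purchase.user_id == user.id,
#     Purchase.content_id == content.id
# )
# return (await session.execute(stmt)).scalar_one_or_none() is not None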
async def _update_view_stats(content_id: str, user: Optional[User]) -> None:
"""Обновление статистики просмотров"""
try:
cache_manager = await get_cache_manager()
        # Bump the per-day view counters
today = datetime.utcnow().date().isoformat()
stats_key = f"content_views:{content_id}:{today}"
await cache_manager.increment(stats_key, ttl=86400)
if user:
user_views_key = f"user_content_views:{user.id}:{today}"
await cache_manager.increment(user_views_key, ttl=86400)
except Exception as e:
await logger.awarning(
"Failed to update view stats",
content_id=content_id,
error=str(e)
)
async def _generate_blockchain_payload(content_id: str, price: str, metadata: Dict[str, Any]) -> str:
"""Генерация payload для blockchain транзакции создания контента"""
import base64
import json
payload_data = {
"action": "create_content",
"content_id": content_id,
"price": price,
"timestamp": datetime.utcnow().isoformat(),
"metadata": {
"title": metadata.get("title"),
"description": metadata.get("description"),
"hashtags": metadata.get("hashtags", []),
"authors": metadata.get("authors", []),
"downloadable": metadata.get("downloadable", True)
}
}
    # Encode as base64 for TON
payload_json = json.dumps(payload_data, separators=(',', ':'))
payload_base64 = base64.b64encode(payload_json.encode()).decode()
return payload_base64
async def _generate_purchase_payload(
content_id: str,
content_address: str,
license_type: str,
price: str,
buyer_id: str
) -> str:
"""Генерация payload для blockchain транзакции покупки контента"""
import base64
import json
payload_data = {
"action": "purchase_content",
"content_id": content_id,
"content_address": content_address,
"license_type": license_type,
"price": price,
"buyer_id": buyer_id,
"timestamp": datetime.utcnow().isoformat()
}
    # Encode as base64 for TON
payload_json = json.dumps(payload_data, separators=(',', ':'))
payload_base64 = base64.b64encode(payload_json.encode()).decode()
return payload_base64
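# --- Illustrative round-trip for the payload helpers above ---
# encoded = await _generate_purchase_payload("cid", "EQ...", "listen", "1000", "uid")
# decoded = json.loads(base64.b64decode(encoded))
# assert decoded["action"] == "purchase_content"
# Note: a production TON integration would normally emit a BOC-encoded cell
# rather than base64 JSON; this demo format is just what the helpers produce.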

View File

@ -0,0 +1,594 @@
"""
FastAPI middleware adapted from the Sanic middleware.
Provides full compatibility with the existing functionality.
"""
import asyncio
import time
import uuid
import json
from datetime import datetime, timedelta
from typing import Optional, Dict, Any, Callable
from fastapi import Request, Response, HTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from fastapi.responses import JSONResponse
import structlog
from app.core.config import settings, SecurityConfig, CACHE_KEYS
from app.core.database import get_cache
from app.core.logging import request_id_var, user_id_var, operation_var
from app.core.models.user import User
# Ed25519 cryptography module (optional import)
try:
from app.core.crypto import get_ed25519_manager
CRYPTO_AVAILABLE = True
except ImportError:
CRYPTO_AVAILABLE = False
logger = structlog.get_logger(__name__)
class FastAPISecurityMiddleware(BaseHTTPMiddleware):
"""FastAPI Security middleware для валидации запросов и защиты"""
async def dispatch(self, request: Request, call_next):
# Handle OPTIONS requests for CORS
if request.method == 'OPTIONS':
response = Response(content='OK')
return self.add_security_headers(response)
# Security validations
try:
self.validate_request_size(request)
await self.validate_content_type(request)
if not self.check_origin(request):
raise HTTPException(status_code=403, detail="Origin not allowed")
except HTTPException:
raise
except Exception as e:
logger.warning("Security validation failed", error=str(e))
raise HTTPException(status_code=400, detail=str(e))
response = await call_next(request)
return self.add_security_headers(response)
def add_security_headers(self, response: Response) -> Response:
"""Add security headers to response"""
# CORS headers
response.headers.update({
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
"Access-Control-Allow-Headers": (
"Origin, Content-Type, Accept, Authorization, "
"X-Requested-With, X-API-Key, X-Request-ID, "
"X-Node-Communication, X-Node-ID, X-Node-Public-Key, X-Node-Signature"
),
"Access-Control-Max-Age": "86400",
# Security headers
"X-Content-Type-Options": "nosniff",
"X-Frame-Options": "DENY",
"X-XSS-Protection": "1; mode=block",
"Strict-Transport-Security": "max-age=31536000; includeSubDomains",
"Referrer-Policy": "strict-origin-when-cross-origin",
"Permissions-Policy": "geolocation=(), microphone=(), camera=()",
# Custom headers
"X-API-Version": settings.PROJECT_VERSION,
})
# CSP header
csp_directives = "; ".join([
f"{directive} {' '.join(sources)}"
for directive, sources in SecurityConfig.CSP_DIRECTIVES.items()
])
response.headers["Content-Security-Policy"] = csp_directives
return response
def validate_request_size(self, request: Request) -> None:
"""Validate request size limits"""
content_length = request.headers.get('content-length')
if content_length:
size = int(content_length)
if size > SecurityConfig.MAX_REQUEST_SIZE:
raise HTTPException(status_code=413, detail=f"Request too large: {size} bytes")
async def validate_content_type(self, request: Request) -> None:
"""Validate content type for JSON requests"""
if request.method in ['POST', 'PUT', 'PATCH']:
content_type = request.headers.get('content-type', '')
if 'application/json' in content_type:
# Skip body reading here - it will be read by the route handler
# Just validate content-length header instead
content_length = request.headers.get('content-length')
if content_length and int(content_length) > SecurityConfig.MAX_JSON_SIZE:
raise HTTPException(status_code=413, detail="JSON payload too large")
def check_origin(self, request: Request) -> bool:
"""Check if request origin is allowed"""
origin = request.headers.get('origin')
if not origin:
return True # Allow requests without origin (direct API calls)
return any(
origin.startswith(allowed_origin.rstrip('/*'))
for allowed_origin in SecurityConfig.CORS_ORIGINS
)
class FastAPIRateLimitMiddleware(BaseHTTPMiddleware):
"""FastAPI Rate limiting middleware using Redis"""
def __init__(self, app):
super().__init__(app)
self.cache = None
async def get_cache(self):
"""Get cache instance"""
if not self.cache:
self.cache = await get_cache()
return self.cache
async def dispatch(self, request: Request, call_next):
if not settings.RATE_LIMIT_ENABLED:
return await call_next(request)
client_identifier = self.get_client_ip(request)
pattern = self.get_rate_limit_pattern(request)
if not await self.check_rate_limit(request, client_identifier, pattern):
rate_info = await self.get_rate_limit_info(client_identifier, pattern)
return JSONResponse(
content={
"error": "Rate limit exceeded",
"rate_limit": rate_info
},
status_code=429
)
# Store rate limit info for response headers
rate_info = await self.get_rate_limit_info(client_identifier, pattern)
response = await call_next(request)
# Add rate limit headers
if rate_info:
response.headers.update({
"X-RateLimit-Limit": str(rate_info.get('limit', 0)),
"X-RateLimit-Remaining": str(rate_info.get('remaining', 0)),
"X-RateLimit-Reset": str(rate_info.get('reset_time', 0))
})
return response
def get_client_ip(self, request: Request) -> str:
"""Get real client IP address"""
# Check for forwarded headers
forwarded_for = request.headers.get('x-forwarded-for')
if forwarded_for:
return forwarded_for.split(',')[0].strip()
real_ip = request.headers.get('x-real-ip')
if real_ip:
return real_ip
# Fallback to request IP
return getattr(request.client, 'host', '127.0.0.1')
def get_rate_limit_pattern(self, request: Request) -> str:
"""Determine rate limit pattern based on endpoint"""
path = request.url.path
if '/auth/' in path:
return "auth"
elif '/upload' in path:
return "upload"
elif '/admin/' in path:
return "heavy"
else:
return "api"
async def check_rate_limit(
self,
request: Request,
identifier: str,
pattern: str = "api"
) -> bool:
"""Check rate limit for identifier"""
try:
cache = await self.get_cache()
limits = SecurityConfig.RATE_LIMIT_PATTERNS.get(pattern, {
"requests": settings.RATE_LIMIT_REQUESTS,
"window": settings.RATE_LIMIT_WINDOW
})
cache_key = CACHE_KEYS["rate_limit"].format(
pattern=pattern,
identifier=identifier
)
# Get current count
current_count = await cache.get(cache_key)
if current_count is None:
# First request in window
await cache.set(cache_key, "1", ttl=limits["window"])
return True
current_count = int(current_count)
if current_count >= limits["requests"]:
# Rate limit exceeded
logger.warning(
"Rate limit exceeded",
identifier=identifier,
pattern=pattern,
count=current_count,
limit=limits["requests"]
)
return False
# Increment counter
await cache.incr(cache_key)
return True
except Exception as e:
logger.error("Rate limit check failed", error=str(e))
return True # Allow request if rate limiting fails
async def get_rate_limit_info(
self,
identifier: str,
pattern: str = "api"
) -> Dict[str, Any]:
"""Get rate limit information"""
try:
cache = await self.get_cache()
limits = SecurityConfig.RATE_LIMIT_PATTERNS.get(pattern, {
"requests": settings.RATE_LIMIT_REQUESTS,
"window": settings.RATE_LIMIT_WINDOW
})
cache_key = CACHE_KEYS["rate_limit"].format(
pattern=pattern,
identifier=identifier
)
current_count = await cache.get(cache_key) or "0"
ttl = await cache.redis.ttl(cache_key)
return {
"limit": limits["requests"],
"remaining": max(0, limits["requests"] - int(current_count)),
"reset_time": int(time.time()) + max(0, ttl),
"window": limits["window"]
}
except Exception as e:
logger.error("Failed to get rate limit info", error=str(e))
return {}
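    # Note: the scheme above is a fixed window; the first request creates the
    # counter with TTL = window, and later requests increment it. The separate
    # get-then-incr steps can overshoot the limit slightly under heavy
    # concurrency; an atomic increment-with-expire would close that gap.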
class FastAPICryptographicMiddleware(BaseHTTPMiddleware):
"""FastAPI Ed25519 cryptographic middleware для межузлового общения"""
async def dispatch(self, request: Request, call_next):
        # Verify the ed25519 signature on inter-node requests
if not await self.verify_inter_node_signature(request):
logger.warning("Inter-node signature verification failed")
return JSONResponse(
content={
"error": "Invalid cryptographic signature",
"message": "Inter-node communication requires valid ed25519 signature"
},
status_code=403
)
response = await call_next(request)
        # Add cryptographic headers to inter-node responses
return await self.add_inter_node_headers(request, response)
    async def verify_inter_node_signature(self, request: Request) -> bool:
        """Verify the ed25519 signature of an inter-node message."""
        if not CRYPTO_AVAILABLE:
            logger.warning("Crypto module not available, skipping signature verification")
            return True
        # Only inter-node messages are verified
        if request.headers.get("x-node-communication") != "true":
            return True  # Not inter-node traffic, skip verification
try:
crypto_manager = get_ed25519_manager()
            # Read the required headers
signature = request.headers.get("x-node-signature")
node_id = request.headers.get("x-node-id")
public_key = request.headers.get("x-node-public-key")
if not all([signature, node_id, public_key]):
logger.warning("Missing cryptographic headers in inter-node request")
return False
            # NOTE: body-based signature verification is temporarily disabled
            # because reading the request body inside BaseHTTPMiddleware
            # conflicts with FastAPI's own body consumption. Until that is
            # resolved, the request is only tagged with the sender's headers.
            logger.debug("Inter-node signature verification skipped (body reading conflict)")
            request.state.inter_node_communication = True
            request.state.source_node_id = node_id
            request.state.source_public_key = public_key
            return True
except Exception as e:
logger.error(f"Crypto verification error: {e}")
return False
async def add_inter_node_headers(self, request: Request, response: Response) -> Response:
"""Добавить криптографические заголовки для межузловых ответов"""
if not CRYPTO_AVAILABLE:
return response
        # Only inter-node exchanges get the extra headers
if hasattr(request.state, 'inter_node_communication') and request.state.inter_node_communication:
try:
crypto_manager = get_ed25519_manager()
                # Advertise our own node identity
response.headers.update({
"X-Node-ID": crypto_manager.node_id,
"X-Node-Public-Key": crypto_manager.public_key_hex,
"X-Node-Communication": "true"
})
                # If the response has a body, sign it
if hasattr(response, 'body') and response.body:
try:
response_data = json.loads(response.body.decode())
signature = crypto_manager.sign_message(response_data)
response.headers["X-Node-Signature"] = signature
except (json.JSONDecodeError, AttributeError):
                    # Not a JSON body (or no body at all): skip signing
pass
except Exception as e:
logger.error(f"Error adding inter-node headers: {e}")
return response
class FastAPIRequestContextMiddleware(BaseHTTPMiddleware):
"""FastAPI Request context middleware для трекинга и логирования"""
async def dispatch(self, request: Request, call_next):
# Generate and set request ID
request_id = str(uuid.uuid4())
request.state.request_id = request_id
request_id_var.set(request_id)
# Set request start time
start_time = time.time()
request.state.start_time = start_time
# Extract client information
request.state.client_ip = self.get_client_ip(request)
request.state.user_agent = request.headers.get('user-agent', 'Unknown')
# Initialize context
request.state.user = None
logger.info(
"Request started",
method=request.method,
path=request.url.path,
client_ip=request.state.client_ip,
user_agent=request.state.user_agent
)
response = await call_next(request)
# Add request ID to response
response.headers["X-Request-ID"] = request_id
# Log request completion
duration = time.time() - start_time
logger.info(
"Request completed",
method=request.method,
path=request.url.path,
status_code=response.status_code,
duration_ms=round(duration * 1000, 2),
client_ip=request.state.client_ip,
user_id=str(request.state.user.id) if hasattr(request.state, 'user') and request.state.user else None
)
return response
def get_client_ip(self, request: Request) -> str:
"""Get real client IP address"""
# Check for forwarded headers
forwarded_for = request.headers.get('x-forwarded-for')
if forwarded_for:
return forwarded_for.split(',')[0].strip()
real_ip = request.headers.get('x-real-ip')
if real_ip:
return real_ip
# Fallback to request IP
return getattr(request.client, 'host', '127.0.0.1')
class FastAPIAuthenticationMiddleware(BaseHTTPMiddleware):
"""FastAPI Authentication middleware для API доступа"""
async def dispatch(self, request: Request, call_next):
# Skip authentication for system endpoints and root
if request.url.path.startswith('/api/system') or request.url.path == '/':
return await call_next(request)
# Extract and validate token
token = await self.extract_token(request)
if token:
from app.core.database import db_manager
async with db_manager.get_session() as session:
user = await self.validate_token(token, session)
if user:
request.state.user = user
user_id_var.set(str(user.id))
# Check permissions
if not await self.check_permissions(user, request):
return JSONResponse(
content={"error": "Insufficient permissions"},
status_code=403
)
# Update user activity
user.update_activity()
await session.commit()
return await call_next(request)
async def extract_token(self, request: Request) -> Optional[str]:
"""Extract authentication token from request"""
# Check Authorization header
auth_header = request.headers.get('authorization')
if auth_header and auth_header.startswith('Bearer '):
return auth_header[7:] # Remove 'Bearer ' prefix
# Check X-API-Key header
api_key = request.headers.get('x-api-key')
if api_key:
return api_key
# Check query parameter (less secure, for backward compatibility)
return request.query_params.get('token')
async def validate_token(self, token: str, session) -> Optional[User]:
"""Validate authentication token and return user"""
if not token:
return None
try:
            # Import security helpers
            from app.core.security import verify_access_token
            # Try to decode as a JWT first (tokens issued by auth.twa take priority)
try:
payload = verify_access_token(token)
if payload and 'user_id' in payload:
user_id = uuid.UUID(payload['user_id'])
user = await User.get_by_id(session, user_id)
if user and user.is_active:
return user
except Exception as jwt_error:
logger.debug("JWT validation failed, trying legacy format", error=str(jwt_error))
# Fallback: Legacy token format (user_id:hash)
if ':' in token:
user_id_str, token_hash = token.split(':', 1)
try:
user_id = uuid.UUID(user_id_str)
user = await User.get_by_id(session, user_id)
if user and hasattr(user, 'verify_token') and user.verify_token(token_hash):
return user
except (ValueError, AttributeError):
pass
# Fallback: try to find user by API token in user model
# This would require implementing token storage in User model
return None
except Exception as e:
logger.error("Token validation failed", token=token[:8] + "...", error=str(e))
return None
async def check_permissions(self, user: User, request: Request) -> bool:
"""Check if user has required permissions for the endpoint"""
# Implement permission checking based on endpoint and user role
endpoint = request.url.path
method = request.method
# Admin endpoints
if '/admin/' in endpoint:
return user.is_admin
# Moderator endpoints
if '/mod/' in endpoint:
return user.is_moderator
# User-specific endpoints
if '/user/' in endpoint and method in ['POST', 'PUT', 'DELETE']:
return user.has_permission('user:write')
# Content upload endpoints
if '/upload' in endpoint or '/content' in endpoint and method == 'POST':
return user.can_upload_content()
# Default: allow read access for authenticated users
return True
# FastAPI dependencies for use in routes
from fastapi import Depends, HTTPException
async def get_current_user(request: Request) -> Optional[User]:
"""FastAPI dependency для получения текущего пользователя"""
if hasattr(request.state, 'user') and request.state.user:
return request.state.user
return None
async def require_auth(request: Request) -> User:
"""FastAPI dependency для требования аутентификации"""
user = await get_current_user(request)
if not user:
raise HTTPException(status_code=401, detail="Authentication required")
return user
# Note: a plain function (not async) so that check_permissions("x") can be
# passed directly to Depends() without awaiting the factory first
def check_permissions(permission: str):
    """FastAPI dependency factory for permission checks"""
    def permission_checker(user: User = Depends(require_auth)) -> User:
        if not user.has_permission(permission):
            raise HTTPException(status_code=403, detail=f"Permission required: {permission}")
        return user
    return permission_checker
async def require_admin(user: User = Depends(require_auth)) -> User:
"""FastAPI dependency для требования административных прав"""
if not hasattr(user, 'is_admin') or not user.is_admin:
raise HTTPException(status_code=403, detail="Administrative privileges required")
return user
def check_rate_limit(pattern: str = "api"):
    """FastAPI dependency factory for extra rate limit checks
    (a plain function for the same reason as check_permissions above)"""
    def rate_limit_checker(request: Request) -> bool:
        # Rate limiting is already enforced in the middleware; this hook
        # exists for additional per-route checks if ever needed
        return True
    return rate_limit_checker
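# --- Illustrative wiring only (assumes an `app = FastAPI()` created
# --- elsewhere; this module does not create the application) ---
def install_middlewares_sketch(app):
    """With Starlette, the middleware added last is outermost and runs first,
    so this ordering runs security -> rate limit -> context -> crypto -> auth
    on each incoming request."""
    app.add_middleware(FastAPIAuthenticationMiddleware)
    app.add_middleware(FastAPICryptographicMiddleware)
    app.add_middleware(FastAPIRequestContextMiddleware)
    app.add_middleware(FastAPIRateLimitMiddleware)
    app.add_middleware(FastAPISecurityMiddleware)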

View File

@ -0,0 +1,433 @@
"""
FastAPI routes for inter-node communication with ed25519 signatures
"""
import json
from typing import Dict, Any, Optional
from datetime import datetime
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse
from app.core.crypto import get_ed25519_manager
from app.core.logging import get_logger
from app.core.database import get_cache_manager
logger = get_logger(__name__)
# Router for inter-node communication in FastAPI
router = APIRouter(prefix="/api/node", tags=["node-communication"])
async def validate_node_request(request: Request) -> Dict[str, Any]:
    """Validate an inter-node request, including a mandatory signature check."""
    # Require the inter-node headers
    required_headers = ["x-node-communication", "x-node-id", "x-node-public-key", "x-node-signature"]
    for header in required_headers:
        if header not in request.headers:
            raise HTTPException(status_code=400, detail=f"Missing required header: {header}")
    # Make sure the request is flagged as inter-node communication
    if request.headers.get("x-node-communication") != "true":
        raise HTTPException(status_code=400, detail="Not a valid inter-node communication")
try:
crypto_manager = get_ed25519_manager()
        # Read the headers
        signature = request.headers.get("x-node-signature")
        node_id = request.headers.get("x-node-id")
        public_key = request.headers.get("x-node-public-key")
        # Read the request body
body = await request.body()
if not body:
raise HTTPException(status_code=400, detail="Empty message body")
try:
message_data = json.loads(body.decode())
            # Anti-replay: validate timestamp and nonce when present
            try:
                ts = message_data.get("timestamp")
                nonce = message_data.get("nonce")
                if ts:
                    from datetime import datetime, timezone
                    now = datetime.now(timezone.utc).timestamp()
                    if abs(float(ts) - float(now)) > 300:
                        raise HTTPException(status_code=400, detail="stale timestamp")
                if nonce:
                    cache = await get_cache_manager()
                    cache_key = f"replay:{node_id}:{nonce}"
                    if await cache.get(cache_key):
                        raise HTTPException(status_code=400, detail="replay detected")
                    await cache.set(cache_key, True, ttl=600)
            except HTTPException:
                # A failed anti-replay check must reject the request
                raise
            except Exception:
                # Backward compatible: tolerate missing or malformed fields
                pass
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail="Invalid JSON in request body")
        # Verify the signature
is_valid = crypto_manager.verify_signature(message_data, signature, public_key)
if not is_valid:
logger.warning(f"Invalid signature from node {node_id}")
raise HTTPException(status_code=403, detail="Invalid cryptographic signature")
logger.debug(f"Valid signature verified for node {node_id}")
return {
"node_id": node_id,
"public_key": public_key,
"message": message_data
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Crypto verification error: {e}")
raise HTTPException(status_code=500, detail="Cryptographic verification failed")
async def create_node_response(data: Dict[str, Any], request: Request) -> JSONResponse:
    """Create a signed response for inter-node communication."""
    try:
        crypto_manager = get_ed25519_manager()
        # Include our own node information
        response_data = {
            "success": True,
            "timestamp": datetime.utcnow().isoformat(),
            "node_id": crypto_manager.node_id,
            "data": data
        }
        # Sign the response
        signature = crypto_manager.sign_message(response_data)
        # Build the response with crypto headers
        headers = {
"X-Node-ID": crypto_manager.node_id,
"X-Node-Public-Key": crypto_manager.public_key_hex,
"X-Node-Communication": "true",
"X-Node-Signature": signature
}
return JSONResponse(content=response_data, headers=headers)
except Exception as e:
logger.error(f"Error creating node response: {e}")
raise HTTPException(status_code=500, detail="Failed to create signed response")
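# --- Illustrative client-side sketch for calling a peer (assumes aiohttp is
# --- available and that peers expose the same /api/node routes; peer_url is a
# --- placeholder). The numeric timestamp and nonce feed the anti-replay
# --- checks in validate_node_request above.
async def send_signed_ping_sketch(peer_url: str) -> Dict[str, Any]:
    import aiohttp
    from datetime import timezone
    from uuid import uuid4
    crypto_manager = get_ed25519_manager()
    message = {
        "action": "ping",
        "timestamp": datetime.now(timezone.utc).timestamp(),
        "nonce": str(uuid4()),
    }
    headers = {
        "X-Node-Communication": "true",
        "X-Node-ID": crypto_manager.node_id,
        "X-Node-Public-Key": crypto_manager.public_key_hex,
        "X-Node-Signature": crypto_manager.sign_message(message),
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(f"{peer_url}/api/node/network/ping",
                                json=message, headers=headers) as resp:
            return await resp.json()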
@router.post("/handshake")
async def node_handshake(request: Request):
"""
    Handle a handshake between nodes.
    Expected message format:
{
"action": "handshake",
"node_info": {
"node_id": "...",
"version": "...",
"capabilities": [...],
"network_info": {...}
},
"timestamp": "..."
}
"""
try:
        # Validate the inter-node request
node_data = await validate_node_request(request)
message = node_data["message"]
source_node_id = node_data["node_id"]
logger.info(f"Handshake request from node {source_node_id}")
        # Check the handshake message format
if message.get("action") != "handshake":
raise HTTPException(status_code=400, detail="Invalid handshake message format")
node_info = message.get("node_info", {})
if not node_info.get("node_id") or not node_info.get("version"):
raise HTTPException(status_code=400, detail="Missing required node information")
        # Assemble our node info for the response
crypto_manager = get_ed25519_manager()
our_node_info = {
"node_id": crypto_manager.node_id,
"version": "3.0.0", # Версия MY Network
"capabilities": [
"content_upload",
"content_sync",
"decentralized_filtering",
"ed25519_signatures"
],
"network_info": {
"public_key": crypto_manager.public_key_hex,
"protocol_version": "1.0"
}
}
        # Record the peer node info (persisting it to the database could be added here)
logger.info(f"Successful handshake with node {source_node_id}",
extra={"peer_node_info": node_info})
response_data = {
"handshake_accepted": True,
"node_info": our_node_info
}
return await create_node_response(response_data, request)
except HTTPException:
raise
except Exception as e:
logger.error(f"Handshake error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/content/sync")
async def content_sync(request: Request):
"""
    Synchronize content between nodes.
    Expected message format:
{
"action": "content_sync",
"sync_type": "new_content|content_list|content_request",
"content_info": {...},
"timestamp": "..."
}
"""
try:
        # Validate the inter-node request
node_data = await validate_node_request(request)
message = node_data["message"]
source_node_id = node_data["node_id"]
logger.info(f"Content sync request from node {source_node_id}")
        # Check the sync message format
if message.get("action") != "content_sync":
raise HTTPException(status_code=400, detail="Invalid sync message format")
sync_type = message.get("sync_type")
content_info = message.get("content_info", {})
if sync_type == "new_content":
            # New content announced by another node
            content_hash = content_info.get("hash")
            if not content_hash:
                raise HTTPException(status_code=400, detail="Missing content hash")
            # Handling of the new content via decentralized_filter and
            # content_storage_manager goes here
response_data = {
"sync_result": "content_accepted",
"content_hash": content_hash
}
elif sync_type == "content_list":
# Запрос списка доступного контента
# Здесь добавить логику получения списка контента
response_data = {
"content_list": [], # Заглушка - добавить реальный список
"total_items": 0
}
elif sync_type == "content_request":
# Запрос конкретного контента
requested_hash = content_info.get("hash")
if not requested_hash:
raise HTTPException(status_code=400, detail="Missing content hash for request")
# Здесь добавить логику поиска и передачи контента
response_data = {
"content_found": False, # Заглушка - добавить реальную проверку
"content_hash": requested_hash
}
else:
raise HTTPException(status_code=400, detail=f"Unknown sync type: {sync_type}")
return await create_node_response(response_data, request)
except HTTPException:
raise
except Exception as e:
logger.error(f"Content sync error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
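# The three sync_type variants accepted above, shown as concrete payloads
# (illustrative; hashes and timestamps are placeholders).
new_content_msg = {
    "action": "content_sync",
    "sync_type": "new_content",
    "content_info": {"hash": "<content-hash>"},
    "timestamp": "2025-01-01T00:00:00Z",
}
content_list_msg = {
    "action": "content_sync",
    "sync_type": "content_list",
    "content_info": {},
    "timestamp": "2025-01-01T00:00:00Z",
}
content_request_msg = {
    "action": "content_sync",
    "sync_type": "content_request",
    "content_info": {"hash": "<content-hash>"},
    "timestamp": "2025-01-01T00:00:00Z",
}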
@router.post("/network/ping")
async def network_ping(request: Request):
"""
Ping between nodes to check availability
Expected message format:
{
"action": "ping",
"timestamp": "...",
"data": {...}
}
"""
try:
# Validate the inter-node request
node_data = await validate_node_request(request)
message = node_data["message"]
source_node_id = node_data["node_id"]
logger.debug(f"Ping from node {source_node_id}")
# Check the ping message format
if message.get("action") != "ping":
raise HTTPException(status_code=400, detail="Invalid ping message format")
# Build the pong response
response_data = {
"action": "pong",
"ping_timestamp": message.get("timestamp"),
"response_timestamp": datetime.utcnow().isoformat()
}
return await create_node_response(response_data, request)
except HTTPException:
raise
except Exception as e:
logger.error(f"Ping error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/network/status")
async def network_status():
"""
Get node status (GET request, signature not required)
"""
try:
crypto_manager = get_ed25519_manager()
status_data = {
"node_id": crypto_manager.node_id,
"public_key": crypto_manager.public_key_hex,
"version": "3.0.0",
"status": "active",
"capabilities": [
"content_upload",
"content_sync",
"decentralized_filtering",
"ed25519_signatures"
],
"timestamp": datetime.utcnow().isoformat()
}
return {
"success": True,
"data": status_data
}
except Exception as e:
logger.error(f"Status error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/network/discover")
async def network_discover(request: Request):
"""
Discover and exchange information about other nodes in the network
Expected message format:
{
"action": "discover",
"known_nodes": [...],
"timestamp": "..."
}
"""
try:
# Validate the inter-node request
node_data = await validate_node_request(request)
message = node_data["message"]
source_node_id = node_data["node_id"]
logger.info(f"Discovery request from node {source_node_id}")
# Check the message format
if message.get("action") != "discover":
raise HTTPException(status_code=400, detail="Invalid discovery message format")
known_nodes = message.get("known_nodes", [])
# TODO: process the peer's known-node info here
# and return information about our own known nodes
response_data = {
"known_nodes": [], # Заглушка - добавить реальный список
"discovery_timestamp": datetime.utcnow().isoformat()
}
return await create_node_response(response_data, request)
except HTTPException:
raise
except Exception as e:
logger.error(f"Discovery error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# V3 API compatibility endpoints (unsigned, for compatibility)
@router.get("/v3/node/status")
async def v3_node_status():
"""
V3 API: node status, for script compatibility
"""
try:
crypto_manager = get_ed25519_manager()
return {
"status": "online",
"node_id": crypto_manager.node_id,
"version": "3.0.0",
"network": "MY Network",
"capabilities": [
"content_upload",
"content_sync",
"decentralized_filtering",
"ed25519_signatures"
],
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"V3 status error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/v3/network/stats")
async def v3_network_stats():
"""
V3 API: network statistics, for script compatibility
"""
try:
# Stub network statistics
return {
"network_stats": {
"total_nodes": 1,
"active_nodes": 1,
"total_content": 0,
"network_health": "good"
},
"node_stats": {
"uptime": "online",
"connections": 0,
"content_shared": 0
},
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"V3 network stats error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

View File

@ -0,0 +1,478 @@
"""
FastAPI routes for file uploads with chunked upload support
Critical endpoints for web2-client compatibility
"""
import asyncio
import base64
import hashlib
from datetime import datetime
from typing import Dict, List, Optional, Any
from uuid import UUID, uuid4
from fastapi import APIRouter, HTTPException, Request, Depends, UploadFile, File, Header
from fastapi.responses import JSONResponse, StreamingResponse
from sqlalchemy import select, update, delete
from app.core.config import get_settings
from app.core.database import db_manager, get_cache_manager
from app.core.logging import get_logger
from app.core.models.content_models import StoredContent as Content
from app.core.models.user import User
from app.api.fastapi_middleware import get_current_user, require_auth
# Initialize router
router = APIRouter(prefix="", tags=["storage"])
logger = get_logger(__name__)
settings = get_settings()
# Configuration
MAX_CHUNK_SIZE = 80 * 1024 * 1024 # 80 MB
STORAGE_API_URL = getattr(settings, 'STORAGE_API_URL', '/api/storage')
@router.post("")
async def chunked_file_upload(
request: Request,
file: bytes = File(...),
x_file_name: Optional[str] = Header(None, alias="X-File-Name"),
x_chunk_start: Optional[str] = Header(None, alias="X-Chunk-Start"),
x_last_chunk: Optional[str] = Header(None, alias="X-Last-Chunk"),
x_upload_id: Optional[str] = Header(None, alias="X-Upload-ID"),
content_type: Optional[str] = Header(None, alias="Content-Type"),
current_user: User = Depends(get_current_user)
):
"""
Chunked file upload compatible with web2-client
Handles both regular uploads (up to 80 MB) and chunked ones
Headers:
- X-File-Name: base64-encoded filename
- X-Chunk-Start: start offset of the chunk
- X-Last-Chunk: "1" if this is the last chunk
- X-Upload-ID: upload session ID (for chunks after the first)
- Content-Type: content type
"""
try:
# Authorization check
auth_token = request.headers.get('authorization')
if not auth_token and not current_user:
raise HTTPException(status_code=401, detail="Authentication required")
# Validate headers
if not x_file_name:
raise HTTPException(status_code=400, detail="X-File-Name header required")
if not x_chunk_start:
raise HTTPException(status_code=400, detail="X-Chunk-Start header required")
# Decode the filename
try:
filename = base64.b64decode(x_file_name).decode('utf-8')
except Exception:
raise HTTPException(status_code=400, detail="Invalid X-File-Name encoding")
# Parse parameters
chunk_start = int(x_chunk_start)
is_last_chunk = x_last_chunk == "1"
upload_id = x_upload_id
# Validate the chunk size
if len(file) > MAX_CHUNK_SIZE:
raise HTTPException(status_code=413, detail="Chunk too large")
cache_manager = await get_cache_manager()
# First chunk (chunk_start = 0 and no upload_id)
if chunk_start == 0 and not upload_id:
# Create a new upload session
upload_id = str(uuid4())
# Create the upload record
upload_session = {
"upload_id": upload_id,
"filename": filename,
"content_type": content_type or "application/octet-stream",
"user_id": str(current_user.id) if current_user else "anonymous",
"chunks": {},
"total_size": 0,
"created_at": datetime.utcnow().isoformat(),
"status": "uploading"
}
# Store it in the cache
session_key = f"upload_session:{upload_id}"
await cache_manager.set(session_key, upload_session, ttl=3600)  # 1 hour
await logger.ainfo(
"New upload session created",
upload_id=upload_id,
filename=filename,
user_id=str(current_user.id) if current_user else "anonymous"
)
# Fetch the upload session
session_key = f"upload_session:{upload_id}"
upload_session = await cache_manager.get(session_key)
if not upload_session:
raise HTTPException(status_code=404, detail="Upload session not found")
# Store the chunk
chunk_key = f"upload_chunk:{upload_id}:{chunk_start}"
chunk_data = {
"data": base64.b64encode(file).decode(),
"start": chunk_start,
"size": len(file),
"uploaded_at": datetime.utcnow().isoformat()
}
await cache_manager.set(chunk_key, chunk_data, ttl=3600)
# Update the session
upload_session["chunks"][str(chunk_start)] = len(file)
upload_session["total_size"] = chunk_start + len(file)
await cache_manager.set(session_key, upload_session, ttl=3600)
# If this is the last chunk, assemble the file
if is_last_chunk:
try:
# Assemble all chunks
file_content = await _assemble_file_chunks(upload_id, upload_session)
# Create the content record in the DB
content_id = await _create_content_record(
filename=filename,
content_type=content_type or "application/octet-stream",
file_size=len(file_content),
user_id=current_user.id if current_user else None
)
# Persist the file (a real filesystem backend belongs here)
file_hash = hashlib.sha256(file_content).hexdigest()
# Clean up temporary data
await _cleanup_upload_session(upload_id, upload_session)
await logger.ainfo(
"File upload completed",
upload_id=upload_id,
content_id=content_id,
filename=filename,
file_size=len(file_content),
user_id=str(current_user.id) if current_user else "anonymous"
)
# Response for a completed upload (web2-client format)
return {
"content_sha256": file_hash,
"content_id_v1": content_id,
"content_id": content_id,
"content_url": f"/api/v1/content/{content_id}/download",
"upload_id": upload_id,
"status": "completed"
}
except Exception as e:
await logger.aerror(
"Failed to finalize upload",
upload_id=upload_id,
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to finalize upload")
else:
# Intermediate response so the client keeps uploading
current_size = upload_session["total_size"]
await logger.adebug(
"Chunk uploaded",
upload_id=upload_id,
chunk_start=chunk_start,
chunk_size=len(file),
current_size=current_size
)
return {
"upload_id": upload_id,
"current_size": current_size,
"chunk_uploaded": True,
"chunks_received": len(upload_session["chunks"])
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Chunked upload failed",
filename=x_file_name,
chunk_start=x_chunk_start,
error=str(e)
)
raise HTTPException(status_code=500, detail="Upload failed")
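# A client-side sketch of the chunk protocol implemented above. The upload URL
# and Authorization value are deployment-specific assumptions; the file is
# assumed non-empty. The first chunk carries no X-Upload-ID; later chunks echo
# the upload_id the server returned, and the final chunk sets X-Last-Chunk to "1".
import base64
import requests

CHUNK = 80 * 1024 * 1024  # matches MAX_CHUNK_SIZE above

def upload_file(upload_url: str, path: str, token: str) -> dict:
    with open(path, "rb") as f:
        data = f.read()
    base_headers = {
        "Authorization": token,
        "X-File-Name": base64.b64encode(path.encode()).decode(),
    }
    upload_id = None
    resp = None
    for start in range(0, len(data), CHUNK):
        headers = dict(base_headers)
        headers["X-Chunk-Start"] = str(start)
        if start + CHUNK >= len(data):
            headers["X-Last-Chunk"] = "1"
        if upload_id:
            headers["X-Upload-ID"] = upload_id
        # The endpoint declares file: bytes = File(...), i.e. a multipart
        # field named "file"
        resp = requests.post(upload_url,
                             files={"file": data[start:start + CHUNK]},
                             headers=headers)
        resp.raise_for_status()
        upload_id = resp.json().get("upload_id", upload_id)
    return resp.json()  # final response carries content_id and content_sha256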
@router.get("/upload/{upload_id}/status")
async def get_upload_status(
upload_id: str,
request: Request,
current_user: User = Depends(get_current_user)
):
"""
Get upload status
"""
try:
# Authorization check
if not current_user:
auth_token = request.headers.get('authorization')
if not auth_token:
raise HTTPException(status_code=401, detail="Authentication required")
cache_manager = await get_cache_manager()
session_key = f"upload_session:{upload_id}"
upload_session = await cache_manager.get(session_key)
if not upload_session:
raise HTTPException(status_code=404, detail="Upload session not found")
# Access control check
if current_user and upload_session.get("user_id") != str(current_user.id):
raise HTTPException(status_code=403, detail="Access denied")
# Compute progress
total_chunks = len(upload_session["chunks"])
total_size = upload_session["total_size"]
return {
"upload_id": upload_id,
"status": upload_session["status"],
"filename": upload_session["filename"],
"total_size": total_size,
"chunks_uploaded": total_chunks,
"created_at": upload_session["created_at"]
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Failed to get upload status",
upload_id=upload_id,
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to get upload status")
@router.delete("/upload/{upload_id}")
async def cancel_upload(
upload_id: str,
request: Request,
current_user: User = Depends(get_current_user)
):
"""
Cancel an upload and clean up temporary data
"""
try:
# Authorization check
if not current_user:
auth_token = request.headers.get('authorization')
if not auth_token:
raise HTTPException(status_code=401, detail="Authentication required")
cache_manager = await get_cache_manager()
session_key = f"upload_session:{upload_id}"
upload_session = await cache_manager.get(session_key)
if not upload_session:
raise HTTPException(status_code=404, detail="Upload session not found")
# Access control check
if current_user and upload_session.get("user_id") != str(current_user.id):
raise HTTPException(status_code=403, detail="Access denied")
# Remove all data for this upload
await _cleanup_upload_session(upload_id, upload_session)
await logger.ainfo(
"Upload cancelled",
upload_id=upload_id,
user_id=str(current_user.id) if current_user else "anonymous"
)
return {
"message": "Upload cancelled successfully",
"upload_id": upload_id
}
except HTTPException:
raise
except Exception as e:
await logger.aerror(
"Failed to cancel upload",
upload_id=upload_id,
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to cancel upload")
# Helper functions
async def _assemble_file_chunks(upload_id: str, upload_session: Dict[str, Any]) -> bytes:
"""Сборка файла из чанков"""
cache_manager = await get_cache_manager()
# Sort chunks by position
chunk_positions = sorted([int(pos) for pos in upload_session["chunks"].keys()])
file_content = b""
for position in chunk_positions:
chunk_key = f"upload_chunk:{upload_id}:{position}"
chunk_data = await cache_manager.get(chunk_key)
if not chunk_data:
raise Exception(f"Missing chunk at position {position}")
# Decode the chunk data
chunk_bytes = base64.b64decode(chunk_data["data"])
# Verify the chunks are contiguous
if position != len(file_content):
raise Exception(f"Chunk position mismatch: expected {len(file_content)}, got {position}")
file_content += chunk_bytes
return file_content
async def _create_content_record(
filename: str,
content_type: str,
file_size: int,
user_id: Optional[UUID]
) -> str:
"""Создание записи контента в базе данных"""
try:
async with db_manager.get_session() as session:
content = Content(
id=uuid4(),
user_id=user_id,
title=filename,
description=f"Uploaded file: {filename}",
content_type=content_type,
file_size=file_size,
status="completed",
visibility="private"
)
session.add(content)
await session.commit()
await session.refresh(content)
return str(content.id)
except Exception as e:
await logger.aerror(
"Failed to create content record",
filename=filename,
error=str(e)
)
raise
async def _cleanup_upload_session(upload_id: str, upload_session: Dict[str, Any]) -> None:
"""Очистка временных данных загрузки"""
try:
cache_manager = await get_cache_manager()
# Delete all chunks
for position in upload_session["chunks"].keys():
chunk_key = f"upload_chunk:{upload_id}:{position}"
await cache_manager.delete(chunk_key)
# Delete the session
session_key = f"upload_session:{upload_id}"
await cache_manager.delete(session_key)
await logger.adebug(
"Upload session cleaned up",
upload_id=upload_id,
chunks_deleted=len(upload_session["chunks"])
)
except Exception as e:
await logger.awarning(
"Failed to cleanup upload session",
upload_id=upload_id,
error=str(e)
)
# Additional compatibility endpoints
@router.post("/api/v1/storage/upload")
async def initiate_upload_v1(
request: Request,
current_user: User = Depends(require_auth)
):
"""
Initiate an upload (v1 API compatibility)
"""
try:
# Simple stub for compatibility
upload_id = str(uuid4())
return {
"upload_id": upload_id,
"status": "ready",
"message": "Upload session created"
}
except Exception as e:
await logger.aerror(
"Failed to initiate upload",
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to initiate upload")
@router.get("/api/v1/storage/quota")
async def get_storage_quota(
request: Request,
current_user: User = Depends(require_auth)
):
"""
Get storage quota information
"""
try:
# Basic quota implementation
max_storage = getattr(settings, 'MAX_STORAGE_PER_USER', 1024 * 1024 * 1024) # 1GB
# Compute used space (stub)
used_storage = 0
async with db_manager.get_session() as session:
stmt = select(Content).where(Content.user_id == current_user.id)
result = await session.execute(stmt)
contents = result.scalars().all()
used_storage = sum(content.file_size or 0 for content in contents)
return {
"quota": {
"used_bytes": used_storage,
"max_bytes": max_storage,
"available_bytes": max(0, max_storage - used_storage),
"usage_percent": round((used_storage / max_storage) * 100, 2) if max_storage > 0 else 0
},
"files": {
"count": len(contents),
"max_files": getattr(settings, 'MAX_FILES_PER_USER', 1000)
}
}
except Exception as e:
await logger.aerror(
"Failed to get storage quota",
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to get quota information")
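# Summing file sizes by loading every Content row (as above) pulls the whole
# result set into memory; a SQL aggregate keeps that work in the database.
# A sketch of the equivalent query, under the same model:
from sqlalchemy import func, select

async def used_storage_bytes(session, user_id) -> int:
    """Return total bytes stored by a user via a single aggregate query."""
    stmt = select(func.coalesce(func.sum(Content.file_size), 0)).where(
        Content.user_id == user_id
    )
    return (await session.execute(stmt)).scalar_one()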

View File

@ -0,0 +1,556 @@
"""
FastAPI system endpoints for monitoring, health checks, and administration
TIER 3 - system functions for operational management
"""
import asyncio
import platform
import psutil
import time
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import UUID
from fastapi import APIRouter, HTTPException, Request, Depends, Query
from fastapi.responses import JSONResponse
from sqlalchemy import select, text
from app.core.config import get_settings
from app.core.database import db_manager, get_cache_manager
from app.core.logging import get_logger
from app.core.crypto import get_ed25519_manager
from app.core.models.content_models import StoredContent as Content
from app.core.models.user import User
from app.api.fastapi_middleware import require_auth, require_admin
# Initialize router
router = APIRouter(prefix="/api/system", tags=["system"])
logger = get_logger(__name__)
settings = get_settings()
# System information for monitoring
_start_time = time.time()
_request_counter = 0
_error_counter = 0
@router.get("/health")
async def health_check():
"""
Basic service health check
Available without authorization, for load balancers
"""
try:
# Check the database connection
db_status = "unknown"
try:
async with db_manager.get_session() as session:
await session.execute(text("SELECT 1"))
db_status = "healthy"
except Exception as e:
db_status = f"unhealthy: {str(e)[:100]}"
# Check the cache
cache_status = "unknown"
try:
cache_manager = await get_cache_manager()
await cache_manager.set("health_check", "ok", ttl=10)
cache_status = "healthy"
except Exception as e:
cache_status = f"unhealthy: {str(e)[:100]}"
# Check cryptography
crypto_status = "unknown"
try:
crypto_manager = get_ed25519_manager()
test_data = {"test": "health_check"}
signature = crypto_manager.sign_message(test_data)
is_valid = crypto_manager.verify_signature(
test_data, signature, crypto_manager.public_key_hex
)
crypto_status = "healthy" if is_valid else "unhealthy: signature verification failed"
except Exception as e:
crypto_status = f"unhealthy: {str(e)[:100]}"
# Determine the overall status
overall_status = "healthy"
if "unhealthy" in db_status or "unhealthy" in cache_status or "unhealthy" in crypto_status:
overall_status = "degraded"
health_data = {
"status": overall_status,
"timestamp": datetime.utcnow().isoformat(),
"services": {
"database": db_status,
"cache": cache_status,
"cryptography": crypto_status
},
"uptime_seconds": int(time.time() - _start_time)
}
# Return the status with the matching HTTP code
status_code = 200 if overall_status == "healthy" else 503
return JSONResponse(
content=health_data,
status_code=status_code
)
except Exception as e:
await logger.aerror(
"Health check failed",
error=str(e)
)
return JSONResponse(
content={
"status": "unhealthy",
"error": "Health check system failure",
"timestamp": datetime.utcnow().isoformat()
},
status_code=503
)
@router.get("/health/detailed")
async def detailed_health_check(
request: Request,
current_user: User = Depends(require_admin)
):
"""
Detailed system health check with metrics
Admins only
"""
try:
# System metrics
system_info = {
"cpu_percent": psutil.cpu_percent(interval=1),
"memory": {
"total": psutil.virtual_memory().total,
"available": psutil.virtual_memory().available,
"percent": psutil.virtual_memory().percent
},
"disk": {
"total": psutil.disk_usage('/').total,
"used": psutil.disk_usage('/').used,
"free": psutil.disk_usage('/').free,
"percent": psutil.disk_usage('/').percent
},
"load_average": psutil.getloadavg() if hasattr(psutil, 'getloadavg') else None
}
# Database metrics
db_metrics = {}
try:
async with db_manager.get_session() as session:
# Number of users
user_count = await session.execute(text("SELECT COUNT(*) FROM users"))
db_metrics["users_count"] = user_count.scalar()
# Number of content items
content_count = await session.execute(text("SELECT COUNT(*) FROM stored_content"))
db_metrics["content_count"] = content_count.scalar()
# Database size (approximate)
db_size = await session.execute(text("""
SELECT pg_size_pretty(pg_database_size(current_database()))
"""))
db_metrics["database_size"] = db_size.scalar()
except Exception as e:
db_metrics["error"] = str(e)
# Cache metrics
cache_metrics = {}
try:
cache_manager = await get_cache_manager()
# TODO: add Redis metrics here if available
cache_metrics["status"] = "connected"
except Exception as e:
cache_metrics["error"] = str(e)
# Application metrics
app_metrics = {
"uptime_seconds": int(time.time() - _start_time),
"requests_total": _request_counter,
"errors_total": _error_counter,
"error_rate": _error_counter / max(_request_counter, 1),
"python_version": platform.python_version(),
"platform": platform.platform()
}
# Configuration
config_info = {
"debug_mode": getattr(settings, 'DEBUG', False),
"environment": getattr(settings, 'ENVIRONMENT', 'unknown'),
"version": getattr(settings, 'VERSION', 'unknown'),
"node_id": get_ed25519_manager().node_id[:8] + "..." # Частичный ID для безопасности
}
detailed_health = {
"status": "healthy",
"timestamp": datetime.utcnow().isoformat(),
"system": system_info,
"database": db_metrics,
"cache": cache_metrics,
"application": app_metrics,
"configuration": config_info
}
return detailed_health
except Exception as e:
await logger.aerror(
"Detailed health check failed",
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to get detailed health status")
@router.get("/metrics")
async def prometheus_metrics():
"""
Metrics in Prometheus format
"""
try:
# Basic system metrics
cpu_usage = psutil.cpu_percent(interval=0.1)
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
# Application metrics
uptime = int(time.time() - _start_time)
# Prometheus exposition format
metrics = f"""# HELP uploader_bot_uptime_seconds Total uptime in seconds
# TYPE uploader_bot_uptime_seconds counter
uploader_bot_uptime_seconds {uptime}
# HELP uploader_bot_requests_total Total number of HTTP requests
# TYPE uploader_bot_requests_total counter
uploader_bot_requests_total {_request_counter}
# HELP uploader_bot_errors_total Total number of errors
# TYPE uploader_bot_errors_total counter
uploader_bot_errors_total {_error_counter}
# HELP system_cpu_usage_percent CPU usage percentage
# TYPE system_cpu_usage_percent gauge
system_cpu_usage_percent {cpu_usage}
# HELP system_memory_usage_percent Memory usage percentage
# TYPE system_memory_usage_percent gauge
system_memory_usage_percent {memory.percent}
# HELP system_disk_usage_percent Disk usage percentage
# TYPE system_disk_usage_percent gauge
system_disk_usage_percent {disk.percent}
# HELP system_memory_total_bytes Total memory in bytes
# TYPE system_memory_total_bytes gauge
system_memory_total_bytes {memory.total}
# HELP system_memory_available_bytes Available memory in bytes
# TYPE system_memory_available_bytes gauge
system_memory_available_bytes {memory.available}
"""
from fastapi.responses import PlainTextResponse
return PlainTextResponse(
content=metrics
)
except Exception as e:
await logger.aerror(
"Metrics collection failed",
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to collect metrics")
@router.get("/info")
async def system_info():
"""
General system information (public)
"""
try:
crypto_manager = get_ed25519_manager()
info = {
"service": "uploader-bot",
"version": getattr(settings, 'VERSION', 'unknown'),
"api_version": "v1",
"network": "MY Network v3.0",
"node_id": crypto_manager.node_id,
"public_key": crypto_manager.public_key_hex,
"capabilities": [
"content_upload",
"content_sync",
"decentralized_filtering",
"ed25519_signatures",
"web2_client_api"
],
"supported_formats": [
"image/*",
"video/*",
"audio/*",
"text/*",
"application/pdf"
],
"max_file_size": getattr(settings, 'MAX_FILE_SIZE', 100 * 1024 * 1024),
"timestamp": datetime.utcnow().isoformat()
}
return info
except Exception as e:
await logger.aerror(
"System info failed",
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to get system information")
@router.get("/stats")
async def system_statistics(
request: Request,
current_user: User = Depends(require_auth),
days: int = Query(7, ge=1, le=30, description="Number of days for statistics")
):
"""
System statistics for the given period
"""
try:
since_date = datetime.utcnow() - timedelta(days=days)
# Statistics from the database
stats = {}
async with db_manager.get_session() as session:
# Overall content statistics
content_stats = await session.execute(text("""
SELECT
COUNT(*) as total_content,
SUM(CASE WHEN created_at >= :since_date THEN 1 ELSE 0 END) as new_content,
SUM(file_size) as total_size,
AVG(file_size) as avg_size
FROM stored_content
"""), {"since_date": since_date})
content_row = content_stats.fetchone()
stats["content"] = {
"total_items": content_row.total_content or 0,
"new_items": content_row.new_content or 0,
"total_size_bytes": content_row.total_size or 0,
"average_size_bytes": float(content_row.avg_size or 0)
}
# User statistics
user_stats = await session.execute(text("""
SELECT
COUNT(*) as total_users,
SUM(CASE WHEN created_at >= :since_date THEN 1 ELSE 0 END) as new_users
FROM users
"""), {"since_date": since_date})
user_row = user_stats.fetchone()
stats["users"] = {
"total_users": user_row.total_users or 0,
"new_users": user_row.new_users or 0
}
# System statistics
stats["system"] = {
"uptime_seconds": int(time.time() - _start_time),
"requests_handled": _request_counter,
"errors_occurred": _error_counter,
"period_days": days,
"generated_at": datetime.utcnow().isoformat()
}
return stats
except Exception as e:
await logger.aerror(
"Statistics generation failed",
user_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to generate statistics")
@router.post("/maintenance")
async def toggle_maintenance_mode(
request: Request,
enabled: bool = Query(description="Enable or disable maintenance mode"),
current_user: User = Depends(require_admin)
):
"""
Enable or disable maintenance mode
Admins only
"""
try:
cache_manager = await get_cache_manager()
if enabled:
maintenance_info = {
"enabled": True,
"enabled_at": datetime.utcnow().isoformat(),
"enabled_by": str(current_user.id),
"message": "System is under maintenance. Please try again later."
}
await cache_manager.set("maintenance_mode", maintenance_info, ttl=86400) # 24 часа
await logger.awarning(
"Maintenance mode enabled",
admin_id=str(current_user.id)
)
return {
"message": "Maintenance mode enabled",
"maintenance_info": maintenance_info
}
else:
await cache_manager.delete("maintenance_mode")
await logger.ainfo(
"Maintenance mode disabled",
admin_id=str(current_user.id)
)
return {
"message": "Maintenance mode disabled"
}
except Exception as e:
await logger.aerror(
"Maintenance mode toggle failed",
admin_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to toggle maintenance mode")
@router.get("/logs")
async def get_system_logs(
request: Request,
current_user: User = Depends(require_admin),
level: str = Query("INFO", description="Log level filter"),
lines: int = Query(100, ge=1, le=1000, description="Number of lines to return"),
component: Optional[str] = Query(None, description="Filter by component")
):
"""
Retrieve system logs
Admins only
"""
try:
# Log reading should be implemented here
# In a real system this could hook into the logger or the filesystem
# Stub for demonstration
logs = [
{
"timestamp": datetime.utcnow().isoformat(),
"level": "INFO",
"component": "system",
"message": "System logs endpoint accessed",
"user_id": str(current_user.id)
}
]
return {
"logs": logs,
"total_lines": len(logs),
"filters": {
"level": level,
"lines": lines,
"component": component
},
"generated_at": datetime.utcnow().isoformat()
}
except Exception as e:
await logger.aerror(
"Log retrieval failed",
admin_id=str(current_user.id),
error=str(e)
)
raise HTTPException(status_code=500, detail="Failed to retrieve logs")
# Request-counting hooks (used as middleware in the main application)
async def increment_request_counter():
"""Увеличение счетчика запросов"""
global _request_counter
_request_counter += 1
async def increment_error_counter():
"""Увеличение счетчика ошибок"""
global _error_counter
_error_counter += 1
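# A sketch of how the main application might wire these counters into a
# standard FastAPI HTTP middleware (the app object here is an assumption):
from fastapi import FastAPI, Request

app = FastAPI()

@app.middleware("http")
async def count_requests(request: Request, call_next):
    await increment_request_counter()
    try:
        response = await call_next(request)
    except Exception:
        await increment_error_counter()
        raise
    if response.status_code >= 500:
        await increment_error_counter()
    return response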
# Health check for the readiness probe (Kubernetes)
@router.get("/ready")
async def readiness_check():
"""
Check readiness to serve requests
For the Kubernetes readiness probe
"""
try:
# Check the critical services
checks = []
# Database check
try:
async with db_manager.get_session() as session:
await session.execute(text("SELECT 1"))
checks.append({"service": "database", "status": "ready"})
except Exception as e:
checks.append({"service": "database", "status": "not_ready", "error": str(e)})
# Cache check
try:
cache_manager = await get_cache_manager()
await cache_manager.set("readiness_check", "ok", ttl=5)
checks.append({"service": "cache", "status": "ready"})
except Exception as e:
checks.append({"service": "cache", "status": "not_ready", "error": str(e)})
# Determine readiness
all_ready = all(check["status"] == "ready" for check in checks)
return JSONResponse(
content={
"status": "ready" if all_ready else "not_ready",
"checks": checks,
"timestamp": datetime.utcnow().isoformat()
},
status_code=200 if all_ready else 503
)
except Exception:
return JSONResponse(
content={
"status": "not_ready",
"error": "Readiness check failed",
"timestamp": datetime.utcnow().isoformat()
},
status_code=503
)
# Liveness probe for Kubernetes
@router.get("/live")
async def liveness_check():
"""
Check that the application is alive
For the Kubernetes liveness probe
"""
return {
"status": "alive",
"timestamp": datetime.utcnow().isoformat(),
"uptime_seconds": int(time.time() - _start_time)
}

View File

@ -0,0 +1,177 @@
"""
FastAPI routes for v3 API compatibility
"""
from typing import Dict, Any
from datetime import datetime
from fastapi import APIRouter, HTTPException
from fastapi.responses import JSONResponse
from app.core.crypto import get_ed25519_manager
from app.core.logging import get_logger
logger = get_logger(__name__)
# Router for v3 API compatibility
router = APIRouter(prefix="/api/v3", tags=["v3-compatibility"])
@router.get("/node/status")
async def get_node_status_v3():
"""
Get node status (v3 API compatibility)
"""
try:
crypto_manager = get_ed25519_manager()
return {
"success": True,
"data": {
"node_id": crypto_manager.node_id,
"public_key": crypto_manager.public_key_hex,
"version": "3.0.0",
"status": "active",
"capabilities": [
"content_upload",
"content_sync",
"decentralized_filtering",
"ed25519_signatures"
],
"network": {
"protocol_version": "3.0",
"connections": 0, # TODO: добавить реальную статистику
"peers": []
},
"timestamp": datetime.utcnow().isoformat()
}
}
except Exception as e:
logger.error(f"Node status error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/network/stats")
async def get_network_stats_v3():
"""
Get network statistics (v3 API)
"""
try:
# TODO: add real metrics
return {
"success": True,
"data": {
"network": {
"total_nodes": 1,
"active_nodes": 1,
"total_content": 0,
"network_hash_rate": 0,
"avg_latency_ms": 0
},
"node": {
"uptime_seconds": 0,
"content_served": 0,
"bytes_transferred": 0,
"requests_handled": 0
},
"timestamp": datetime.utcnow().isoformat()
}
}
except Exception as e:
logger.error(f"Network stats error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/content/list")
async def get_content_list_v3():
"""
Get the content list (v3 API)
"""
try:
# TODO: add the real content list
return {
"success": True,
"data": {
"content": [],
"total": 0,
"page": 1,
"per_page": 50
}
}
except Exception as e:
logger.error(f"Content list error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Router for v1 API compatibility
router_v1 = APIRouter(prefix="/api/v1", tags=["v1-compatibility"])
@router_v1.get("/node")
async def get_node_info_v1():
"""
Get node information (v1 API compatibility)
"""
try:
crypto_manager = get_ed25519_manager()
return {
"node_id": crypto_manager.node_id,
"version": "3.0.0",
"status": "online",
"api_version": "v1-compat"
}
except Exception as e:
logger.error(f"Node info error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Router for MY Network compatibility
router_my = APIRouter(prefix="/api/my", tags=["my-network-compatibility"])
@router_my.get("/monitor")
async def get_my_network_monitor():
"""
MY Network monitoring (compatibility)
"""
try:
crypto_manager = get_ed25519_manager()
return {
"status": "active",
"node_id": crypto_manager.node_id,
"version": "3.0.0",
"network": {
"peers": 0,
"content_items": 0
},
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"MY Network monitor error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router_my.post("/handshake")
async def my_network_handshake():
"""
MY Network handshake (compatibility)
"""
try:
crypto_manager = get_ed25519_manager()
return {
"success": True,
"node_id": crypto_manager.node_id,
"version": "3.0.0",
"protocol": "my-network-v3"
}
except Exception as e:
logger.error(f"MY Network handshake error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

View File

@ -1,244 +0,0 @@
import os
from base58 import b58decode
from sanic import response as sanic_response
from uuid import uuid4
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed
from app.core.logger import make_log
from app.core.models.keys import KnownKey
from app.core.models._telegram.wrapped_bot import Wrapped_CBotChat
from app.core.models.user_activity import UserActivity
from app.core.models.user import User
from sqlalchemy import select
from app.core.storage import new_session
from datetime import datetime, timedelta
from app.core.log_context import (
ctx_session_id, ctx_user_id, ctx_method, ctx_path, ctx_remote
)
ENABLE_INTERNAL_CORS = os.getenv("ENABLE_INTERNAL_CORS", "1").lower() in {"1", "true", "yes"}
def attach_headers(response, request=None):
response.headers.pop("Access-Control-Allow-Origin", None)
response.headers.pop("Access-Control-Allow-Methods", None)
response.headers.pop("Access-Control-Allow-Headers", None)
response.headers.pop("Access-Control-Allow-Credentials", None)
if not ENABLE_INTERNAL_CORS:
return response
response.headers["Access-Control-Allow-Origin"] = "*"
response.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS, PATCH, HEAD"
response.headers["Access-Control-Allow-Headers"] = (
"Origin, Content-Type, Accept, Authorization, Referer, User-Agent, Sec-Fetch-Dest, Sec-Fetch-Mode, "
"Sec-Fetch-Site, Tus-Resumable, tus-resumable, Upload-Length, upload-length, Upload-Offset, upload-offset, "
"Upload-Metadata, upload-metadata, Upload-Defer-Length, upload-defer-length, Upload-Concat, upload-concat, "
"x-file-name, x-last-chunk, x-chunk-start, x-upload-id, x-request-id"
)
return response
async def try_authorization(request):
token = request.headers.get("Authorization")
if not token:
return
token_bin = b58decode(token)
if len(token_bin) != 57:
make_log("auth", "Invalid token length", level="warning")
return
result = await request.ctx.db_session.execute(select(KnownKey).where(KnownKey.seed == token))
known_key = result.scalars().first()
if not known_key:
make_log("auth", "Unknown key", level="warning")
return
if known_key.type != "USER_API_V1":
make_log("auth", "Invalid key type", level="warning")
return
(
token_version,
user_id,
timestamp,
randpart
) = (
int.from_bytes(token_bin[0:1], 'big'),
int.from_bytes(token_bin[1:17], 'big'),
int.from_bytes(token_bin[17:25], 'big'),
token_bin[25:]
)
assert token_version == 1, "Invalid token version"
assert user_id > 0, "Invalid user_id"
assert timestamp > 0, "Invalid timestamp"
if known_key.meta.get('I_user_id', -1) != user_id:
make_log("auth", f"User ID mismatch: {known_key.meta.get('I_user_id', -1)} != {user_id}", level="warning")
return
result = await request.ctx.db_session.execute(select(User).where(User.id == known_key.meta['I_user_id']))
user = result.scalars().first()
if not user:
make_log("auth", "No user from key", level="warning")
return
request.ctx.user = user
request.ctx.user_key = known_key
request.ctx.user_uploader_wrapper = Wrapped_CBotChat(request.app.ctx.memory._telegram_bot, chat_id=user.telegram_id, db_session=request.ctx.db_session, user=user)
request.ctx.user_client_wrapper = Wrapped_CBotChat(request.app.ctx.memory._client_telegram_bot, chat_id=user.telegram_id, db_session=request.ctx.db_session, user=user)
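# For reference, the 57-byte USER_API_V1 token layout parsed above,
# reconstructed as a builder (illustrative sketch; the 32-byte random tail
# follows from 57 - 1 - 16 - 8):
import os
import time
from base58 import b58encode

def build_user_token_v1(user_id: int) -> str:
    """Build a version-1 API token matching the parse logic above."""
    token_bin = (
        (1).to_bytes(1, "big")                   # token version
        + user_id.to_bytes(16, "big")            # user id
        + int(time.time()).to_bytes(8, "big")    # timestamp
        + os.urandom(32)                         # random part
    )
    assert len(token_bin) == 57
    return b58encode(token_bin).decode()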
async def try_service_authorization(request):
signature = request.headers.get('X-Service-Signature')
if not signature:
return
# TODO: what is the point of this check if it can be spoofed?
message_hash_b58 = request.headers.get('X-Message-Hash')
if not message_hash_b58:
return
message_hash = b58decode(message_hash_b58)
signer = Signer(hot_seed)
if signer.verify(message_hash, signature):
request.ctx.verified_hash = message_hash
async def save_activity(request):
activity_meta = {}
try:
activity_meta["path"] = request.path
if 'system' in activity_meta["path"]:
return
except:
pass
try:
activity_meta["args"] = dict(request.args)
except:
pass
try:
activity_meta["json"] = dict(request.json)
except:
pass
try:
activity_meta["method"] = request.method
except:
pass
try:
activity_meta["ip"] = (request.headers['X-Forwarded-for'] if 'X-Forwarded-for' in request.headers else None) \
or request.remote_addr or request.ip
activity_meta["ip"] = activity_meta["ip"].split(",")[0].strip()
except:
pass
try:
# Sanitize sensitive headers
headers = dict(request.headers)
for hk in list(headers.keys()):
if str(hk).lower() in [
'authorization', 'cookie', 'x-service-signature', 'x-message-hash'
]:
headers[hk] = '<redacted>'
activity_meta["headers"] = headers
except:
pass
new_user_activity = UserActivity(
type="API_V1_REQUEST",
meta=activity_meta,
user_id=request.ctx.user.id if request.ctx.user else None,
user_ip=activity_meta.get("ip", "0.0.0.0"),
created=datetime.utcnow()
)
request.ctx.db_session.add(new_user_activity)
await request.ctx.db_session.commit()
async def attach_user_to_request(request):
if request.method == 'OPTIONS':
return attach_headers(sanic_response.text("OK"), request)
request.ctx.db_session = new_session()
request.ctx.verified_hash = None
request.ctx.user = None
request.ctx.user_key = None
request.ctx.user_uploader_wrapper = Wrapped_CBotChat(request.app.ctx.memory._telegram_bot, db_session=request.ctx.db_session)
request.ctx.user_client_wrapper = Wrapped_CBotChat(request.app.ctx.memory._client_telegram_bot, db_session=request.ctx.db_session)
# Correlation/session id for this request: prefer proxy-provided X-Request-ID
incoming_req_id = request.headers.get('X-Request-Id') or request.headers.get('X-Request-ID')
request.ctx.session_id = (incoming_req_id or uuid4().hex)[:32]
# Populate contextvars for automatic logging context
try:
ctx_session_id.set(request.ctx.session_id)
ctx_method.set(request.method)
ctx_path.set(request.path)
_remote = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip)
if _remote and isinstance(_remote, str) and ',' in _remote:
_remote = _remote.split(',')[0].strip()
ctx_remote.set(_remote)
except BaseException:
pass
try:
make_log(
"HTTP",
f"Request start sid={request.ctx.session_id} {request.method} {request.path}",
level='info'
)
except BaseException:
pass
await try_authorization(request)
# Update user_id in context after auth
try:
if request.ctx.user and request.ctx.user.id:
ctx_user_id.set(request.ctx.user.id)
except BaseException:
pass
await save_activity(request)
await try_service_authorization(request)
async def close_request_handler(request, response):
if request.method == 'OPTIONS':
response = sanic_response.text("OK")
response = attach_headers(response, request)
try:
await request.ctx.db_session.close()
except BaseException:
pass
try:
make_log(
"HTTP",
f"Request end sid={getattr(request.ctx, 'session_id', None)} {request.method} {request.path} status={getattr(response, 'status', None)}",
level='info'
)
except BaseException:
pass
return request, response
async def close_db_session(request, response):
request, response = await close_request_handler(request, response)
# Clear contextvars
try:
ctx_session_id.set(None)
ctx_user_id.set(None)
ctx_method.set(None)
ctx_path.set(None)
ctx_remote.set(None)
except BaseException:
pass
return response
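# A sketch of how these hooks are typically attached in the Sanic app
# (the app object and registration order here are assumptions):
from sanic import Sanic

app = Sanic("uploader")
app.register_middleware(attach_user_to_request, attach_to="request")
app.register_middleware(close_db_session, attach_to="response")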

View File

@ -1,353 +0,0 @@
from base64 import b64encode
from datetime import datetime
import traceback
from sanic import response
from sqlalchemy import and_, select, func
from tonsdk.boc import begin_cell, begin_dict
from tonsdk.utils import Address
from app.core._blockchain.ton.connect import TonConnect, wallet_obj_by_name
from app.core._blockchain.ton.platform import platform
from app.core._config import PROJECT_HOST
from app.core.logger import make_log
from app.core._utils.resolve_content import resolve_content
from app.core.content.utils import create_metadata_for_item
from app.core._crypto.content import create_encrypted_content
from app.core.models.content.user_content import UserContent
from app.core.models.node_storage import StoredContent
from app.core.models._telegram import Wrapped_CBotChat
from app.core._keyboards import get_inline_keyboard
from app.core.models.promo import PromoAction
from app.core.models.tasks import BlockchainTask
def valid_royalty_params(royalty_params):
assert sum([x['value'] for x in royalty_params]) == 10000, "Values of royalties should sum to 10000"
for royalty_param in royalty_params:
for field_key, field_value in {
'address': lambda x: isinstance(x, str),
'value': lambda x: (isinstance(x, int) and 0 <= x <= 10000)
}.items():
assert field_key in royalty_param, f"No {field_key} provided"
assert field_value(royalty_param[field_key]), f"Invalid {field_key} provided"
return True
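# valid_royalty_params treats value as basis points that must sum to 10000;
# for example, a 70/30 split (addresses are placeholders):
example_royalty_params = [
    {"address": "<artist-ton-address>", "value": 7000},    # 70.00%
    {"address": "<platform-ton-address>", "value": 3000},  # 30.00%
]
assert valid_royalty_params(example_royalty_params)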
async def s_api_v1_blockchain_send_new_content_message(request):
try:
assert request.json, "No data provided"
assert request.ctx.user, "No authorized user provided"
if not request.json['hashtags']:
request.json['hashtags'] = []
for field_key, field_value in {
'title': lambda x: isinstance(x, str),
'authors': lambda x: isinstance(x, list),
'content': lambda x: isinstance(x, str), # may be plaintext CID (legacy) or encrypted IPFS CID (bafy...)
'image': lambda x: isinstance(x, str),
'description': lambda x: isinstance(x, str),
'price': lambda x: (isinstance(x, str) and x.isdigit()),
'allowResale': lambda x: isinstance(x, bool),
'royaltyParams': lambda x: (isinstance(x, list) and valid_royalty_params(x)),
'hashtags': lambda x: isinstance(x, list) and all([isinstance(y, str) for y in x])
}.items():
assert field_key in request.json, f"No {field_key} provided"
assert field_value(request.json[field_key]), f"Invalid {field_key} provided"
artist = request.json.get('artist')
if artist is not None:
assert isinstance(artist, str), "Invalid artist provided"
artist = artist.strip()
if artist == "":
artist = None
else:
artist = None
# Support legacy: 'content' as decrypted ContentId; and new: 'content' as encrypted IPFS CID
source_content_cid, cid_err = resolve_content(request.json['content'])
assert not cid_err, f"Invalid content CID provided: {cid_err}"
encrypted_content_cid = None
decrypted_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == source_content_cid.content_hash_b58)
)).scalars().first()
if decrypted_content and decrypted_content.type == "local/content_bin":
encrypted_content = await create_encrypted_content(request.ctx.db_session, decrypted_content)
encrypted_content_cid = encrypted_content.cid
elif source_content_cid.cid_format == 'ipfs':
encrypted_content_cid = source_content_cid
else:
raise AssertionError("Provided content is neither locally available nor a valid encrypted CID")
if request.json['image']:
image_content_cid, err = resolve_content(request.json['image'])
assert not err, f"Invalid image CID"
image_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == image_content_cid.content_hash_b58)
)).scalars().first()
assert image_content, "No image locally found"
else:
image_content_cid = None
image_content = None
content_title = request.json['title']
if artist:
content_title = f"{artist} {content_title}"
elif request.json['authors']:
content_title = f"{', '.join(request.json['authors'])} {request.json['title']}"
metadata_content = await create_metadata_for_item(
request.ctx.db_session,
title=request.json['title'],
artist=artist,
cover_url=f"{PROJECT_HOST}/api/v1.5/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None,
authors=request.json['authors'],
hashtags=request.json['hashtags'],
downloadable=request.json['downloadable'] if 'downloadable' in request.json else False,
)
# Try to update ContentIndexItem with cover_url for this encrypted content
try:
from app.core.models.content_v3 import ContentIndexItem
ecid_str = encrypted_content_cid.serialize_v2()
row = (await request.ctx.db_session.execute(select(ContentIndexItem).where(ContentIndexItem.encrypted_cid == ecid_str))).scalars().first()
if row:
payload = row.payload or {}
payload['cover_url'] = f"{PROJECT_HOST}/api/v1.5/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None
row.payload = payload
await request.ctx.db_session.commit()
except Exception as _e:
make_log("Blockchain", f"index cover update failed: {_e}", level='warning')
royalties_dict = begin_dict(8)
i = 0
for royalty_param in request.json['royaltyParams']:
royalties_dict.store_ref(
i, begin_cell()
.store_address(Address(royalty_param['address']))
.store_uint(royalty_param['value'], 16)
.end_cell()
)
i += 1
_cnt = (await request.ctx.db_session.execute(
select(func.count()).select_from(PromoAction).where(
and_(
PromoAction.user_internal_id == request.ctx.user.id,
PromoAction.action_type == 'freeUpload'
)
)
)).scalar()
promo_free_upload_available = 3 - int(_cnt or 0)
has_pending_task = (await request.ctx.db_session.execute(
select(BlockchainTask).where(
and_(BlockchainTask.user_id == request.ctx.user.id, BlockchainTask.status != 'done')
)
)).scalars().first()
if has_pending_task:
make_log("Blockchain", f"User {request.ctx.user.id} already has a pending task", level='warning')
promo_free_upload_available = 0
make_log("Blockchain", f"User {request.ctx.user.id} has {promo_free_upload_available} free uploads available", level='info')
if promo_free_upload_available > 0:
promo_action = PromoAction(
user_id = str(request.ctx.user.id),
user_internal_id=request.ctx.user.id,
action_type='freeUpload',
action_ref=encrypted_content_cid.serialize_v2(),
created=datetime.now()
)
request.ctx.db_session.add(promo_action)
blockchain_task = BlockchainTask(
destination=platform.address.to_string(1, 1, 1),
amount=str(int(0.03 * 10 ** 9)),
payload=b64encode(
begin_cell()
.store_uint(0x5491d08c, 32)
.store_uint(int.from_bytes(encrypted_content_cid.content_hash, "big", signed=False), 256)
.store_address(Address(await request.ctx.user.wallet_address_async(request.ctx.db_session)))
.store_ref(
begin_cell()
.store_ref(
begin_cell()
.store_coins(int(0))
.store_coins(int(0))
.store_coins(int(request.json['price']))
.end_cell()
)
.store_maybe_ref(royalties_dict.end_dict())
.store_uint(0, 1)
.end_cell()
)
.store_ref(
begin_cell()
.store_ref(
begin_cell()
.store_bytes(f"{PROJECT_HOST}/api/v1.5/storage/{metadata_content.cid.serialize_v2(include_accept_type=True)}".encode())
.end_cell()
)
.store_ref(
begin_cell()
.store_ref(begin_cell().store_bytes(f"{encrypted_content_cid.serialize_v2()}".encode()).end_cell())
.store_ref(begin_cell().store_bytes(f"{image_content_cid.serialize_v2() if image_content_cid else ''}".encode()).end_cell())
.store_ref(begin_cell().store_bytes(f"{metadata_content.cid.serialize_v2()}".encode()).end_cell())
.end_cell()
)
.end_cell()
)
.end_cell().to_boc(False)
).decode(),
epoch=None, seqno=None,
created = datetime.now(),
status='wait',
user_id = request.ctx.user.id
)
request.ctx.db_session.add(blockchain_task)
await request.ctx.db_session.commit()
await request.ctx.user_uploader_wrapper.send_message(
request.ctx.user.translated('p_uploadContentTxPromo').format(
title=content_title,
free_count=(promo_free_upload_available - 1)
), message_type='hint', message_meta={
'encrypted_content_hash': encrypted_content_cid.content_hash_b58,
'hint_type': 'uploadContentTxRequested'
}
)
return response.json({
'address': "free",
'amount': str(int(0.03 * 10 ** 9)),
'payload': ""
})
user_wallet_address = await request.ctx.user.wallet_address_async(request.ctx.db_session)
assert user_wallet_address, "Wallet address is not linked"
await request.ctx.user_uploader_wrapper.send_message(
request.ctx.user.translated('p_uploadContentTxRequested').format(
title=content_title,
), message_type='hint', message_meta={
'encrypted_content_hash': encrypted_content_cid.content_hash_b58,
'hint_type': 'uploadContentTxRequested'
}
)
payload_cell = (
begin_cell()
.store_uint(0x5491d08c, 32)
.store_uint(int.from_bytes(encrypted_content_cid.content_hash, "big", signed=False), 256)
.store_address(Address(user_wallet_address))
.store_ref(
begin_cell()
.store_ref(
begin_cell()
.store_coins(int(0))
.store_coins(int(0))
.store_coins(int(request.json['price']))
.end_cell()
)
.store_maybe_ref(royalties_dict.end_dict())
.store_uint(0, 1)
.end_cell()
)
.store_ref(
begin_cell()
.store_ref(
begin_cell()
.store_bytes(f"{PROJECT_HOST}/api/v1.5/storage/{metadata_content.cid.serialize_v2(include_accept_type=True)}".encode())
.end_cell()
)
.store_ref(
begin_cell()
.store_ref(begin_cell().store_bytes(f"{encrypted_content_cid.serialize_v2()}".encode()).end_cell())
.store_ref(begin_cell().store_bytes(f"{image_content_cid.serialize_v2() if image_content_cid else ''}".encode()).end_cell())
.store_ref(begin_cell().store_bytes(f"{metadata_content.cid.serialize_v2()}".encode()).end_cell())
.end_cell()
)
.end_cell()
)
.end_cell()
)
return response.json({
'address': platform.address.to_string(1, 1, 1),
'amount': str(int(0.03 * 10 ** 9)),
'payload': b64encode(payload_cell.to_boc(False)).decode()
})
except BaseException as e:
make_log("Blockchain", f"Error while sending new content message: {e}" + '\n' + traceback.format_exc(), level='error')
return response.json({"error": str(e)}, status=400)
async def s_api_v1_blockchain_send_purchase_content_message(request):
assert request.json, "No data provided"
for field_key, field_value in {
'content_address': lambda x: isinstance(x, str),
'license_type': lambda x: x in ['resale']
}.items():
assert field_key in request.json, f"No {field_key} provided"
assert field_value(request.json[field_key]), f"Invalid {field_key} provided"
if not (await request.ctx.user.wallet_address_async(request.ctx.db_session)):
return response.json({"error": "No wallet address provided"}, status=400)
from sqlalchemy import select
license_exist = (await request.ctx.db_session.execute(select(UserContent).where(
UserContent.onchain_address == request.json['content_address']
))).scalars().first()
from app.core.content.content_id import ContentId
if license_exist and license_exist.content_id:
r_content = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.id == license_exist.content_id
))).scalars().first()
else:
requested_cid = ContentId.deserialize(request.json['content_address'])
r_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == requested_cid.content_hash_b58))).scalars().first()
async def open_content_async(session, sc: StoredContent):
if not sc.encrypted:
decrypted = sc
encrypted = (await session.execute(select(StoredContent).where(StoredContent.decrypted_content_id == sc.id))).scalars().first()
else:
encrypted = sc
decrypted = (await session.execute(select(StoredContent).where(StoredContent.id == sc.decrypted_content_id))).scalars().first()
assert decrypted and encrypted, "Can't open content"
ctype = decrypted.json_format().get('content_type', 'application/x-binary')
try:
content_type = ctype.split('/')[0]
except Exception:
content_type = 'application'
return {'encrypted_content': encrypted, 'decrypted_content': decrypted, 'content_type': content_type}
content = await open_content_async(request.ctx.db_session, r_content)
licenses_cost = content['encrypted_content'].json_format()['license']
assert request.json['license_type'] in licenses_cost
return response.json({
'address': (
license_exist.onchain_address if license_exist else content['encrypted_content'].json_format()['item_address']
),
'amount': str(int(licenses_cost['resale']['price'])),
'payload': b64encode((
begin_cell()
.store_uint(0x2a319593, 32)
.store_uint(0, 64)
.store_uint(3, 8)
# .store_uint({
# 'listen': 1,
# 'resale': 3
# }[request.json['license_type']], 8)
.store_uint(0, 256)
.store_uint(0, 2)
.end_cell()
).to_boc(False)).decode()
})

View File

@ -1,15 +0,0 @@
from sanic import response
async def s_index(request):
return response.json({
'success': True,
'message': 'Welcome to the @MY API!'
})
async def s_favicon(request):
return response.redirect(
"https://git.projscale.dev/my-dev/assets/raw/commit/890ed9e60a25a65c8ad600d6d0ad3ac4480e3039/images/logo.png"
)

View File

@ -1,96 +0,0 @@
import json
import subprocess
from datetime import datetime
from base58 import b58encode, b58decode
from sanic import response
from app.core.models.node_storage import StoredContent
from sqlalchemy import select
from app.core._blockchain.ton.platform import platform
from app.core._crypto.signer import Signer
from app.core._secrets import hot_pubkey, service_wallet, hot_seed
from app.core.logger import make_log
def get_git_info():
branch_name = subprocess.check_output(["git", "branch", "--show-current"]).decode('utf-8').strip()
commit_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode('utf-8').strip()
return branch_name, commit_hash
async def s_api_v1_node(request): # /api/v1/node
last_known_index_obj = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.onchain_index != None).order_by(StoredContent.onchain_index.desc())
)).scalars().first()
last_known_index = last_known_index_obj.onchain_index if last_known_index_obj else 0
last_known_index = max(last_known_index, 0)
return response.json({
'id': b58encode(hot_pubkey).decode(),
'node_address': service_wallet.address.to_string(1, 1, 1),
'master_address': platform.address.to_string(1, 1, 1),
'indexer_height': last_known_index,
'services': {
service_key: {
'status': (service['status'] if (service['timestamp'] and (datetime.now() - service['timestamp']).total_seconds() < 30) else 'not working: timeout'),
'delay': round((datetime.now() - service['timestamp']).total_seconds(), 3) if service['timestamp'] else -1,
}
for service_key, service in request.app.ctx.memory.known_states.items()
}
})
async def s_api_v1_node_friendly(request):
last_known_index_obj = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.onchain_index != None).order_by(StoredContent.onchain_index.desc())
)).scalars().first()
last_known_index = last_known_index_obj.onchain_index if last_known_index_obj else 0
last_known_index = max(last_known_index, 0)
response_plain_text = f"""
Node address: {service_wallet.address.to_string(1, 1, 1)}
Node ID: {b58encode(hot_pubkey).decode()}
Master address: {platform.address.to_string(1, 1, 1)}
Indexer height: {last_known_index}
Services:
"""
for service_key, service in request.app.ctx.memory.known_states.items():
response_plain_text += f"""
{service_key}:
status: {service['status'] if (service['timestamp'] and (datetime.now() - service['timestamp']).total_seconds() < 120) else 'not working: timeout'}
delay: {round((datetime.now() - service['timestamp']).total_seconds(), 3) if service['timestamp'] else -1}
"""
return response.text(response_plain_text, content_type='text/plain')
async def s_api_system_send_status(request):
if not request.json:
return response.json({'error': 'No data'}, status=400)
message = request.json.get('message', '')
signature = request.json.get('signature', '')
if not message or not signature:
return response.json({'error': 'No message or signature'}, status=400)
message = b58decode(message)
signer = Signer(hot_seed)
if not signer.verify(message, signature):
return response.json({'error': 'Invalid signature'}, status=400)
message = json.loads(message)
assert message.get('service') in request.app.ctx.memory.known_states, "Unknown service"
request.app.ctx.memory.known_states[
message['service']
] = {
'status': message['status'],
'timestamp': datetime.now(),
}
make_log("Health", f"Service {message['service']} status: {message['status']}", level='info')
return response.json({'message': 'Status received'})
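# Example (hedged sketch, not part of this module): how a sibling service could
# report its status to the handler above. Assumes the service shares hot_seed,
# that Signer.sign() returns the signature string verify() expects, and that the
# route is mounted at /api/system/send_status; report_status is hypothetical.
import aiohttp

async def report_status(base_url: str, service: str, status: str) -> None:
    body = json.dumps({'service': service, 'status': status}).encode()
    payload = {
        'message': b58encode(body).decode(),  # handler b58-decodes before verifying
        'signature': Signer(hot_seed).sign(body),
    }
    async with aiohttp.ClientSession() as session:
        await session.post(f"{base_url}/api/system/send_status", json=payload)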
async def s_api_system_version(request):
branch_name, commit_hash = get_git_info()
return response.json({
"codebase_hash": commit_hash,
"codebase_branch": branch_name,
})


@ -1,8 +0,0 @@
from sanic import response
async def s_api_v1_account_get(request):
if not request.ctx.user:
return response.json({"error": "User not found"}, status=400)
return response.json(request.ctx.user.json_format())

File diff suppressed because it is too large


@ -1,218 +0,0 @@
from datetime import datetime
from uuid import uuid4
from aiogram.utils.web_app import safe_parse_webapp_init_data
from sanic import response
from sqlalchemy import select, and_
from tonsdk.utils import Address
from app.core._config import TELEGRAM_API_KEY, CLIENT_TELEGRAM_API_KEY
from app.core.logger import make_log
from app.core.models import KnownKey, WalletConnection
from app.core.models.user import User
from pytonconnect.parsers import WalletInfo, Account, TonProof
async def s_api_v1_auth_twa(request):
auth_data = {}
for req_key in ['twa_data', 'ton_proof', 'ref_id']:
try:
auth_data[req_key] = request.json[req_key]
except (KeyError, TypeError):
auth_data[req_key] = None
twa_data = auth_data['twa_data']
valid_twa_data = False
for validation_api_key in [TELEGRAM_API_KEY, CLIENT_TELEGRAM_API_KEY]:
try:
twa_data = safe_parse_webapp_init_data(token=validation_api_key, init_data=twa_data)
assert twa_data
valid_twa_data = True
break
except Exception:
pass
if not valid_twa_data:
make_log("auth", "Invalid TWA data", level="warning")
return response.json({"error": "Invalid TWA data"}, status=401)
known_user = (await request.ctx.db_session.execute(
select(User).where(User.telegram_id == twa_data.user.id)
)).scalars().first()
if not known_user:
new_user = User(
telegram_id=twa_data.user.id,
username=twa_data.user.username,
meta={
"first_name": twa_data.user.first_name,
"last_name": twa_data.user.last_name,
"photo_url": twa_data.user.photo_url
},
lang_code=twa_data.user.language_code,
last_use=datetime.now(),
created=datetime.now()
)
request.ctx.db_session.add(new_user)
await request.ctx.db_session.commit()
known_user = (await request.ctx.db_session.execute(
select(User).where(User.telegram_id == twa_data.user.id)
)).scalars().first()
assert known_user, "User not created"
meta_updated = False
if not (known_user.meta or {}).get('ref_id'):
known_user.ensure_ref_id()
meta_updated = True
incoming_ref_id = auth_data.get('ref_id')
stored_ref_id = (known_user.meta or {}).get('ref_id')
if incoming_ref_id and incoming_ref_id != stored_ref_id:
if (known_user.meta or {}).get('referrer_id') != incoming_ref_id:
known_user.meta = {
**(known_user.meta or {}),
'referrer_id': incoming_ref_id
}
meta_updated = True
new_user_key = await known_user.create_api_token_v1(request.ctx.db_session, "USER_API_V1")
if auth_data['ton_proof']:
try:
wallet_info = WalletInfo()
auth_data['ton_proof']['account']['network'] = auth_data['ton_proof']['account']['chain']
wallet_info.account = Account.from_dict(auth_data['ton_proof']['account'])
wallet_info.ton_proof = TonProof.from_dict({'proof': auth_data['ton_proof']['ton_proof']})
connection_payload = auth_data['ton_proof']['ton_proof']['payload']
known_payload = (await request.ctx.db_session.execute(select(KnownKey).where(KnownKey.seed == connection_payload))).scalars().first()
assert known_payload, "Unknown payload"
assert known_payload.meta['I_user_id'] == known_user.id, "Invalid user_id"
assert wallet_info.check_proof(connection_payload), "Invalid proof"
for known_connection in (await request.ctx.db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == known_user.id,
WalletConnection.network == 'ton'
)
))).scalars().all():
known_connection.invalidated = True
for other_connection in (await request.ctx.db_session.execute(select(WalletConnection).where(
WalletConnection.wallet_address == Address(wallet_info.account.address).to_string(1, 1, 1)
))).scalars().all():
other_connection.invalidated = True
new_connection = WalletConnection(
user_id=known_user.id,
network='ton',
wallet_key='web2-client==1',
# `ton_proof.payload` is expected to be single-use in many wallets (and it is unique per auth call here),
# but client-side retries/replays can happen; keep payload separately and make DB id unique.
connection_id=f"{connection_payload}.{uuid4().hex}",
wallet_address=Address(wallet_info.account.address).to_string(1, 1, 1),
keys={
'ton_proof': auth_data['ton_proof'],
'ton_proof_payload': connection_payload,
},
meta={
'ton_proof_payload': connection_payload,
},
created=datetime.now(),
updated=datetime.now(),
invalidated=False,
without_pk=False
)
request.ctx.db_session.add(new_connection)
await request.ctx.db_session.commit()
except BaseException as e:
make_log("auth", f"Invalid ton_proof: {e}", level="warning")
return response.json({"error": "Invalid ton_proof"}, status=400)
ton_connection = (await request.ctx.db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == known_user.id,
WalletConnection.network == 'ton',
WalletConnection.invalidated == False
)
).order_by(WalletConnection.created.desc()))).scalars().first()
known_user.last_use = datetime.now()
if meta_updated:
known_user.updated = datetime.now()
await request.ctx.db_session.commit()
return response.json({
'user': known_user.json_format(),
'connected_wallet': ton_connection.json_format() if ton_connection else None,
'auth_v1_token': new_user_key['auth_v1_token']
})
async def s_api_v1_auth_me(request):
if not request.ctx.user:
return response.json({"error": "Unauthorized"}, status=401)
ton_connection = (await request.ctx.db_session.execute(
select(WalletConnection).where(
and_(
WalletConnection.user_id == request.ctx.user.id,
WalletConnection.network == 'ton',
WalletConnection.invalidated == False
)
).order_by(WalletConnection.created.desc())
)).scalars().first()
return response.json({
'user': request.ctx.user.json_format(),
'connected_wallet': ton_connection.json_format() if ton_connection else None
})
async def s_api_v1_auth_select_wallet(request):
if not request.ctx.user:
return response.json({"error": "Unauthorized"}, status=401)
try:
data = request.json
except Exception:
return response.json({"error": "Invalid JSON"}, status=400)
if "wallet_address" not in data:
return response.json({"error": "wallet_address is required"}, status=400)
# Convert raw wallet address to canonical format using Address from tonsdk.utils
raw_addr = data["wallet_address"]
canonical_address = Address(raw_addr).to_string(1, 1, 1)
db_session = request.ctx.db_session
user = request.ctx.user
# Check if a WalletConnection already exists for this user with the given canonical wallet address
existing_connection = (await db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == user.id,
WalletConnection.wallet_address == canonical_address
)
))).scalars().first()
if not existing_connection:
return response.json({"error": "Wallet connection not found"}, status=404)
saved_values = {
'keys': existing_connection.keys,
'meta': existing_connection.meta,
'wallet_key': existing_connection.wallet_key,
'connection_id': existing_connection.connection_id + uuid4().hex,
'network': existing_connection.network,
}
new_connection = WalletConnection(
**saved_values,
user_id=user.id,
wallet_address=canonical_address,
created=datetime.now(),
updated=datetime.now(),
invalidated=False,
without_pk=False
)
db_session.add(new_connection)
await db_session.commit()
return response.empty(status=200)
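# Illustrative client call (hedged sketch): the body mirrors the fields read by
# s_api_v1_auth_twa above; the /api/v1/auth.twa path and the auth_twa helper
# are assumptions.
import aiohttp

async def auth_twa(base_url: str, init_data: str, ton_proof: dict = None, ref_id: str = None) -> dict:
    body = {
        'twa_data': init_data,   # raw Telegram WebApp initData string
        'ton_proof': ton_proof,  # optional: {'account': {...}, 'ton_proof': {...}}
        'ref_id': ref_id,        # optional referral id
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(f"{base_url}/api/v1/auth.twa", json=body) as resp:
            # -> {'user': ..., 'connected_wallet': ..., 'auth_v1_token': ...}
            return await resp.json()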

View File

@ -1,651 +0,0 @@
from __future__ import annotations
from datetime import datetime, timedelta
from sanic import response
from sqlalchemy import select, and_, func, or_
from aiogram import Bot, types
from app.core.logger import make_log
from app.core.models._config import ServiceConfig
from app.core.models.node_storage import StoredContent
from app.core.models.keys import KnownKey
from app.core.models import StarsInvoice
from app.core.models.content.user_content import UserContent
from app.core._config import CLIENT_TELEGRAM_API_KEY, CLIENT_TELEGRAM_BOT_USERNAME, PROJECT_HOST
from app.core.models.content_v3 import EncryptedContent as ECv3, ContentDerivative as CDv3, UploadSession
from app.core.content.content_id import ContentId
from app.core.network.dht import MetricsAggregator
import os
import json
import time
import uuid
async def s_api_v1_content_list(request):
offset = int(request.args.get('offset', 0))
limit = int(request.args.get('limit', 100))
assert 0 <= offset, "Invalid offset"
assert 0 < limit <= 1000, "Invalid limit"
store = request.args.get('store', 'local')
assert store in ('local', 'onchain'), "Invalid store"
stmt = (
select(StoredContent)
.where(
StoredContent.type.like(store + '%'),
StoredContent.disabled.is_(None)
)
.order_by(StoredContent.created.desc())
.offset(offset)
.limit(limit)
)
rows = (await request.ctx.db_session.execute(stmt)).scalars().all()
make_log("Content", f"Listed {len(rows)} contents", level='info')
result = {}
for content in rows:
content_json = content.json_format()
result[content_json["cid"]] = content_json
return response.json(result)
async def s_api_v1_content_view(request, content_address: str):
# content_address can be CID or TON address
license_exist = (await request.ctx.db_session.execute(
select(UserContent).where(UserContent.onchain_address == content_address)
)).scalars().first()
license_address = None
if license_exist:
license_address = license_exist.onchain_address
if license_exist.content_id:
linked_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.id == license_exist.content_id)
)).scalars().first()
if linked_content:
content_address = linked_content.cid.serialize_v2()
cid = ContentId.deserialize(content_address)
r_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == cid.content_hash_b58)
)).scalars().first()
async def open_content_async(session, sc: StoredContent):
if not sc.encrypted:
decrypted = sc
encrypted = (await session.execute(select(StoredContent).where(StoredContent.decrypted_content_id == sc.id))).scalars().first()
else:
encrypted = sc
decrypted = (await session.execute(select(StoredContent).where(StoredContent.id == sc.decrypted_content_id))).scalars().first()
assert decrypted and encrypted, "Can't open content"
ctype = decrypted.json_format().get('content_type', 'application/x-binary')
try:
content_type = ctype.split('/')[0]
except Exception:
content_type = 'application'
return {
'encrypted_content': encrypted,
'decrypted_content': decrypted,
'content_type': content_type,
'content_mime': ctype,
}
try:
content = await open_content_async(request.ctx.db_session, r_content)
except AssertionError:
# Fallback: handle plain stored content without encrypted/decrypted pairing
sc = r_content
from mimetypes import guess_type as _guess
_mime, _ = _guess(sc.filename or '')
_mime = _mime or 'application/octet-stream'
try:
_ctype = _mime.split('/')[0]
except Exception:
_ctype = 'application'
content = {
'encrypted_content': sc,
'decrypted_content': sc,
'content_type': _ctype,
'content_mime': _mime,
}
master_address = content['encrypted_content'].meta.get('item_address', '')
opts = {
'content_type': content['content_type'], # possibly inaccurate; should be reworked to use ffprobe
'content_mime': content.get('content_mime'),
'content_address': license_address or master_address,
'license_address': license_address,
'master_address': master_address,
}
if content['encrypted_content'].key_id:
known_key = (await request.ctx.db_session.execute(
select(KnownKey).where(KnownKey.id == content['encrypted_content'].key_id)
)).scalars().first()
if known_key:
opts['key_hash'] = known_key.seed_hash # not actually needed at the moment
# bare placeholders; filled in below
opts['have_licenses'] = []
opts['invoice'] = None
have_access = False
if request.ctx.user:
user_wallet_address = await request.ctx.user.wallet_address_async(request.ctx.db_session)
user_telegram_id = getattr(request.ctx.user, 'telegram_id', None)
or_clauses = [StarsInvoice.user_id == request.ctx.user.id]
if user_telegram_id is not None:
or_clauses.append(StarsInvoice.telegram_id == user_telegram_id)
stars_access = False
if or_clauses:
stars_access = bool((await request.ctx.db_session.execute(select(StarsInvoice).where(
and_(
StarsInvoice.content_hash == content['encrypted_content'].hash,
StarsInvoice.paid.is_(True),
or_(*or_clauses)
)
))).scalars().first())
have_access = (
(content['encrypted_content'].owner_address == user_wallet_address)
or bool((await request.ctx.db_session.execute(select(UserContent).where(
and_(UserContent.owner_address == user_wallet_address, UserContent.status == 'active', UserContent.content_id == content['encrypted_content'].id)
))).scalars().first())
or stars_access
)
if not have_access:
current_star_rate = (await ServiceConfig(request.ctx.db_session).get('live_tonPerStar', [0, 0]))[0]
if current_star_rate <= 0:
current_star_rate = 0.00000001
stars_cost = int(int(content['encrypted_content'].meta['license']['resale']['price']) / 1e9 / current_star_rate * 1.2)
if getattr(request.ctx.user, 'is_admin', False):
stars_cost = 2
invoice_id = f"access_{uuid.uuid4().hex}"
exist_invoice = (await request.ctx.db_session.execute(select(StarsInvoice).where(
and_(
StarsInvoice.user_id == request.ctx.user.id,
StarsInvoice.created > datetime.now() - timedelta(minutes=25),
StarsInvoice.amount == stars_cost,
StarsInvoice.content_hash == content['encrypted_content'].hash,
)
))).scalars().first()
if exist_invoice:
invoice_url = exist_invoice.invoice_url
else:
invoice_url = None
try:
invoice_url = await Bot(token=CLIENT_TELEGRAM_API_KEY).create_invoice_link(
'Unlimited access to content',
'Unlimited access to content',
invoice_id, "XTR",
[
types.LabeledPrice(label='Lifetime access', amount=stars_cost),
], provider_token = ''
)
request.ctx.db_session.add(
StarsInvoice(
external_id=invoice_id,
type='access',
amount=stars_cost,
user_id=request.ctx.user.id,
content_hash=content['encrypted_content'].hash,
invoice_url=invoice_url,
telegram_id=getattr(request.ctx.user, 'telegram_id', None),
bot_username=CLIENT_TELEGRAM_BOT_USERNAME,
)
)
await request.ctx.db_session.commit()
except BaseException as e:
make_log("Content", f"Can't create invoice link: {e}", level='warning')
if invoice_url:
opts['invoice'] = {
'url': invoice_url,
'amount': stars_cost,
}
display_options = {
'content_url': None,
'content_kind': None,
'has_preview': False,
'original_available': False,
'requires_license': False,
}
if have_access:
opts['have_licenses'].append('listen')
encrypted_json = content['encrypted_content'].json_format()
decrypted_json = content['decrypted_content'].json_format()
enc_cid = encrypted_json.get('content_cid') or encrypted_json.get('encrypted_cid')
ec_v3 = None
derivative_rows = []
if enc_cid:
ec_v3 = (await request.ctx.db_session.execute(select(ECv3).where(ECv3.encrypted_cid == enc_cid))).scalars().first()
if ec_v3:
derivative_rows = (await request.ctx.db_session.execute(select(CDv3).where(CDv3.content_id == ec_v3.id))).scalars().all()
upload_row = None
if enc_cid:
upload_row = (await request.ctx.db_session.execute(select(UploadSession).where(UploadSession.encrypted_cid == enc_cid))).scalars().first()
converted_meta_map = dict(content['encrypted_content'].meta.get('converted_content') or {})
content_mime = (
(ec_v3.content_type if ec_v3 and ec_v3.content_type else None)
or decrypted_json.get('content_type')
or encrypted_json.get('content_type')
or opts.get('content_mime')
or 'application/octet-stream'
)
# Fallback: if stored content reports generic application/*, try guess by filename
try:
if content_mime.startswith('application/'):
from mimetypes import guess_type as _guess
_fn = decrypted_json.get('filename') or encrypted_json.get('filename') or ''
_gm, _ = _guess(_fn)
if _gm:
content_mime = _gm
except Exception:
pass
opts['content_mime'] = content_mime
try:
opts['content_type'] = content_mime.split('/')[0]
except Exception:
opts['content_type'] = opts.get('content_type') or 'application'
if content_mime.startswith('video/'):
content_kind = 'video'
elif content_mime.startswith('audio/'):
content_kind = 'audio'
else:
content_kind = 'binary'
display_options['content_kind'] = content_kind
display_options['requires_license'] = (not have_access) and content_kind == 'binary'
derivative_latest = {}
if derivative_rows:
derivative_sorted = sorted(derivative_rows, key=lambda row: row.created_at or datetime.min)
for row in derivative_sorted:
derivative_latest[row.kind] = row
def _row_to_hash_and_url(row):
if not row or not row.local_path:
return None, None
file_hash = row.local_path.split('/')[-1]
return file_hash, f"{PROJECT_HOST}/api/v1/storage.proxy/{file_hash}"
has_preview = bool(derivative_latest.get('decrypted_preview') or converted_meta_map.get('low_preview'))
display_options['has_preview'] = has_preview
display_options['original_available'] = bool(derivative_latest.get('decrypted_original') or converted_meta_map.get('original'))
chosen_row = None
if content_kind == 'binary':
if have_access and 'decrypted_original' in derivative_latest:
chosen_row = derivative_latest['decrypted_original']
elif have_access:
for key in ('decrypted_low', 'decrypted_high'):
if key in derivative_latest:
chosen_row = derivative_latest[key]
break
else:
for key in ('decrypted_preview', 'decrypted_low'):
if key in derivative_latest:
chosen_row = derivative_latest[key]
break
def _make_token_for(hash_value: str, scope: str, user_id: int | None) -> str:
try:
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed, hot_pubkey
from app.core._utils.b58 import b58encode as _b58e
signer = Signer(hot_seed)
# Media URLs are polled very frequently by the web client (e.g. every 5s).
# If we generate a new exp for every request, the signed URL changes every poll,
# forcing the player to reload and breaking continuous streaming.
#
# To keep URLs stable while still expiring tokens, we "bucket" exp time.
# Default behavior keeps tokens stable for ~10 minutes; can be tuned via env.
ttl_sec = int(os.getenv("STORAGE_PROXY_TOKEN_TTL_SEC", "600"))
bucket_sec = int(os.getenv("STORAGE_PROXY_TOKEN_BUCKET_SEC", str(ttl_sec)))
ttl_sec = max(1, ttl_sec)
bucket_sec = max(1, bucket_sec)
now = int(time.time())
exp_base = now + ttl_sec
# Always move to the next bucket boundary so the token doesn't flip immediately
# after a boundary due to rounding edge cases.
exp = ((exp_base // bucket_sec) + 1) * bucket_sec
uid = int(user_id or 0)
payload = {'hash': hash_value, 'scope': scope, 'exp': exp, 'uid': uid}
blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
sig = signer.sign(blob)
pub = _b58e(hot_pubkey).decode()
return f"pub={pub}&exp={exp}&scope={scope}&uid={uid}&sig={sig}"
except Exception:
return ""
if chosen_row:
file_hash, url = _row_to_hash_and_url(chosen_row)
if url:
token = _make_token_for(file_hash or '', 'full' if have_access else 'preview', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{url}?{token}" if token else url
ext_candidate = None
if chosen_row.content_type:
ext_candidate = chosen_row.content_type.split('/')[-1]
elif '/' in content_mime:
ext_candidate = content_mime.split('/')[-1]
if ext_candidate:
opts['content_ext'] = ext_candidate
if content_kind == 'binary':
display_options['original_available'] = True
converted_meta_map.setdefault('original', file_hash)
elif have_access:
converted_meta_map.setdefault('low', file_hash)
else:
converted_meta_map.setdefault('low_preview', file_hash)
if not display_options['content_url'] and converted_meta_map:
if content_kind == 'binary':
preference = ['original'] if have_access else []
else:
preference = ['low', 'high', 'low_preview'] if have_access else ['low_preview', 'low', 'high']
for key in preference:
hash_value = converted_meta_map.get(key)
if not hash_value:
continue
# Try the proxy right away (even if there is no local record)
token = _make_token_for(hash_value, 'full' if have_access else 'preview', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{PROJECT_HOST}/api/v1/storage.proxy/{hash_value}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{hash_value}"
if '/' in content_mime:
opts['content_ext'] = content_mime.split('/')[-1]
if content_kind == 'binary':
display_options['original_available'] = True
break
# Final fallback: no derivatives known — serve stored content directly for AV
if not display_options['content_url'] and content_kind in ('audio', 'video'):
from app.core._utils.b58 import b58encode as _b58e
scid = decrypted_json.get('cid') or encrypted_json.get('cid')
try:
from app.core.content.content_id import ContentId as _CID
if scid:
_cid = _CID.deserialize(scid)
h = _cid.content_hash_b58
else:
h = decrypted_json.get('hash')
except Exception:
h = decrypted_json.get('hash')
if h:
token = _make_token_for(h, 'preview' if not have_access else 'full', getattr(request.ctx.user, 'id', None))
display_options['content_url'] = f"{PROJECT_HOST}/api/v1/storage.proxy/{h}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{h}"
# Metadata fallback
content_meta = encrypted_json
content_metadata_json = None
_mcid = content_meta.get('metadata_cid') or None
if _mcid:
_cid = ContentId.deserialize(_mcid)
content_metadata = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
if content_metadata:
try:
with open(content_metadata.filepath, 'r') as f:
content_metadata_json = json.loads(f.read())
except Exception as exc:
make_log("Content", f"Can't read metadata file: {exc}", level='warning')
if not content_metadata_json:
fallback_name = (ec_v3.title if ec_v3 else None) or content_meta.get('title') or content_meta.get('cid')
fallback_description = (ec_v3.description if ec_v3 else '') or ''
content_metadata_json = {
'name': fallback_name or 'Untitled',
'description': fallback_description,
'downloadable': False,
}
cover_cid = content_meta.get('cover_cid')
if cover_cid:
token = _make_token_for(cover_cid, 'preview', getattr(request.ctx.user, 'id', None))
content_metadata_json.setdefault('image', f"{PROJECT_HOST}/api/v1/storage.proxy/{cover_cid}?{token}" if token else f"{PROJECT_HOST}/api/v1/storage.proxy/{cover_cid}")
display_options['metadata'] = content_metadata_json
opts['downloadable'] = content_metadata_json.get('downloadable', False)
if opts['downloadable'] and 'listen' not in opts['have_licenses']:
opts['downloadable'] = False
# Conversion status summary
conversion_summary = {}
conversion_details = []
derivative_summary_map = {}
for row in derivative_latest.values():
conversion_summary[row.status] = conversion_summary.get(row.status, 0) + 1
derivative_summary_map[row.kind] = row
conversion_details.append({
'kind': row.kind,
'status': row.status,
'size_bytes': row.size_bytes,
'content_type': row.content_type,
'error': row.error,
'updated_at': (row.last_access_at or row.created_at).isoformat() + 'Z' if (row.last_access_at or row.created_at) else None,
})
required_kinds = set()
if content_kind == 'binary':
if derivative_latest.get('decrypted_original') or converted_meta_map.get('original'):
required_kinds.add('decrypted_original')
else:
required_kinds = {'decrypted_low', 'decrypted_high'}
if ec_v3 and ec_v3.content_type and ec_v3.content_type.startswith('video/'):
required_kinds.add('decrypted_preview')
statuses_by_kind = {kind: row.status for kind, row in derivative_summary_map.items() if kind in required_kinds}
conversion_state = 'pending'
if required_kinds and all(statuses_by_kind.get(kind) == 'ready' for kind in required_kinds):
conversion_state = 'ready'
elif any(statuses_by_kind.get(kind) == 'failed' for kind in required_kinds):
conversion_state = 'failed'
elif any(statuses_by_kind.get(kind) in ('processing', 'pending') for kind in required_kinds):
conversion_state = 'processing'
elif statuses_by_kind:
conversion_state = 'partial'
if display_options['content_url']:
conversion_state = 'ready'
upload_info = None
if upload_row:
upload_info = {
'id': upload_row.id,
'state': upload_row.state,
'error': upload_row.error,
'created_at': upload_row.created_at.isoformat() + 'Z' if upload_row.created_at else None,
'updated_at': upload_row.updated_at.isoformat() + 'Z' if upload_row.updated_at else None,
}
upload_state = upload_row.state if upload_row else None
if conversion_state == 'failed' or upload_state in ('failed', 'conversion_failed'):
final_state = 'failed'
elif conversion_state == 'ready':
final_state = 'ready'
elif conversion_state in ('processing', 'partial') or upload_state in ('processing', 'pinned'):
final_state = 'processing'
else:
final_state = 'uploaded'
conversion_info = {
'state': conversion_state,
'summary': conversion_summary,
'details': conversion_details,
'required_kinds': list(required_kinds),
}
opts['conversion'] = conversion_info
opts['upload'] = upload_info
opts['status'] = {
'state': final_state,
'conversion_state': conversion_state,
'upload_state': upload_info['state'] if upload_info else None,
'has_access': have_access,
}
if not opts.get('content_ext') and '/' in content_mime:
opts['content_ext'] = content_mime.split('/')[-1]
metrics_mgr: MetricsAggregator | None = getattr(request.app.ctx.memory, "metrics", None)
if metrics_mgr:
viewer_salt_raw = request.headers.get("X-View-Salt")
if viewer_salt_raw:
try:
viewer_salt = bytes.fromhex(viewer_salt_raw)
except ValueError:
viewer_salt = viewer_salt_raw.encode()
elif request.ctx.user:
viewer_salt = f"user:{request.ctx.user.id}".encode()
else:
viewer_salt = (request.remote_addr or request.ip or "anonymous").encode()
try:
watch_time_param = int(request.args.get("watch_time", 0))
except (TypeError, ValueError):
watch_time_param = 0
try:
bytes_out_param = int(request.args.get("bytes_out", 0))
except (TypeError, ValueError):
bytes_out_param = 0
completed_param = request.args.get("completed", "0") in ("1", "true", "True")
metrics_mgr.record_view(
content_id=content['encrypted_content'].hash,
viewer_salt=viewer_salt,
watch_time=watch_time_param,
bytes_out=bytes_out_param,
completed=completed_param,
)
return response.json({
**opts,
'encrypted': content['encrypted_content'].json_format(),
'display_options': display_options,
})
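# Illustrative poll (route path assumed): a client can periodically request
# GET /api/v1/content/<content_address>?watch_time=5&bytes_out=262144&completed=0
# and optionally send an X-View-Salt header (hex) so record_view() above can
# estimate unique viewers without tying the salt to a user id.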
async def s_api_v1_content_friendly_list(request):
# return html table with content list. bootstrap is used
result = """
<html>
<head>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-YvpcrYf0tY3lHB60NNkmXc5s9fDVZLESaAA55NDzOxhy9GkcIdslK1eN7N6jIeHz" crossorigin="anonymous"></script>
</head>
<body>
<table class="table table-striped">
<thead>
<tr>
<th>CID</th>
<th>Title</th>
<th>Onchain</th>
<th>Preview link</th>
</tr>
</thead>
"""
contents = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.type == 'onchain/content'
))).scalars().all()
for content in contents:
if not content.meta.get('metadata_cid'):
make_log("Content", f"Content {content.cid.serialize_v2()} has no metadata", level='warning')
continue
_cid = ContentId.deserialize(content.meta.get('metadata_cid'))
metadata_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
if not metadata_content:
continue
with open(metadata_content.filepath, 'r') as f:
metadata = json.loads(f.read())
preview_link = None
converted = content.meta.get('converted_content') or {}
if converted.get('low_preview'):
preview_link = f"{PROJECT_HOST}/api/v1.5/storage/{converted['low_preview']}"
result += f"""
<tr>
<td>{content.cid.serialize_v2()}</td>
<td>{metadata.get('name', "")}</td>
<td>{content.meta.get('item_address')}</td>
<td>""" + (f'<a href="{preview_link}">Preview</a>' if preview_link else "not ready") + """</td>
</tr>
"""
result += """
</table>
</body>
</html>
"""
return response.html(result)
async def s_api_v1_5_content_list(request):
# Validate offset and limit parameters
offset = int(request.args.get('offset', 0))
limit = int(request.args.get('limit', 100))
if offset < 0:
return response.json({'error': 'Invalid offset'}, status=400)
if limit <= 0 or limit > 1000:
return response.json({'error': 'Invalid limit'}, status=400)
# Query onchain contents which are not disabled
contents = (await request.ctx.db_session.execute(
select(StoredContent)
.where(StoredContent.type == 'onchain/content', StoredContent.disabled == False)
.order_by(StoredContent.created.desc())
.offset(offset).limit(limit)
)).scalars().all()
result = []
for content in contents:
# Retrieve metadata content using metadata_cid from content.meta
metadata_cid = content.meta.get('metadata_cid')
if not metadata_cid:
continue # Skip if no metadata_cid is found
_cid = ContentId.deserialize(metadata_cid)
metadata_content = (await request.ctx.db_session.execute(select(StoredContent).where(StoredContent.hash == _cid.content_hash_b58))).scalars().first()
try:
with open(metadata_content.filepath, 'r') as f:
metadata = json.load(f)
except Exception:
metadata = {}
media_type = 'audio'
# Get title from metadata (key 'name')
title = metadata.get('name', '')
# Build preview link if converted_content exists and contains 'low_preview'
preview_link = None
converted_meta = content.meta.get('converted_content') or {}
if converted_meta.get('low_preview'):
converted_row = (await request.ctx.db_session.execute(select(StoredContent).where(
StoredContent.hash == converted_meta['low_preview']
))).scalars().first()
if converted_row:
preview_link = converted_row.web_url
if converted_row.filename.split('.')[-1] in ('mp4', 'mov'):
media_type = 'video'
# Get onchain address from content.meta
onchain_address = content.meta.get('item_address', '')
result.append({
'cid': content.cid.serialize_v2(),
'onchain_address': onchain_address,
'type': media_type,
'title': title,
'preview_link': preview_link,
'created_at': content.created.isoformat() # ISO 8601 format for datetime
})
return response.json(result)


@ -0,0 +1,176 @@
from __future__ import annotations
import logging
from typing import Any, Dict, Optional
from fastapi import APIRouter, HTTPException, Query, Request
from fastapi.responses import StreamingResponse, JSONResponse
from app.core.access.content_access_manager import ContentAccessManager
from app.core._blockchain.ton.nft_license_manager import NFTLicenseManager
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/content", tags=["content-access"])
def _json_ok(data: Dict[str, Any]) -> JSONResponse:
return JSONResponse({"success": True, "data": data})
@router.post("/request-access")
async def request_access(body: Dict[str, Any]):
"""
POST /api/content/request-access
Body:
{
"content_id": "sha256...",
"ton_proof": {
"address": "...", "public_key": "...", "timestamp": 0,
"domain_val": "...", "domain_len": 0, "payload": "...", "signature": "..."
},
"nft_address": "EQ...." (optional),
"token_ttl_sec": 600 (optional)
}
Response:
{"success": true, "data": {"token": "...", "expires_at": 0, "owner_address": "...", "nft_item": {...}}}
"""
try:
content_id = body.get("content_id")
ton_proof = body.get("ton_proof") or {}
nft_address = body.get("nft_address")
token_ttl_sec = body.get("token_ttl_sec")
if not content_id:
raise HTTPException(status_code=400, detail="content_id is required")
if not ton_proof:
raise HTTPException(status_code=400, detail="ton_proof is required")
mgr = ContentAccessManager(nft_manager=NFTLicenseManager())
ok, err, payload = await mgr.grant_access(
ton_proof=ton_proof,
content_id=content_id,
nft_address=nft_address,
token_ttl_sec=token_ttl_sec,
)
if not ok:
raise HTTPException(status_code=403, detail=err or "Access denied")
return _json_ok(payload)
except HTTPException:
raise
except Exception as e:
logger.exception("request_access failed")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/verify-license")
async def verify_license(body: Dict[str, Any]):
"""
POST /api/content/verify-license
Тело:
{
"content_id": "sha256...",
"ton_proof": { ... as above ... },
"nft_address": "EQ...." (optional)
}
Response:
{"success": true, "data": {"valid": true, "owner_address": "...", "nft_item": {...}}}
"""
try:
content_id = body.get("content_id")
ton_proof = body.get("ton_proof") or {}
nft_address = body.get("nft_address")
if not content_id:
raise HTTPException(status_code=400, detail="content_id is required")
if not ton_proof:
raise HTTPException(status_code=400, detail="ton_proof is required")
nft_mgr = NFTLicenseManager()
ok, err, nft_item = await nft_mgr.check_license_validity(
ton_proof=ton_proof, content_id=content_id, nft_address=nft_address
)
if not ok:
return _json_ok({"valid": False, "error": err})
# Extract the owner address for the client's convenience
owner_address = None
try:
# small local import, avoiding a cycle, so we don't pull everything in at module level
from app.core.access.content_access_manager import nft_proof_owner # noqa
owner_address = nft_proof_owner(ton_proof)
except Exception:
owner_address = None
return _json_ok({"valid": True, "owner_address": owner_address, "nft_item": nft_item})
except HTTPException:
raise
except Exception as e:
logger.exception("verify_license failed")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/stream/{content_id}")
async def stream_content(
request: Request,
content_id: str,
token: str = Query(..., description="Temporary token obtained via /request-access"),
):
"""
GET /api/content/stream/{content_id}?token=...
Streams the decrypted content when the temporary token is valid.
Notes:
- A content key provider (content_key_provider) is required here: given a content_id it must return the 32-byte content key.
In the current service the key is never issued from an NFT; it is stored on the node/network off-chain and is never returned to the client.
- This route is a skeleton: where the encrypted data (encrypted_obj) is read from depends on your DB/filesystem.
"""
try:
mgr = ContentAccessManager()
# Stub for reading the encrypted content object.
# Integrate the actual storage here (e.g. a DB or the filesystem) and fetch
# an object compatible with the ContentCipher.decrypt_content input.
# encrypted_obj format:
# {
# "ciphertext_b64": "...",
# "nonce_b64": "...",
# "tag_b64": "...",
# "metadata": {...},
# "content_id": "sha256..."
# }
encrypted_obj: Optional[Dict[str, Any]] = None
if not encrypted_obj:
raise HTTPException(status_code=404, detail="Encrypted content not found")
# Key provider that resolves the encryption key by content_id; plug in your implementation
def content_key_provider(cid: str) -> bytes:
# Must return the 32-byte key (from the node's secure storage),
# e.g. raise NotImplementedError or fetch it from a KMS/database
raise HTTPException(status_code=501, detail="content_key_provider is not configured")
ok, err, pt = mgr.decrypt_for_stream(
encrypted_obj=encrypted_obj,
content_key_provider=content_key_provider,
token=token,
content_id=content_id,
associated_data=None,
)
if not ok or pt is None:
raise HTTPException(status_code=403, detail=err or "Access denied")
async def stream_bytes():
# Simplest possible streaming: yield the whole buffer at once.
# For large payloads, yield in chunks instead.
yield pt
# The content type could be derived from metadata or from a stored mime type
return StreamingResponse(stream_bytes(), media_type="application/octet-stream")
except HTTPException:
raise
except Exception as e:
logger.exception("stream_content failed")
raise HTTPException(status_code=500, detail=str(e))
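# A minimal sketch of a pluggable content_key_provider, assuming DEKs live in an
# environment-style secret store keyed by content_id; the CONTENT_KEY_<cid>
# naming scheme and the helper itself are hypothetical.
import os

def env_content_key_provider(cid: str) -> bytes:
    raw = os.environ.get(f"CONTENT_KEY_{cid}")
    if raw is None:
        raise KeyError(f"no key configured for {cid}")
    key = bytes.fromhex(raw)
    if len(key) != 32:
        raise ValueError("content key must be exactly 32 bytes")
    return key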


@ -1,53 +0,0 @@
from __future__ import annotations
from sanic import response
from sqlalchemy import select
from datetime import datetime
from app.core.models.content_v3 import ContentIndexItem
from app.core.logger import make_log
async def s_api_v1_content_index(request):
rows = (await request.ctx.db_session.execute(select(ContentIndexItem))).scalars().all()
items = [{**r.payload, "encrypted_cid": r.encrypted_cid, "sig": r.sig, "_updated_at": (r.updated_at.isoformat() + 'Z') if r.updated_at else None} for r in rows]
# ETag by max updated_at + count
max_ts = max((it.get("_updated_at") for it in items if it.get("_updated_at")), default="1970-01-01T00:00:00Z")
etag = f'W/"{max_ts}.{len(items)}"'
inm = request.headers.get('If-None-Match')
if inm and inm == etag:
resp = response.empty(status=304)
resp.headers['ETag'] = etag
return resp
for it in items:
it.pop("_updated_at", None)
make_log("content.index", f"items={len(items)} etag={etag}")
resp = response.json({"items": items, "schema": "my-network/index@1"})
resp.headers['ETag'] = etag
return resp
async def s_api_v1_content_delta(request):
since = request.args.get('since')
if not since:
# No since provided → act as full index
return await s_api_v1_content_index(request)
try:
# basic parse
_ = datetime.fromisoformat(since.replace('Z', '+00:00'))
except Exception:
return response.json({"error": "BAD_SINCE"}, status=400)
rows = (await request.ctx.db_session.execute(select(ContentIndexItem))).scalars().all()
out = []
max_ts = since
for r in rows:
upd = (r.updated_at.isoformat() + 'Z') if r.updated_at else None
if upd and upd > since:
out.append({**r.payload, "encrypted_cid": r.encrypted_cid, "sig": r.sig})
if upd > max_ts:
max_ts = upd
resp = response.json({"items": out, "next_since": max_ts, "schema": "my-network/index@1"})
# Weak ETag for delta response
resp.headers['ETag'] = f'W/"{max_ts}.{len(out)}"'
return resp
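# Hypothetical delta-sync loop against the two handlers above; the
# /api/v1/content.delta path is an assumption based on the handler name.
import aiohttp

async def pull_index_updates(base_url: str, since: str = None) -> str:
    params = {'since': since} if since else {}
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{base_url}/api/v1/content.delta", params=params) as resp:
            data = await resp.json()
    for item in data['items']:
        pass  # merge each item into the local index here
    return data.get('next_since', since)  # feed back into the next call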


@ -0,0 +1,146 @@
from __future__ import annotations
import asyncio
import logging
import os
import uuid
from typing import Optional, List, Dict, Any
from fastapi import APIRouter, UploadFile, File, Form, HTTPException, Query
from fastapi.responses import JSONResponse, FileResponse
from app.core.converter.conversion_manager import ConversionManager
from app.core.models.converter.conversion_models import (
ContentMetadata,
ConversionPriority,
ConversionStatus,
)
router = APIRouter(prefix="/api/converter", tags=["converter"])
logger = logging.getLogger(__name__)
# Global manager singleton (could be replaced with a DI container)
_conversion_manager: Optional[ConversionManager] = None
def get_manager() -> ConversionManager:
global _conversion_manager
if _conversion_manager is None:
_conversion_manager = ConversionManager()
return _conversion_manager
@router.post("/submit")
async def submit_conversion(
file: UploadFile = File(...),
title: str = Form(...),
description: Optional[str] = Form(None),
author: Optional[str] = Form(None),
collection: Optional[str] = Form(None),
tags: Optional[str] = Form(None), # CSV
language: Optional[str] = Form(None),
explicit: Optional[bool] = Form(None),
quality: str = Form("high"), # "high" | "low"
input_ext: Optional[str] = Form(None), # if unknown, we try to infer it from the file
priority: int = Form(50),
trim: Optional[str] = Form(None),
custom: Optional[str] = Form(None), # arbitrary ffmpeg parameters, space-separated
):
"""
Accepts a file and queues a conversion task.
Returns the task_id.
"""
try:
# Save the input file into the uploader-bot temporary storage
uploads_dir = "uploader-bot/uploader-bot/data/uploads"
os.makedirs(uploads_dir, exist_ok=True)
input_name = file.filename or f"upload-{uuid.uuid4().hex}"
local_path = os.path.join(uploads_dir, input_name)
with open(local_path, "wb") as f:
f.write(await file.read())
# Determine the extension if it was not provided
in_ext = input_ext or os.path.splitext(input_name)[1].lstrip(".").lower() or "bin"
# Metadata
md = ContentMetadata(
title=title,
description=description,
author=author,
collection=collection,
tags=[t.strip() for t in (tags.split(","))] if tags else [],
language=language,
explicit=explicit,
attributes={},
)
prio = ConversionPriority.NORMAL
try:
# normalize the int range onto the enum
p_int = int(priority)
if p_int >= ConversionPriority.CRITICAL:
prio = ConversionPriority.CRITICAL
elif p_int >= ConversionPriority.HIGH:
prio = ConversionPriority.HIGH
elif p_int >= ConversionPriority.NORMAL:
prio = ConversionPriority.NORMAL
else:
prio = ConversionPriority.LOW
except Exception:
pass
custom_list: List[str] = []
if custom:
# Split on spaces; no sophisticated parsing
custom_list = [c for c in custom.split(" ") if c]
manager = get_manager()
task_id = await manager.process_upload(
local_input_path=local_path,
input_ext=in_ext,
quality="high" if quality == "high" else "low",
metadata=md,
priority=prio,
custom=custom_list,
trim=trim,
)
return JSONResponse({"task_id": task_id})
except Exception as e:
logger.exception("submit_conversion failed: %s", e)
raise HTTPException(status_code=500, detail=str(e))
@router.get("/status/{task_id}")
async def get_status(task_id: str):
"""
Returns the task status.
"""
try:
manager = get_manager()
status = await manager.get_conversion_status(task_id)
return JSONResponse({"task_id": task_id, "status": status.value})
except Exception as e:
logger.exception("get_status failed: %s", e)
raise HTTPException(status_code=500, detail=str(e))
@router.get("/result/{task_id}")
async def get_result(task_id: str):
"""
Returns the task result with the content_id, chunks, and NFT metadata.
"""
try:
manager = get_manager()
res = await manager.handle_conversion_result(task_id)
if not res:
# the task is still running or queued
status = await manager.get_conversion_status(task_id)
if status in (ConversionStatus.QUEUED, ConversionStatus.RUNNING):
return JSONResponse({"task_id": task_id, "status": status.value})
raise HTTPException(status_code=404, detail="result not ready")
return JSONResponse(res.to_dict())
except Exception as e:
logger.exception("get_result failed: %s", e)
raise HTTPException(status_code=500, detail=str(e))
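# Illustrative client (hedged sketch): the multipart field names match the
# Form/File parameters of submit_conversion above; the convert_file helper is
# an assumption.
import aiohttp

async def convert_file(base_url: str, path: str, title: str) -> str:
    form = aiohttp.FormData()
    with open(path, 'rb') as fh:
        form.add_field('file', fh.read(), filename=path.rsplit('/', 1)[-1])
    form.add_field('title', title)
    form.add_field('quality', 'high')
    async with aiohttp.ClientSession() as session:
        async with session.post(f"{base_url}/api/converter/submit", data=form) as resp:
            task_id = (await resp.json())['task_id']
    return task_id  # poll /api/converter/status/{task_id} until ready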


@ -1,33 +0,0 @@
from __future__ import annotations
from sanic import response
from sqlalchemy import select
from app.core.models.content_v3 import EncryptedContent, ContentDerivative
from app.core._config import PROJECT_HOST
async def s_api_v1_content_derivatives(request):
cid = request.args.get('cid')
if not cid:
return response.json({"error": "BAD_REQUEST"}, status=400)
session = request.ctx.db_session
ec = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
if not ec:
return response.json({"error": "NOT_FOUND"}, status=404)
rows = (await session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id))).scalars().all()
out = []
for r in rows:
# Derive /api/v1.5/storage/<hash> from local_path if possible
path_hash = (r.local_path or '').split('/')[-1]
storage_url = f"{PROJECT_HOST}/api/v1.5/storage/{path_hash}" if path_hash else None
out.append({
'kind': r.kind,
'interval': [r.interval_start_ms, r.interval_end_ms] if r.interval_start_ms is not None else None,
'content_type': r.content_type,
'size_bytes': r.size_bytes,
'status': r.status,
'url': storage_url,
})
return response.json({'cid': cid, 'derivatives': out})


@ -1,125 +0,0 @@
from __future__ import annotations
import json
from typing import Any, Dict, List
from sanic import response
from app.core.logger import make_log
from app.core._utils.b58 import b58decode
from app.core.network.dht.records import DHTRecord
from app.core.network.dht.store import DHTStore
from app.core.network.dht.crypto import compute_node_id
from app.core.network.dht.keys import MetaKey, MembershipKey, MetricKey
from sqlalchemy import select
from app.core.models.my_network import KnownNode
def _merge_strategy_for(key: str):
# Pick the merge strategy matching the key prefix
from app.core.network.dht.replication import ReplicationState
from app.core.network.dht.membership import MembershipState
from app.core.network.dht.metrics import ContentMetricsState
if key.startswith('meta:'):
return lambda a, b: ReplicationState.from_dict(a).merge_with(ReplicationState.from_dict(b)).to_dict()
if key.startswith('membership:'):
# Membership merges normally need a node_id, but only for local state; a plain CRDT merge suffices here
return lambda a, b: MembershipState.from_dict('remote', None, a).merge(MembershipState.from_dict('remote', None, b)).to_dict()
if key.startswith('metric:'):
return lambda a, b: ContentMetricsState.from_dict('remote', a).merge(ContentMetricsState.from_dict('remote', b)).to_dict()
return lambda a, b: b
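# Example: _merge_strategy_for("meta:<cid>") folds two ReplicationState payloads
# with their CRDT merge; "membership:" and "metric:" keys use their respective
# state merges; any other prefix falls back to last-write-wins (lambda a, b: b).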
async def s_api_v1_dht_get(request):
"""Возвращает запись DHT по fingerprint или key."""
store: DHTStore = request.app.ctx.memory.dht_store
fp = request.args.get('fingerprint')
key = request.args.get('key')
if fp:
rec = store.get(fp)
if not rec:
return response.json({'error': 'NOT_FOUND'}, status=404)
return response.json({**rec.to_payload(), 'signature': rec.signature})
if key:
snap = store.snapshot()
for _fp, payload in snap.items():
if payload.get('key') == key:
return response.json(payload)
return response.json({'error': 'NOT_FOUND'}, status=404)
return response.json({'error': 'BAD_REQUEST'}, status=400)
def _verify_publisher(node_id: str, public_key_b58: str) -> bool:
try:
derived = compute_node_id(b58decode(public_key_b58))
return derived == node_id
except Exception:
return False
async def s_api_v1_dht_put(request):
"""Принимает запись(и) DHT, проверяет подпись и выполняет merge/persist.
Поддерживает одиночную запись (record: {...}) и пакет (records: [{...}]).
Требует поле public_key отправителя и соответствие node_id.
"""
mem = request.app.ctx.memory
store: DHTStore = mem.dht_store
data = request.json or {}
public_key = data.get('public_key')
if not public_key:
return response.json({'error': 'MISSING_PUBLIC_KEY'}, status=400)
# Determine publisher role (trusted/read-only/deny)
role = None
try:
session = request.ctx.db_session
kn = (await session.execute(select(KnownNode).where(KnownNode.public_key == public_key))).scalars().first()
role = (kn.meta or {}).get('role') if kn and kn.meta else None
except Exception:
role = None
def _process_one(payload: Dict[str, Any]) -> Dict[str, Any]:
try:
rec = DHTRecord.create(
key=payload['key'],
fingerprint=payload['fingerprint'],
value=payload['value'],
node_id=payload['node_id'],
logical_counter=int(payload['logical_counter']),
signature=payload.get('signature'),
timestamp=float(payload.get('timestamp') or 0),
)
except Exception as e:
return {'error': f'BAD_RECORD: {e}'}
if not _verify_publisher(rec.node_id, public_key):
return {'error': 'NODE_ID_MISMATCH'}
# Verify the record signature
if not rec.verify(public_key):
return {'error': 'BAD_SIGNATURE'}
# Enforce ACL: untrusted nodes may not mutate meta/metric records
if role != 'trusted':
if rec.key.startswith('meta:') or rec.key.startswith('metric:'):
return {'error': 'FORBIDDEN_NOT_TRUSTED'}
merge_fn = _merge_strategy_for(rec.key)
try:
merged = store.merge_record(rec, merge_fn)
return {'ok': True, 'fingerprint': merged.fingerprint}
except Exception as e:
make_log('DHT.put', f'merge failed: {e}', level='warning')
return {'error': 'MERGE_FAILED'}
if 'record' in data:
result = _process_one(data['record'])
status = 200 if 'ok' in result else 400
return response.json(result, status=status)
elif 'records' in data and isinstance(data['records'], list):
results: List[Dict[str, Any]] = []
ok = True
for item in data['records']:
res = _process_one(item)
if 'error' in res:
ok = False
results.append(res)
return response.json({'ok': ok, 'results': results}, status=200 if ok else 207)
return response.json({'error': 'BAD_REQUEST'}, status=400)
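# Illustrative request body for the handler above (hedged sketch): field names
# follow the DHTRecord.create() call in _process_one; all values here are
# placeholders.
example_put_body = {
    'public_key': '<b58 publisher public key>',
    'record': {
        'key': 'membership:node-abc',
        'fingerprint': '<fingerprint from the local DHT store>',
        'value': {'alive': True},
        'node_id': '<must equal compute_node_id(b58decode(public_key))>',
        'logical_counter': 7,
        'signature': '<signature over the record>',
        'timestamp': 1700000000.0,
    },
}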


@ -1,117 +0,0 @@
from __future__ import annotations
import base64
import json
import os
from datetime import datetime
from typing import Dict, Any
from base58 import b58encode
from sanic import response
from sqlalchemy import select
from app.core._secrets import hot_pubkey
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, ContentKey, KeyGrant
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit
from app.core.models.my_network import KnownNode
from app.core.crypto.keywrap import unwrap_dek, KeyWrapError
def _b64(b: bytes) -> str:
return base64.b64encode(b).decode()
async def s_api_v1_keys_request(request):
# Rate limit per remote IP (reuse handshake limiter)
remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
if not check_rate_limit(request.app.ctx.memory, remote_ip):
return response.json({"error": "RATE_LIMIT"}, status=429)
# Verify NodeSig
ok, hdr_node, reason = verify_request(request, request.app.ctx.memory)
if not ok:
return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
data: Dict[str, Any] = request.json or {}
cid = data.get("encrypted_cid")
requester_node = data.get("requestor_node_id")
recipient_box_pub_b64 = data.get("recipient_box_pub")
if not cid or not requester_node or not recipient_box_pub_b64:
return response.json({"error": "BAD_REQUEST"}, status=400)
if requester_node != hdr_node:
return response.json({"error": "NODE_ID_MISMATCH"}, status=401)
session = request.ctx.db_session
row = (await session.execute(select(EncryptedContent, ContentKey).join(ContentKey, ContentKey.content_id == EncryptedContent.id).where(EncryptedContent.encrypted_cid == cid))).first()
if not row:
return response.json({"error": "NOT_FOUND"}, status=404)
ec: EncryptedContent = row[0]
ck: ContentKey = row[1]
# Allow only trusted nodes unless explicitly disabled via env
TRUSTED_ONLY = (os.getenv('KEY_AUTO_GRANT_TRUSTED_ONLY', '1') == '1')
if TRUSTED_ONLY:
kn = (await session.execute(select(KnownNode).where(KnownNode.public_key == requester_node))).scalars().first()
role = (kn.meta or {}).get('role') if kn else None
if role != 'trusted':
return response.json({"error": "DENIED_NOT_TRUSTED"}, status=403)
if not ck.allow_auto_grant:
return response.json({"error": "DENIED"}, status=403)
# Seal the DEK for recipient using libsodium sealed box
try:
dek_plain = unwrap_dek(ck.key_ciphertext_b64)
import nacl.public
pk = nacl.public.PublicKey(base64.b64decode(recipient_box_pub_b64))
box = nacl.public.SealedBox(pk)
sealed = box.encrypt(dek_plain)
sealed_b64 = _b64(sealed)
except KeyWrapError as e:
make_log("keys", f"unwrap failed: {e}", level="error")
return response.json({"error": "KEY_UNWRAP_FAILED"}, status=500)
except Exception as e:
make_log("keys", f"seal failed: {e}", level="error")
return response.json({"error": "SEAL_FAILED"}, status=500)
issuer = b58encode(hot_pubkey).decode()
purpose = (data.get('purpose') or 'full')
ttl_sec = int(os.getenv('KEY_GRANT_PREVIEW_TTL_SEC', '0')) if purpose == 'preview' else 0
grant_body = {
"encrypted_cid": cid,
"to_node_id": requester_node,
"sealed_key_b64": sealed_b64,
"aead_scheme": ec.aead_scheme,
"chunk_bytes": ec.chunk_bytes,
"constraints": {"ttl_sec": ttl_sec, "scope": purpose},
"issued_at": datetime.utcnow().isoformat(),
"issuer_node_id": issuer,
}
try:
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed
signer = Signer(hot_seed)
blob = json.dumps(grant_body, sort_keys=True, separators=(",", ":")).encode()
sig = signer.sign(blob)
except Exception:
sig = ""
grant = KeyGrant(
encrypted_cid=cid,
issuer_node_id=issuer,
to_node_id=requester_node,
sealed_key_b64=sealed_b64,
aead_scheme=ec.aead_scheme,
chunk_bytes=ec.chunk_bytes,
constraints={"ttl_sec": ttl_sec, "scope": purpose},
sig=sig,
)
session.add(grant)
await session.commit()
grant_row = {
**grant_body,
"sig": sig,
"grant_id": grant.id,
}
return response.json(grant_row)
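# Sketch of the requesting node opening the sealed DEK from the grant above,
# assuming it holds the Curve25519 secret key matching recipient_box_pub
# (PyNaCl's SealedBox supports unsealing with the private key); open_sealed_key
# is hypothetical.
import nacl.public

def open_sealed_key(grant_row: dict, box_secret: nacl.public.PrivateKey) -> bytes:
    sealed = base64.b64decode(grant_row['sealed_key_b64'])  # base64 imported above
    return nacl.public.SealedBox(box_secret).decrypt(sealed)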


@ -1,39 +0,0 @@
from __future__ import annotations
from sanic import response
async def s_api_metrics(request):
try:
from prometheus_client import generate_latest, CONTENT_TYPE_LATEST # type: ignore
data = generate_latest()
return response.raw(data, content_type=CONTENT_TYPE_LATEST)
except Exception:
# Fallback: export minimal in-process counters from DHT module, if available
try:
from app.core.network.dht import prometheus as dprom
def dump(metric_obj, metric_name):
lines = []
values = getattr(metric_obj, "_values", {})
for labels, value in values.items():
label_str = ",".join(f'{k}="{v}"' for k, v in labels)
if label_str:
lines.append(f"{metric_name}{{{label_str}}} {value}")
else:
lines.append(f"{metric_name} {value}")
return lines
parts = []
parts += dump(dprom.replication_under, "dht_replication_under_total")
parts += dump(dprom.replication_over, "dht_replication_over_total")
parts += dump(dprom.leader_changes, "dht_leader_changes_total")
parts += dump(dprom.merge_conflicts, "dht_merge_conflicts_total")
parts += dump(dprom.view_count_total, "dht_view_count_total")
parts += dump(dprom.unique_estimate, "dht_unique_view_estimate")
parts += dump(dprom.watch_time_seconds, "dht_watch_time_seconds")
body = "\n".join(parts) + ("\n" if parts else "")
return response.text(body, content_type="text/plain; version=0.0.4")
except Exception:
return response.text("")


@ -0,0 +1,844 @@
"""
Advanced monitoring routes for MY Network
"""
import asyncio
import psutil
import time
from datetime import datetime
from typing import Dict, List, Any
from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import HTMLResponse
import json
import logging
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/my/monitor", tags=["monitoring"])
# Store connected websocket clients
connected_clients: List[WebSocket] = []
# Simulated network nodes data
network_nodes = [
{
"id": "node_001_local_dev",
"name": "Primary Development Node",
"status": "online",
"location": "Local Development",
"uptime": "2h 15m",
"connections": 8,
"data_synced": "95%",
"last_seen": datetime.now().isoformat(),
"ip": "127.0.0.1:15100",
"version": "2.0.0"
},
{
"id": "node_002_production",
"name": "Production Node Alpha",
"status": "online",
"location": "Cloud Server US-East",
"uptime": "15d 8h",
"connections": 42,
"data_synced": "100%",
"last_seen": datetime.now().isoformat(),
"ip": "198.51.100.10:15100",
"version": "2.0.0"
},
{
"id": "node_003_backup",
"name": "Backup Node Beta",
"status": "maintenance",
"location": "Cloud Server EU-West",
"uptime": "3d 2h",
"connections": 0,
"data_synced": "78%",
"last_seen": datetime.now().isoformat(),
"ip": "203.0.113.20:15100",
"version": "1.9.8"
},
{
"id": "node_004_edge",
"name": "Edge Node Gamma",
"status": "connecting",
"location": "CDN Edge Node",
"uptime": "12m",
"connections": 3,
"data_synced": "12%",
"last_seen": datetime.now().isoformat(),
"ip": "192.0.2.30:15100",
"version": "2.0.0"
}
]
@router.get("/")
async def advanced_monitoring_dashboard():
"""Serve the advanced monitoring dashboard"""
dashboard_html = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>MY Network - Advanced Monitor</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
background: #000;
color: #00ff00;
font-family: 'Courier New', monospace;
overflow-x: hidden;
min-height: 100vh;
}
.matrix-bg {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: -1;
opacity: 0.1;
}
.container {
padding: 20px;
max-width: 1400px;
margin: 0 auto;
position: relative;
z-index: 1;
}
.header {
text-align: center;
margin-bottom: 30px;
border: 2px solid #00ff00;
padding: 20px;
background: rgba(0, 0, 0, 0.8);
position: relative;
}
.header h1 {
font-size: 2.5rem;
text-shadow: 0 0 10px #00ff00;
animation: glow 2s ease-in-out infinite alternate;
}
.header .subtitle {
font-size: 1.2rem;
margin-top: 10px;
opacity: 0.8;
}
.stats-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 20px;
margin-bottom: 30px;
}
.stat-card {
border: 1px solid #00ff00;
padding: 20px;
background: rgba(0, 50, 0, 0.3);
position: relative;
overflow: hidden;
}
.stat-card::before {
content: '';
position: absolute;
top: 0;
left: -100%;
width: 100%;
height: 2px;
background: linear-gradient(90deg, transparent, #00ff00, transparent);
animation: scan 3s linear infinite;
}
.stat-title {
font-size: 1.1rem;
margin-bottom: 10px;
text-transform: uppercase;
}
.stat-value {
font-size: 2rem;
font-weight: bold;
text-shadow: 0 0 5px #00ff00;
}
.nodes-section {
margin-bottom: 30px;
}
.section-title {
font-size: 1.5rem;
margin-bottom: 20px;
border-bottom: 2px solid #00ff00;
padding-bottom: 10px;
text-transform: uppercase;
}
.nodes-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(400px, 1fr));
gap: 20px;
}
.node-card {
border: 1px solid #00ff00;
padding: 20px;
background: rgba(0, 50, 0, 0.2);
position: relative;
transition: all 0.3s ease;
}
.node-card:hover {
background: rgba(0, 100, 0, 0.3);
box-shadow: 0 0 20px rgba(0, 255, 0, 0.3);
}
.node-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 15px;
}
.node-name {
font-size: 1.2rem;
font-weight: bold;
}
.node-status {
padding: 5px 10px;
border-radius: 3px;
font-size: 0.9rem;
text-transform: uppercase;
}
.status-online {
background: rgba(0, 255, 0, 0.3);
border: 1px solid #00ff00;
animation: pulse 2s infinite;
}
.status-maintenance {
background: rgba(255, 165, 0, 0.3);
border: 1px solid #ffa500;
color: #ffa500;
}
.status-connecting {
background: rgba(255, 255, 0, 0.3);
border: 1px solid #ffff00;
color: #ffff00;
animation: blink 1s infinite;
}
.node-details {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 10px;
font-size: 0.9rem;
}
.detail-item {
display: flex;
justify-content: space-between;
}
.detail-label {
opacity: 0.8;
}
.detail-value {
font-weight: bold;
}
.system-info {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
gap: 20px;
margin-bottom: 30px;
}
.info-card {
border: 1px solid #00ff00;
padding: 15px;
background: rgba(0, 30, 0, 0.4);
}
.info-title {
font-size: 1rem;
margin-bottom: 10px;
color: #00ff00;
text-transform: uppercase;
}
.info-content {
font-size: 0.9rem;
line-height: 1.4;
}
.terminal {
background: rgba(0, 0, 0, 0.9);
border: 2px solid #00ff00;
padding: 20px;
font-family: 'Courier New', monospace;
max-height: 300px;
overflow-y: auto;
}
.terminal-header {
margin-bottom: 15px;
color: #00ff00;
font-weight: bold;
}
.log-entry {
margin-bottom: 5px;
opacity: 0.8;
}
.log-timestamp {
color: #666;
}
.log-level-error {
color: #ff0000;
}
.log-level-warning {
color: #ffa500;
}
.log-level-info {
color: #00ff00;
}
@keyframes glow {
from { text-shadow: 0 0 10px #00ff00; }
to { text-shadow: 0 0 20px #00ff00, 0 0 30px #00ff00; }
}
@keyframes pulse {
0%, 100% { opacity: 1; }
50% { opacity: 0.5; }
}
@keyframes blink {
0%, 50% { opacity: 1; }
51%, 100% { opacity: 0.3; }
}
@keyframes scan {
0% { left: -100%; }
100% { left: 100%; }
}
.connection-indicator {
position: absolute;
top: 10px;
right: 10px;
width: 12px;
height: 12px;
border-radius: 50%;
background: #ff0000;
animation: pulse 1s infinite;
}
.connection-indicator.connected {
background: #00ff00;
}
.data-flow {
position: relative;
height: 20px;
background: rgba(0, 0, 0, 0.5);
border: 1px solid #00ff00;
margin: 10px 0;
overflow: hidden;
}
.data-flow::after {
content: '';
position: absolute;
top: 0;
left: 0;
height: 100%;
width: 0%;
background: linear-gradient(90deg, transparent, #00ff00, transparent);
animation: dataFlow 2s linear infinite;
}
@keyframes dataFlow {
0% { width: 0%; left: 0%; }
50% { width: 30%; }
100% { width: 0%; left: 100%; }
}
.matrix-text {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
overflow: hidden;
}
.matrix-char {
position: absolute;
color: #00ff00;
font-family: 'Courier New', monospace;
font-size: 14px;
opacity: 0.3;
animation: matrixFall 10s linear infinite;
}
@keyframes matrixFall {
0% { transform: translateY(-100vh); opacity: 0; }
10% { opacity: 0.3; }
90% { opacity: 0.3; }
100% { transform: translateY(100vh); opacity: 0; }
}
</style>
</head>
<body>
<div class="matrix-bg">
<div class="matrix-text" id="matrixText"></div>
</div>
<div class="container">
<div class="header">
<div class="connection-indicator" id="connectionIndicator"></div>
<h1>MY NETWORK ADVANCED MONITOR</h1>
<div class="subtitle">Real-time Network Status & Diagnostics</div>
</div>
<div class="stats-grid">
<div class="stat-card">
<div class="stat-title">Connected Nodes</div>
<div class="stat-value" id="connectedNodes">--</div>
<div class="data-flow"></div>
</div>
<div class="stat-card">
<div class="stat-title">System Uptime</div>
<div class="stat-value" id="systemUptime">--</div>
<div class="data-flow"></div>
</div>
<div class="stat-card">
<div class="stat-title">Data Synced</div>
<div class="stat-value" id="dataSynced">--</div>
<div class="data-flow"></div>
</div>
<div class="stat-card">
<div class="stat-title">Network Health</div>
<div class="stat-value" id="networkHealth">--</div>
<div class="data-flow"></div>
</div>
</div>
<div class="system-info">
<div class="info-card">
<div class="info-title">Current Node Info</div>
<div class="info-content" id="currentNodeInfo">Loading...</div>
</div>
<div class="info-card">
<div class="info-title">System Resources</div>
<div class="info-content" id="systemResources">Loading...</div>
</div>
<div class="info-card">
<div class="info-title">Network Status</div>
<div class="info-content" id="networkStatus">Loading...</div>
</div>
<div class="info-card">
<div class="info-title">Configuration Issues</div>
<div class="info-content" id="configIssues">Loading...</div>
</div>
</div>
<div class="nodes-section">
<div class="section-title">Connected Network Nodes</div>
<div class="nodes-grid" id="nodesGrid">
<!-- Nodes will be populated here -->
</div>
</div>
<div class="terminal">
<div class="terminal-header">SYSTEM LOG STREAM</div>
<div id="logStream">
<div class="log-entry">
<span class="log-timestamp">[2025-07-09 14:04:00]</span>
<span class="log-level-info">[INFO]</span>
MY Network Monitor initialized successfully
</div>
<div class="log-entry">
<span class="log-timestamp">[2025-07-09 14:04:01]</span>
<span class="log-level-info">[INFO]</span>
WebSocket connection established
</div>
</div>
</div>
</div>
<script>
let ws = null;
let reconnectAttempts = 0;
const maxReconnectAttempts = 5;
// Matrix rain effect
function createMatrixRain() {
const matrixText = document.getElementById('matrixText');
const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
setInterval(() => {
const char = document.createElement('div');
char.className = 'matrix-char';
char.textContent = chars[Math.floor(Math.random() * chars.length)];
char.style.left = Math.random() * 100 + '%';
char.style.animationDuration = (Math.random() * 10 + 5) + 's';
char.style.fontSize = (Math.random() * 8 + 10) + 'px';
matrixText.appendChild(char);
setTimeout(() => {
if (char.parentNode) {
char.parentNode.removeChild(char);
}
}, 15000);
}, 200);
}
function connectWebSocket() {
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsUrl = `${protocol}//${window.location.host}/api/my/monitor/ws`;
ws = new WebSocket(wsUrl);
ws.onopen = function() {
console.log('WebSocket connected');
document.getElementById('connectionIndicator').classList.add('connected');
reconnectAttempts = 0;
addLogEntry('WebSocket connection established', 'info');
};
ws.onmessage = function(event) {
const data = JSON.parse(event.data);
updateDashboard(data);
};
ws.onclose = function() {
console.log('WebSocket disconnected');
document.getElementById('connectionIndicator').classList.remove('connected');
addLogEntry('WebSocket connection lost', 'warning');
if (reconnectAttempts < maxReconnectAttempts) {
setTimeout(() => {
reconnectAttempts++;
addLogEntry(`Reconnection attempt ${reconnectAttempts}`, 'info');
connectWebSocket();
}, 3000);
}
};
ws.onerror = function(error) {
console.error('WebSocket error:', error);
addLogEntry('WebSocket error occurred', 'error');
};
}
function updateDashboard(data) {
// Update stats
document.getElementById('connectedNodes').textContent = data.stats.connected_nodes;
document.getElementById('systemUptime').textContent = data.stats.uptime;
document.getElementById('dataSynced').textContent = data.stats.data_synced;
document.getElementById('networkHealth').textContent = data.stats.health;
// Update current node info
const nodeInfo = data.current_node;
document.getElementById('currentNodeInfo').innerHTML = `
<strong>Node ID:</strong> ${nodeInfo.id}<br>
<strong>Name:</strong> ${nodeInfo.name}<br>
<strong>Version:</strong> ${nodeInfo.version}<br>
<strong>Status:</strong> ${nodeInfo.status}
`;
// Update system resources
const resources = data.system_resources;
document.getElementById('systemResources').innerHTML = `
<strong>CPU Usage:</strong> ${resources.cpu_usage}%<br>
<strong>Memory:</strong> ${resources.memory_usage}%<br>
<strong>Disk:</strong> ${resources.disk_usage}%<br>
<strong>Network I/O:</strong> ${resources.network_io}
`;
// Update network status
document.getElementById('networkStatus').innerHTML = `
<strong>Protocol:</strong> MY Network v2.0<br>
<strong>Port:</strong> 15100<br>
<strong>Mode:</strong> ${data.network_status.mode}<br>
<strong>Peer Count:</strong> ${data.network_status.peers}
`;
// Update configuration issues
const issues = data.config_issues;
let issuesHtml = '';
if (issues.length > 0) {
issuesHtml = issues.map(issue => `⚠ ${issue}`).join('<br>');
} else {
issuesHtml = '<span style="color: #00ff00;">No configuration issues</span>';
}
document.getElementById('configIssues').innerHTML = issuesHtml;
// Update nodes grid
updateNodesGrid(data.nodes);
}
function updateNodesGrid(nodes) {
const grid = document.getElementById('nodesGrid');
grid.innerHTML = '';
nodes.forEach(node => {
const nodeCard = document.createElement('div');
nodeCard.className = 'node-card';
const statusClass = `status-${node.status}`;
nodeCard.innerHTML = `
<div class="node-header">
<div class="node-name">${node.name}</div>
<div class="node-status ${statusClass}">${node.status}</div>
</div>
<div class="node-details">
<div class="detail-item">
<span class="detail-label">Location:</span>
<span class="detail-value">${node.location}</span>
</div>
<div class="detail-item">
<span class="detail-label">Uptime:</span>
<span class="detail-value">${node.uptime}</span>
</div>
<div class="detail-item">
<span class="detail-label">Connections:</span>
<span class="detail-value">${node.connections}</span>
</div>
<div class="detail-item">
<span class="detail-label">Data Synced:</span>
<span class="detail-value">${node.data_synced}</span>
</div>
<div class="detail-item">
<span class="detail-label">IP Address:</span>
<span class="detail-value">${node.ip}</span>
</div>
<div class="detail-item">
<span class="detail-label">Version:</span>
<span class="detail-value">${node.version}</span>
</div>
</div>
`;
grid.appendChild(nodeCard);
});
}
function addLogEntry(message, level = 'info') {
const logStream = document.getElementById('logStream');
const timestamp = new Date().toISOString().slice(0, 19).replace('T', ' ');
const entry = document.createElement('div');
entry.className = 'log-entry';
entry.innerHTML = `
<span class="log-timestamp">[${timestamp}]</span>
<span class="log-level-${level}">[${level.toUpperCase()}]</span>
${message}
`;
logStream.appendChild(entry);
logStream.scrollTop = logStream.scrollHeight;
// Keep only last 50 entries
while (logStream.children.length > 50) {
logStream.removeChild(logStream.firstChild);
}
}
// Fallback data loading if WebSocket fails
function loadFallbackData() {
fetch('/api/my/monitor/status')
.then(response => response.json())
.then(data => updateDashboard(data))
.catch(error => {
console.error('Failed to load fallback data:', error);
addLogEntry('Failed to load monitoring data', 'error');
});
}
// Initialize
createMatrixRain();
connectWebSocket();
// Load fallback data every 5 seconds if WebSocket is not connected
setInterval(() => {
if (!ws || ws.readyState !== WebSocket.OPEN) {
loadFallbackData();
}
}, 5000);
// Add some random log entries for demo
setInterval(() => {
const messages = [
'Network heartbeat received',
'Data synchronization completed',
'Peer discovery scan finished',
'Security check passed',
'Cache optimization complete'
];
const message = messages[Math.floor(Math.random() * messages.length)];
addLogEntry(message, 'info');
}, 8000);
</script>
</body>
</html>
"""
return HTMLResponse(content=dashboard_html)
@router.get("/status")
async def get_monitoring_status():
"""Get current monitoring status data"""
# Get system info
try:
cpu_percent = psutil.cpu_percent(interval=1)
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
system_resources = {
"cpu_usage": round(cpu_percent, 1),
"memory_usage": round(memory.percent, 1),
"disk_usage": round(disk.percent, 1),
"network_io": "Active"
}
except Exception as e:
logger.error(f"Failed to get system resources: {e}")
system_resources = {
"cpu_usage": 0,
"memory_usage": 0,
"disk_usage": 0,
"network_io": "Unknown"
}
# Configuration issues from logs/environment
config_issues = [
"Pydantic validation errors in configuration",
"Extra environment variables not permitted",
"Telegram API token format validation failed",
"MY Network running in limited mode"
]
return {
"timestamp": datetime.now().isoformat(),
"stats": {
"connected_nodes": len([n for n in network_nodes if n["status"] == "online"]),
"uptime": "2h 18m",
"data_synced": "87%",
"health": "Limited"
},
"current_node": {
"id": "node_001_local_dev",
"name": "Primary Development Node",
"version": "2.0.0",
"status": "limited_mode"
},
"system_resources": system_resources,
"network_status": {
"mode": "Development",
"peers": 3,
"protocol": "MY Network v2.0"
},
"config_issues": config_issues,
"nodes": network_nodes
}
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
"""WebSocket endpoint for real-time monitoring updates"""
await websocket.accept()
connected_clients.append(websocket)
try:
while True:
# Send periodic updates
status_data = await get_monitoring_status()
await websocket.send_text(json.dumps(status_data))
await asyncio.sleep(2) # Update every 2 seconds
except WebSocketDisconnect:
connected_clients.remove(websocket)
logger.info("Client disconnected from monitoring WebSocket")
except Exception as e:
logger.error(f"WebSocket error: {e}")
if websocket in connected_clients:
connected_clients.remove(websocket)
@router.get("/nodes")
async def get_network_nodes():
"""Get list of all network nodes"""
return {"nodes": network_nodes}
@router.get("/node/{node_id}")
async def get_node_details(node_id: str):
"""Get detailed information about a specific node"""
node = next((n for n in network_nodes if n["id"] == node_id), None)
if not node:
raise HTTPException(status_code=404, detail="Node not found")
# Add more detailed info
detailed_node = {
**node,
"detailed_stats": {
"cpu_usage": "23%",
"memory_usage": "67%",
"disk_usage": "45%",
"network_in": "150 KB/s",
"network_out": "89 KB/s",
"active_connections": 12,
"data_transferred": "1.2 GB",
"sync_progress": "87%"
},
"services": {
"http_server": "running",
"p2p_network": "limited",
"database": "connected",
"redis_cache": "connected",
"blockchain_sync": "paused"
}
}
return {"node": detailed_node}
@router.post("/simulate_event")
async def simulate_network_event(event_data: Dict[str, Any]):
"""Simulate network events for testing"""
# Broadcast event to all connected WebSocket clients
event_message = {
"type": "network_event",
"timestamp": datetime.now().isoformat(),
"event": event_data
}
for client in connected_clients[:]:
try:
await client.send_text(json.dumps(event_message))
except Exception as e:
logger.error(f"Failed to send event to client: {e}")
connected_clients.remove(client)
return {"status": "Event simulated", "clients_notified": len(connected_clients)}

View File

@@ -0,0 +1,379 @@
"""MY Network Monitoring Interface - веб-интерфейс мониторинга сети в хакерском стиле."""
import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import Dict, List, Any
from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from pathlib import Path
logger = logging.getLogger(__name__)
# Create the monitoring router
router = APIRouter(prefix="/api/my/monitor", tags=["MY Network Monitoring"])
# Configure templates
templates_dir = Path(__file__).parent.parent.parent / "templates"
templates_dir.mkdir(exist_ok=True)
templates = Jinja2Templates(directory=str(templates_dir))
def get_node_service():
"""Получить сервис ноды."""
try:
from app.core.my_network.node_service import get_node_service
return get_node_service()
except Exception as e:
logger.error(f"Error getting node service: {e}")
return None
@router.get("/", response_class=HTMLResponse)
async def monitoring_dashboard(request: Request):
"""Главная страница мониторинга MY Network."""
try:
# Collect data for the dashboard
node_service = get_node_service()
if not node_service:
monitoring_data = {
"status": "offline",
"error": "MY Network service not available"
}
else:
# Gather data from all components
node_info = await node_service.get_node_info()
peers_info = await node_service.get_peers_info()
sync_status = await node_service.sync_manager.get_sync_status()
monitoring_data = {
"status": "online",
"node_info": node_info,
"peers_info": peers_info,
"sync_status": sync_status,
"timestamp": datetime.utcnow().isoformat()
}
return templates.TemplateResponse("my_network_monitor.html", {
"request": request,
"monitoring_data": monitoring_data
})
except Exception as e:
logger.error(f"Error rendering monitoring dashboard: {e}")
# Fallback HTML if templates are unavailable
return HTMLResponse(content=generate_fallback_html(str(e)))
@router.get("/ascii")
async def get_ascii_status():
"""Получить ASCII статус сети."""
try:
node_service = get_node_service()
if not node_service:
return {"ascii": generate_offline_ascii(), "status": "offline"}
# Fetch the data
node_info = await node_service.get_node_info()
peers_info = await node_service.get_peers_info()
sync_status = await node_service.sync_manager.get_sync_status()
# Render the ASCII art
ascii_art = await generate_network_ascii(node_info, peers_info, sync_status)
return {
"ascii": ascii_art,
"status": "online",
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error generating ASCII status: {e}")
return {"ascii": generate_error_ascii(str(e)), "status": "error"}
@router.get("/live")
async def live_monitoring_data():
"""Получить живые данные для мониторинга."""
try:
node_service = get_node_service()
if not node_service:
raise HTTPException(status_code=503, detail="MY Network service unavailable")
# Fetch fresh data
node_info = await node_service.get_node_info()
peers_info = await node_service.get_peers_info()
sync_status = await node_service.sync_manager.get_sync_status()
# Network statistics
network_stats = {
"connected_peers": peers_info["peer_count"],
"active_syncs": sync_status["active_syncs"],
"queue_size": sync_status["queue_size"],
"uptime": node_info["uptime"],
"status": node_info["status"]
}
return {
"success": True,
"data": {
"node_info": node_info,
"network_stats": network_stats,
"peers": peers_info["peers"][:10], # Показать только первые 10 пиров
"sync_status": sync_status
},
"timestamp": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting live monitoring data: {e}")
raise HTTPException(status_code=500, detail=str(e))
async def generate_network_ascii(node_info: Dict[str, Any], peers_info: Dict[str, Any], sync_status: Dict[str, Any]) -> str:
"""Генерировать ASCII представление состояния сети."""
ascii_parts = []
# Header
ascii_parts.append("""
MY NETWORK v2.0
Distributed Content Protocol
""")
# Node information
status_indicator = "🟢" if node_info.get("status") == "running" else "🔴"
uptime_hours = int(node_info.get("uptime", 0) / 3600)
ascii_parts.append(f"""
NODE STATUS
Node ID: {node_info.get('node_id', 'unknown')[:16]}...
Status: {status_indicator} {node_info.get('status', 'unknown').upper()}
Uptime: {uptime_hours}h {int((node_info.get('uptime', 0) % 3600) / 60)}m
Version: MY Network {node_info.get('version', '2.0')}
""")
# Peer information
peer_count = peers_info.get("peer_count", 0)
peer_status = "🌐" if peer_count > 0 else "🏝️"
ascii_parts.append(f"""
NETWORK STATUS
Connected Peers: {peer_status} {peer_count:>3}
Known Nodes: {len(peers_info.get('peers', [])):>3}
Network Health: {'CONNECTED' if peer_count > 0 else 'ISOLATED':>9}
""")
# Sync status
sync_running = sync_status.get("is_running", False)
active_syncs = sync_status.get("active_syncs", 0)
queue_size = sync_status.get("queue_size", 0)
sync_indicator = "" if sync_running else "⏸️"
ascii_parts.append(f"""
SYNC STATUS
Sync Engine: {sync_indicator} {'RUNNING' if sync_running else 'STOPPED':>7}
Active Syncs: {active_syncs:>3}
Queue Size: {queue_size:>3}
Workers: {sync_status.get('workers_count', 0):>3}
""")
# Network visualization
if peer_count > 0:
ascii_parts.append(generate_network_topology(peers_info.get("peers", [])[:6]))
# Recent sync events
recent_syncs = sync_status.get("recent_syncs", [])
if recent_syncs:
ascii_parts.append(generate_sync_history(recent_syncs[-5:]))
# Footer
current_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
ascii_parts.append(f"""
Last Updated: {current_time}
MY Network Protocol - Decentralized Content Distribution System
""")
return "".join(ascii_parts)
def generate_network_topology(peers: List[Dict[str, Any]]) -> str:
"""Генерировать ASCII топологию сети."""
topology = ["""
NETWORK TOPOLOGY
[THIS NODE]
"""]
if len(peers) == 1:
topology.append("│ │ │")
topology.append(f"│ [{peers[0].get('node_id', 'unknown')[:8]}...] │")
elif len(peers) <= 3:
topology.append("│ ┌───────┼───────┐ │")
for i, peer in enumerate(peers):
spaces = " " if i == 0 else (" " if i == 1 else " ")
topology.append(f"{spaces}[{peer.get('node_id', 'unknown')[:8]}...] │")
else:
topology.append("│ ┌───────┬───────┼───────┬───────┐ │")
topology.append("│ │ │ │ │ │ │")
for i, peer in enumerate(peers[:5]):
if i < 5:
spaces = [" ", " ", " ", " ", " "][i]
topology.append(f"{spaces}[{peer.get('node_id', 'unknown')[:6]}] │")
if len(peers) > 5:
topology.append("│ ... │")
topology.append("│ │")
topology.append("└──────────────────────────────────────────────────────────────────────────────┘")
return "\n".join(topology) + "\n"
def generate_sync_history(recent_syncs: List[Dict[str, Any]]) -> str:
"""Генерировать историю синхронизации."""
history = ["""
RECENT SYNC ACTIVITY """]
if not recent_syncs:
history.append("│ No recent sync activity │")
else:
for sync in recent_syncs:
content_hash = sync.get("content_hash", "unknown")[:12]
status = sync.get("status", "unknown")
status_icon = {"completed": "", "failed": "", "partial": "⚠️"}.get(status, "")
history.append(f"{status_icon} {content_hash}... - {status.upper():>9}")
history.append("└──────────────────────────────────────────────────────────────────────────────┘")
return "\n".join(history) + "\n"
def generate_offline_ascii() -> str:
"""Генерировать ASCII для офлайн состояния."""
return """
MY NETWORK v2.0
Distributed Content Protocol
SYSTEM STATUS
🔴 OFFLINE
MY Network service is not available
Status: OFFLINE - Service not initialized
"""
def generate_error_ascii(error_message: str) -> str:
"""Генерировать ASCII для ошибки."""
return f"""
MY NETWORK v2.0
Distributed Content Protocol
ERROR STATE
ERROR
{error_message[:64]:^64}
Status: ERROR - Check system logs for details
"""
def generate_fallback_html(error_message: str = "") -> str:
"""Генерировать fallback HTML если шаблоны не работают."""
return f'''
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>MY Network Monitor</title>
<style>
body {{
background: #000;
color: #0f0;
font-family: 'Courier New', monospace;
margin: 0;
padding: 20px;
overflow-x: auto;
}}
.container {{
max-width: 1200px;
margin: 0 auto;
}}
.ascii-art {{
white-space: pre;
font-size: 12px;
line-height: 1.2;
}}
.error {{
color: #f00;
text-align: center;
padding: 20px;
}}
.refresh-btn {{
background: #0f0;
color: #000;
border: none;
padding: 10px 20px;
font-family: inherit;
cursor: pointer;
margin: 20px 0;
}}
.refresh-btn:hover {{
background: #fff;
}}
</style>
</head>
<body>
<div class="container">
<div class="ascii-art">
{generate_error_ascii(error_message) if error_message else generate_offline_ascii()}
</div>
<button class="refresh-btn" onclick="location.reload()">REFRESH SYSTEM STATUS</button>
<div class="error">
{f"Error: {error_message}" if error_message else "MY Network service not available"}
</div>
</div>
<script>
// Auto-refresh every 30 seconds
setTimeout(() => location.reload(), 30000);
</script>
</body>
</html>
'''
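A hedged usage sketch for the `/ascii` endpoint above, assuming the `httpx` package and a local node (the refresh interval is arbitrary):

import time

import httpx  # assumed client library


def watch_ascii(base_url: str = "http://localhost:15100") -> None:
    """Redraw the ASCII status in the terminal every few seconds."""
    while True:
        data = httpx.get(f"{base_url}/api/my/monitor/ascii").json()
        print("\033c", end="")  # ANSI full reset: clear the terminal
        print(data["ascii"])
        time.sleep(5)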

View File

@@ -0,0 +1,653 @@
"""MY Network API Routes - эндпоинты для работы с распределенной сетью."""
import asyncio
import logging
import json
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Query
from fastapi.responses import FileResponse, StreamingResponse
from sqlalchemy import select, and_, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import db_manager
from app.core.security import get_current_user_optional
from app.core.cache import cache
# Import content models directly to avoid circular imports
from app.core.models.content_models import StoredContent as Content, UserContent as ContentMetadata
logger = logging.getLogger(__name__)
# Create the MY Network API router
router = APIRouter(prefix="/api/my", tags=["MY Network"])
def get_node_service():
"""Get the node service."""
try:
from app.core.my_network.node_service import get_node_service
return get_node_service()
except Exception as e:
logger.error(f"Error getting node service: {e}")
raise HTTPException(status_code=503, detail="MY Network service unavailable")
async def get_db_session():
"""Yield a request-scoped database session; this is the dependency injected by the routes below."""
async with db_manager.get_session() as session:
yield session
@router.get("/node/info")
async def get_node_info():
"""Получить информацию о текущей ноде."""
try:
node_service = get_node_service()
if not node_service:
raise HTTPException(status_code=503, detail="Node service not available")
node_info = await node_service.get_node_info()
return {
"success": True,
"data": node_info,
"timestamp": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting node info: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/node/peers")
async def get_node_peers():
"""Получить список подключенных пиров."""
try:
node_service = get_node_service()
peers_info = await node_service.get_peers_info()
return {
"success": True,
"data": {
"connected_peers": peers_info["connected_peers"],
"peer_count": peers_info["peer_count"],
"peers": peers_info["peers"]
},
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error getting peers: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/node/peers/connect")
async def connect_to_peer(peer_data: Dict[str, Any]):
"""Подключиться к новому пиру."""
try:
peer_address = peer_data.get("address")
if not peer_address:
raise HTTPException(status_code=400, detail="Peer address is required")
node_service = get_node_service()
success = await node_service.peer_manager.connect_to_peer(peer_address)
if success:
return {
"success": True,
"message": f"Successfully connected to peer: {peer_address}",
"timestamp": datetime.utcnow().isoformat()
}
else:
raise HTTPException(status_code=400, detail="Failed to connect to peer")
except HTTPException:
raise
except Exception as e:
logger.error(f"Error connecting to peer: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.delete("/node/peers/{peer_id}")
async def disconnect_peer(peer_id: str):
"""Отключиться от пира."""
try:
node_service = get_node_service()
success = await node_service.peer_manager.disconnect_peer(peer_id)
if success:
return {
"success": True,
"message": f"Successfully disconnected from peer: {peer_id}",
"timestamp": datetime.utcnow().isoformat()
}
else:
raise HTTPException(status_code=404, detail="Peer not found or already disconnected")
except HTTPException:
raise
except Exception as e:
logger.error(f"Error disconnecting peer: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/content/list")
async def get_content_list(
limit: int = Query(100, ge=1, le=1000),
offset: int = Query(0, ge=0),
session: AsyncSession = Depends(get_db_session)
):
"""Получить список доступного контента."""
try:
# Cache the result for 5 minutes
cache_key = f"my_network:content_list:{limit}:{offset}"
cached_result = await cache.get(cache_key)
if cached_result:
return json.loads(cached_result)
# Fetch content from the database
stmt = (
select(Content, ContentMetadata)
.outerjoin(ContentMetadata, Content.id == ContentMetadata.content_id)
.where(Content.disabled == False)
.order_by(Content.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await session.execute(stmt)
content_items = []
for content, metadata in result:
content_data = {
"hash": content.hash,
"filename": content.filename,
"file_size": content.file_size,
"content_type": content.content_type,
"mime_type": content.mime_type,
"created_at": content.created_at.isoformat(),
"encrypted": content.encrypted,
"metadata": metadata.to_dict() if metadata else {}
}
content_items.append(content_data)
# Get the total count
count_stmt = select(func.count(Content.id)).where(Content.disabled == False)
count_result = await session.execute(count_stmt)
total_count = count_result.scalar()
response_data = {
"success": True,
"data": {
"content": content_items,
"total": total_count,
"limit": limit,
"offset": offset
},
"timestamp": datetime.utcnow().isoformat()
}
# Cache the result
await cache.set(cache_key, json.dumps(response_data), expire=300)
return response_data
except Exception as e:
logger.error(f"Error getting content list: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/content/{content_hash}/exists")
async def check_content_exists(
content_hash: str,
session: AsyncSession = Depends(get_db_session)
):
"""Проверить существование контента по хешу."""
try:
# Cache the result for 30 minutes
cache_key = f"my_network:content_exists:{content_hash}"
cached_result = await cache.get(cache_key)
if cached_result is not None:
return {"exists": cached_result == "true", "hash": content_hash}
# Check the database
stmt = select(Content.id).where(
and_(
Content.disabled == False,
Content.hash == content_hash
)
)
result = await session.execute(stmt)
exists = result.scalar_one_or_none() is not None
# Cache the result
await cache.set(cache_key, "true" if exists else "false", expire=1800)
return {
"exists": exists,
"hash": content_hash,
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error checking content existence: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/content/{content_hash}/metadata")
async def get_content_metadata(
content_hash: str,
session: AsyncSession = Depends(get_db_session)
):
"""Получить метаданные контента."""
try:
# Cache the result for 10 minutes
cache_key = f"my_network:content_metadata:{content_hash}"
cached_result = await cache.get(cache_key)
if cached_result:
return json.loads(cached_result)
# Look up the content in the database
stmt = (
select(Content, ContentMetadata)
.outerjoin(ContentMetadata, Content.id == ContentMetadata.content_id)
.where(
and_(
Content.disabled == False,
Content.hash == content_hash
)
)
)
result = await session.execute(stmt)
content_data = result.first()
if not content_data:
raise HTTPException(status_code=404, detail="Content not found")
content, metadata = content_data
response_data = {
"success": True,
"data": {
"hash": content_hash,
"filename": content.filename,
"file_size": content.file_size,
"content_type": content.content_type,
"mime_type": content.mime_type,
"created_at": content.created_at.isoformat(),
"updated_at": content.updated_at.isoformat() if content.updated_at else None,
"encrypted": content.encrypted,
"processing_status": content.processing_status,
"metadata": metadata.to_dict() if metadata else {}
},
"timestamp": datetime.utcnow().isoformat()
}
# Cache the result
await cache.set(cache_key, json.dumps(response_data), expire=600)
return response_data
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting content metadata: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/content/{content_hash}/download")
async def download_content(
content_hash: str,
session: AsyncSession = Depends(get_db_session)
):
"""Скачать контент по хешу."""
try:
# Look up the content in the database
stmt = select(Content).where(
and_(
Content.disabled == False,
Content.hash == content_hash
)
)
result = await session.execute(stmt)
content = result.scalar_one_or_none()
if not content:
raise HTTPException(status_code=404, detail="Content not found")
# Verify the file exists on disk
file_path = Path(content.file_path)
if not file_path.exists():
raise HTTPException(status_code=404, detail="File not found on disk")
# Return the file
return FileResponse(
path=str(file_path),
filename=content.filename,
media_type=content.mime_type or "application/octet-stream"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error downloading content: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/content/{content_hash}/upload")
async def upload_content(
content_hash: str,
file: UploadFile = File(...),
session: AsyncSession = Depends(get_db_session)
):
"""Загрузить контент в ноду."""
try:
# Check whether the content already exists
exists_stmt = select(Content.id).where(
and_(
Content.disabled == False,
Content.hash == content_hash
)
)
exists_result = await session.execute(exists_stmt)
if exists_result.scalar_one_or_none():
return {
"success": True,
"message": "Content already exists",
"hash": content_hash
}
# Create the storage directory
storage_path = Path("./storage/my-network/received")
storage_path.mkdir(parents=True, exist_ok=True)
# Save the file
file_path = storage_path / f"{content_hash}_{file.filename}"
with open(file_path, "wb") as buffer:
content_data = await file.read()
buffer.write(content_data)
# Compute hashes for verification
import hashlib
md5_hash = hashlib.md5(content_data).hexdigest()
sha256_hash = hashlib.sha256(content_data).hexdigest()
# Verify the hash matches
if content_hash not in [md5_hash, sha256_hash]:
file_path.unlink()  # delete the file
raise HTTPException(status_code=400, detail="Content hash mismatch")
# Persist to the database
new_content = Content(
filename=file.filename,
hash=sha256_hash,  # use SHA-256 as the primary hash
file_size=len(content_data),
content_type=file.filename.split('.')[-1] if '.' in file.filename else 'unknown',
mime_type=file.content_type or "application/octet-stream",
file_path=str(file_path),
disabled=False,
processing_status="ready"
)
session.add(new_content)
await session.commit()
logger.info(f"Successfully uploaded content {content_hash}")
return {
"success": True,
"message": "Content uploaded successfully",
"hash": content_hash,
"content_id": new_content.id,
"timestamp": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error uploading content: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/content/replicate")
async def replicate_content(replication_request: Dict[str, Any]):
"""Принять запрос на репликацию контента."""
try:
content_hash = replication_request.get("content_hash")
metadata = replication_request.get("metadata", {})
source_node = replication_request.get("source_node")
if not content_hash:
raise HTTPException(status_code=400, detail="Content hash is required")
# Check whether replication is needed
async with db_manager.get_session() as session:
exists_stmt = select(Content.id).where(
and_(
Content.disabled == False,
Content.hash == content_hash
)
)
exists_result = await session.execute(exists_stmt)
if exists_result.scalar_one_or_none():
return {
"success": True,
"message": "Content already exists, replication not needed",
"hash": content_hash
}
# Prepare for replication
logger.info(f"Accepting replication request for {content_hash} from {source_node}")
return {
"success": True,
"message": "Replication request accepted",
"hash": content_hash,
"ready_for_upload": True,
"timestamp": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error processing replication request: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/sync/status")
async def get_sync_status():
"""Получить статус синхронизации."""
try:
node_service = get_node_service()
sync_status = await node_service.sync_manager.get_sync_status()
return {
"success": True,
"data": sync_status,
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error getting sync status: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/sync/start")
async def start_network_sync():
"""Запустить синхронизацию с сетью."""
try:
node_service = get_node_service()
sync_result = await node_service.sync_manager.sync_with_network()
return {
"success": True,
"data": sync_result,
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error starting network sync: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/sync/content/{content_hash}")
async def get_content_sync_status(content_hash: str):
"""Получить статус синхронизации конкретного контента."""
try:
node_service = get_node_service()
sync_status = await node_service.sync_manager.get_content_sync_status(content_hash)
return {
"success": True,
"data": sync_status,
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error getting content sync status: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/content/{content_hash}/replicate")
async def replicate_content_to_nodes(
content_hash: str,
replication_config: Dict[str, Any]
):
"""Реплицировать контент на указанные ноды."""
try:
target_nodes = replication_config.get("target_nodes", [])
if not target_nodes:
raise HTTPException(status_code=400, detail="Target nodes are required")
node_service = get_node_service()
replication_result = await node_service.sync_manager.replicate_content_to_nodes(
content_hash,
target_nodes
)
return {
"success": True,
"data": replication_result,
"timestamp": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error replicating content: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/network/stats")
async def get_network_stats():
"""Получить статистику сети."""
try:
node_service = get_node_service()
# Fetch node and peer information
node_info = await node_service.get_node_info()
peers_info = await node_service.get_peers_info()
sync_status = await node_service.sync_manager.get_sync_status()
# Content statistics
async with db_manager.get_session() as session:
# Total content count
content_count_stmt = select(func.count(Content.id)).where(Content.disabled == False)
content_count_result = await session.execute(content_count_stmt)
total_content = content_count_result.scalar()
# Total content size
size_stmt = select(func.sum(Content.file_size)).where(Content.disabled == False)
size_result = await session.execute(size_stmt)
total_size = size_result.scalar() or 0
# Content by type
type_stmt = select(Content.content_type, func.count(Content.id)).where(Content.disabled == False).group_by(Content.content_type)
type_result = await session.execute(type_stmt)
content_by_type = {row[0]: row[1] for row in type_result}
network_stats = {
"node_info": {
"node_id": node_info["node_id"],
"uptime": node_info["uptime"],
"version": node_info["version"],
"status": node_info["status"]
},
"network": {
"connected_peers": peers_info["peer_count"],
"known_peers": len(peers_info["peers"]),
"network_health": "good" if peers_info["peer_count"] > 0 else "isolated"
},
"content": {
"total_items": total_content,
"total_size_bytes": total_size,
"total_size_mb": round(total_size / (1024 * 1024), 2),
"content_by_type": content_by_type
},
"sync": {
"active_syncs": sync_status["active_syncs"],
"queue_size": sync_status["queue_size"],
"is_running": sync_status["is_running"]
}
}
return {
"success": True,
"data": network_stats,
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error getting network stats: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/health")
async def health_check():
"""Проверка здоровья MY Network ноды."""
try:
node_service = get_node_service()
# Basic service checks
health_status = {
"status": "healthy",
"timestamp": datetime.utcnow().isoformat(),
"services": {
"node_service": node_service is not None,
"peer_manager": hasattr(node_service, 'peer_manager') if node_service else False,
"sync_manager": hasattr(node_service, 'sync_manager') if node_service else False,
"database": True # Если дошли до этой точки, БД работает
}
}
# Check peer connectivity
if node_service:
peers_info = await node_service.get_peers_info()
health_status["network"] = {
"connected_peers": peers_info["peer_count"],
"status": "connected" if peers_info["peer_count"] > 0 else "isolated"
}
# Determine the overall status
if not all(health_status["services"].values()):
health_status["status"] = "unhealthy"
elif node_service and peers_info["peer_count"] == 0:
health_status["status"] = "isolated"
return health_status
except Exception as e:
logger.error(f"Health check failed: {e}")
return {
"status": "unhealthy",
"error": str(e),
"timestamp": datetime.utcnow().isoformat()
}
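Since the upload endpoint above recomputes MD5/SHA-256 over the received bytes and rejects on mismatch, a client has to address content by its own digest. A minimal sketch, assuming the `httpx` package and a local node (URL and path are illustrative):

import hashlib
from pathlib import Path

import httpx  # assumed client library


def upload_file(path: str, base_url: str = "http://localhost:15100") -> dict:
    """Upload a file under its SHA-256 digest, matching the server-side check."""
    data = Path(path).read_bytes()
    content_hash = hashlib.sha256(data).hexdigest()
    resp = httpx.post(
        f"{base_url}/api/my/content/{content_hash}/upload",
        files={"file": (Path(path).name, data)},
    )
    resp.raise_for_status()
    return resp.json()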

View File

@@ -1,312 +0,0 @@
from __future__ import annotations
import json
from datetime import datetime
from typing import Dict, Any
from app.core._utils.b58 import b58decode
from sanic import response
from urllib.parse import urlparse
from app.core.logger import make_log
from app.core.network.constants import CURRENT_PROTOCOL_VERSION, NODE_TYPE_PRIVATE
from app.core.network.config import NODE_PRIVACY
from app.core.network.handshake import build_handshake_payload, compute_node_info, sign_response
from app.core.network.nodes import upsert_known_node, list_known_public_nodes
from app.core.network.semver import compatibility
from app.core.network.guard import check_rate_limit, check_timestamp_fresh, check_and_remember_nonce
from app.core.network.config import HANDSHAKE_TS_TOLERANCE_SEC
from app.core.ipfs_client import swarm_connect
from app.core._config import PROJECT_HOST
from app.core.events.service import record_event
from app.core.network.asn import resolver as asn_resolver
from app.core.network.dht import compute_node_id, dht_config, ReachabilityReceipt
def _port_from_public_host(public_host: str) -> int:
"""Return an integer port extracted from a public_host URL or host:port string."""
if not public_host:
return 80
parsed = urlparse(public_host)
if parsed.scheme:
if parsed.port:
return parsed.port
return 443 if parsed.scheme == "https" else 80
host_port = public_host.strip()
if ":" in host_port:
candidate = host_port.rsplit(":", 1)[-1]
try:
return int(candidate)
except (TypeError, ValueError):
pass
return 80
def _extract_ipfs_meta(payload: Dict[str, Any]) -> Dict[str, Any]:
ipfs = payload or {}
multiaddrs = ipfs.get("multiaddrs") or []
if not isinstance(multiaddrs, list):
multiaddrs = [multiaddrs]
normalized_multiaddrs = [str(m) for m in multiaddrs if m]
meta: Dict[str, Any] = {}
if normalized_multiaddrs:
meta["multiaddrs"] = normalized_multiaddrs
peer_id = ipfs.get("peer_id")
if peer_id:
meta["peer_id"] = str(peer_id)
agent = ipfs.get("agent_version") or ipfs.get("agentVersion")
if agent:
meta["agent_version"] = str(agent)
return meta
async def _connect_ipfs_multiaddrs(addrs):
for addr in addrs or []:
try:
await swarm_connect(addr)
except Exception:
pass
async def s_api_v1_network_info(request):
async with request.app.ctx.memory.transaction("network.info"):
node = await compute_node_info(request.ctx.db_session)
make_log("Network", "info served")
return response.json({"node": node})
async def s_api_v1_network_nodes(request):
rows = await list_known_public_nodes(request.ctx.db_session)
make_log("Network", f"nodes list count={len(rows)}")
return response.json({
"count": len(rows),
"nodes": rows,
})
async def s_api_v1_network_handshake(request):
# Handshake accepted regardless of our privacy; private nodes typically have no external endpoint
# Rate limit per remote IP
remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
if not check_rate_limit(request.app.ctx.memory, remote_ip):
return response.json({"error": "RATE_LIMIT"}, status=429)
data = request.json or {}
required = ["version", "schema_version", "public_key", "node_id", "node_type", "metrics", "timestamp", "signature"]
for f in required:
if f not in data:
return response.json({"error": f"Missing field {f}"}, status=400)
# public_host is required for public nodes only
if data.get("node_type") != "private" and not data.get("public_host"):
return response.json({"error": "Missing field public_host"}, status=400)
# Timestamp freshness
if not check_timestamp_fresh(data.get("timestamp")):
return response.json({"error": "STALE_TIMESTAMP", "tolerance_sec": HANDSHAKE_TS_TOLERANCE_SEC}, status=400)
# Nonce replay protection (best-effort)
if not data.get("nonce") or not check_and_remember_nonce(request.app.ctx.memory, data.get("public_key"), data.get("nonce")):
return response.json({"error": "NONCE_REPLAY"}, status=400)
# Base schema and identity checks
if data.get("schema_version") != dht_config.schema_version:
return response.json({"error": "UNSUPPORTED_SCHEMA_VERSION"}, status=400)
try:
expected_node_id = compute_node_id(b58decode(data["public_key"]))
except Exception:
return response.json({"error": "BAD_PUBLIC_KEY"}, status=400)
if data.get("node_id") != expected_node_id:
return response.json({"error": "NODE_ID_MISMATCH"}, status=400)
peer_version = str(data.get("version"))
ipfs_meta = _extract_ipfs_meta(data.get("ipfs") or {})
comp = compatibility(peer_version, CURRENT_PROTOCOL_VERSION)
if comp == "blocked":
# We still store the node but respond with 409
try:
await upsert_known_node(
request.ctx.db_session,
host=data.get("public_host"),
port=_port_from_public_host(data.get("public_host")),
public_key=str(data.get("public_key")),
meta={
"version": peer_version,
"compatibility": comp,
"is_public": data.get("node_type", "public") != "private",
"public_host": data.get("public_host"),
"unsupported_last_checked_at": datetime.utcnow().isoformat(),
"ipfs": ipfs_meta,
}
)
except Exception:
pass
make_log("Handshake", f"Reject incompatible peer {data.get('public_host')} peer={peer_version} current={CURRENT_PROTOCOL_VERSION}")
return response.json({
"error": "INCOMPATIBLE_VERSION",
"compatibility": comp,
"current": CURRENT_PROTOCOL_VERSION,
"peer": peer_version,
}, status=409)
# Verify the Ed25519 signature over the canonically serialized payload; reject with BAD_SIGNATURE on failure.
signed_fields = {k: v for (k, v) in data.items() if k != "signature"}
blob = json.dumps(signed_fields, sort_keys=True, separators=(",", ":")).encode()
ok = False
try:
import nacl.signing, nacl.encoding # type: ignore
vk = nacl.signing.VerifyKey(b58decode(data.get("public_key", "")))
sig = b58decode(data.get("signature", ""))
vk.verify(blob, sig)
ok = True
except Exception as e:
ok = False
if not ok:
make_log("Handshake", f"Signature verification failed from {data.get('public_host')}", level='warning')
return response.json({"error": "BAD_SIGNATURE"}, status=400)
# Update membership / reachability information
try:
membership_mgr = getattr(request.app.ctx.memory, "membership", None)
if membership_mgr:
remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip() or None
# Determine caller ASN using advertised value or resolver
remote_asn = data.get("asn")
if remote_asn is None:
remote_asn = await asn_resolver.resolve_async(remote_ip, request.ctx.db_session)
else:
if remote_ip:
asn_resolver.learn(remote_ip, int(remote_asn))
membership_mgr.update_member(
node_id=data["node_id"],
public_key=data["public_key"],
ip=remote_ip,
asn=int(remote_asn) if remote_asn is not None else None,
metadata={
"capabilities": data.get("capabilities", {}),
"metrics": data.get("metrics", {}),
"public_host": data.get("public_host"),
},
)
for receipt in data.get("reachability_receipts") or []:
if not receipt.get("target_id") or not receipt.get("issuer_id"):
continue
try:
# Only accept receipts issued by the caller
issuer_id = str(receipt.get("issuer_id"))
if issuer_id != data["node_id"]:
continue
# Canonical message for receipt verification
# schema_version is embedded to avoid replay across versions
rec_asn = receipt.get("asn")
if rec_asn is None:
rec_asn = remote_asn
payload = {
"schema_version": dht_config.schema_version,
"target_id": str(receipt.get("target_id")),
"issuer_id": issuer_id,
"asn": int(rec_asn) if rec_asn is not None else None,
"timestamp": float(receipt.get("timestamp", data.get("timestamp"))),
}
blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
try:
import nacl.signing # type: ignore
from app.core._utils.b58 import b58decode as _b58d
vk = nacl.signing.VerifyKey(_b58d(data["public_key"]))
sig_b = _b58d(str(receipt.get("signature", "")))
vk.verify(blob, sig_b)
# Accept and persist
membership_mgr.record_receipt(
ReachabilityReceipt(
target_id=payload["target_id"],
issuer_id=payload["issuer_id"],
asn=payload["asn"],
timestamp=payload["timestamp"],
signature=str(receipt.get("signature", "")),
)
)
except Exception:
# Ignore invalid receipts
continue
except Exception:
continue
except Exception as exc:
make_log("Handshake", f"Membership ingest failed: {exc}", level='warning')
# Upsert node and respond with our info + known public nodes
# Do not persist private peers (ephemeral)
if data.get("node_type") != "private" and data.get("public_host"):
try:
await upsert_known_node(
request.ctx.db_session,
host=data.get("public_host"),
port=_port_from_public_host(data.get("public_host")),
public_key=str(data.get("public_key")),
meta={
"version": peer_version,
"compatibility": comp,
"is_public": True,
"public_host": data.get("public_host"),
"last_metrics": data.get("metrics", {}),
"capabilities": data.get("capabilities", {}),
"ipfs": ipfs_meta,
}
)
await _connect_ipfs_multiaddrs(ipfs_meta.get("multiaddrs"))
try:
await record_event(
request.ctx.db_session,
'node_registered',
{
'public_key': str(data.get("public_key")),
'public_host': data.get("public_host"),
'node_type': data.get("node_type"),
'version': peer_version,
'capabilities': data.get("capabilities", {}),
},
origin_host=PROJECT_HOST,
)
except Exception as ev_exc:
make_log("Events", f"Failed to record node_registered event: {ev_exc}", level="warning")
except Exception as e:
make_log("Handshake", f"Upsert peer failed: {e}", level='warning')
# Merge advertised peers from the caller (optional field)
for n in data.get("known_public_nodes", []) or []:
known_ipfs_meta = _extract_ipfs_meta(n.get("ipfs") or {})
try:
await upsert_known_node(
request.ctx.db_session,
host=n.get("public_host") or n.get("host"),
port=int(n.get("port") or 80),
public_key=n.get("public_key") or "",
meta={
"version": n.get("version") or "0.0.0",
"compatibility": compatibility(n.get("version") or "0.0.0", CURRENT_PROTOCOL_VERSION),
"is_public": True,
"public_host": n.get("public_host") or n.get("host"),
"capabilities": n.get("capabilities") or {},
"ipfs": known_ipfs_meta,
}
)
await _connect_ipfs_multiaddrs(known_ipfs_meta.get("multiaddrs"))
except Exception:
pass
node = await compute_node_info(request.ctx.db_session)
known = await list_known_public_nodes(request.ctx.db_session)
membership_mgr = getattr(request.app.ctx.memory, "membership", None)
n_estimate = membership_mgr.n_estimate() if membership_mgr else 0
resp = sign_response({
"compatibility": comp,
"node": node,
"known_public_nodes": known,
"n_estimate": n_estimate,
})
make_log("Handshake", f"OK with {data.get('public_host')} compat={comp}")
status = 200
if comp == "warning":
resp["warning"] = "MINOR version differs; proceed with caution"
return response.json(resp, status=status)
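For reference, a sketch of how a peer could produce a signature this (removed) handler accepted: the payload is serialized canonically (sorted keys, compact separators) without the `signature` field, signed with Ed25519, and the key and signature travel base58-encoded. It assumes PyNaCl and the `base58` package; all field values are placeholders, and `node_id` must come from `compute_node_id`.

import json
import time

import base58        # assumed codec package
import nacl.signing  # PyNaCl

signing_key = nacl.signing.SigningKey.generate()
payload = {
    "version": "1.0.0",
    "schema_version": 1,  # must equal dht_config.schema_version
    "public_key": base58.b58encode(bytes(signing_key.verify_key)).decode(),
    "node_id": "<compute_node_id(public_key_bytes)>",  # placeholder
    "node_type": "public",
    "public_host": "https://node.example.com",
    "metrics": {},
    "timestamp": time.time(),
    "nonce": "unique-per-request",
}
blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
payload["signature"] = base58.b58encode(signing_key.sign(blob).signature).decode()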

View File

@@ -1,77 +0,0 @@
from __future__ import annotations
from typing import Dict, Any
from sanic import response
from sqlalchemy import select
from app.core.logger import make_log
from app.core.models import NodeEvent, KnownNode
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit
from app.core._config import PROJECT_HOST
from app.core.events.service import LOCAL_PUBLIC_KEY
def _origin_host() -> str | None:
return PROJECT_HOST.rstrip('/') if PROJECT_HOST else None
async def s_api_v1_network_events(request):
remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
if not check_rate_limit(request.app.ctx.memory, remote_ip):
return response.json({"error": "RATE_LIMIT"}, status=429)
ok, node_id, reason = verify_request(request, request.app.ctx.memory)
if not ok:
return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
session = request.ctx.db_session
trusted = (await session.execute(
select(KnownNode).where(KnownNode.public_key == node_id)
)).scalar_one_or_none()
role = (trusted.meta or {}).get('role') if trusted and trusted.meta else None
if role != 'trusted':
make_log("Events", f"Rejected events fetch from non-trusted node {node_id}", level="warning")
return response.json({"error": "FORBIDDEN"}, status=403)
try:
since = int(request.args.get('since') or 0)
except (TypeError, ValueError):
since = 0
since = max(since, 0)
try:
limit = int(request.args.get('limit') or 100)
except (TypeError, ValueError):
limit = 100
limit = max(1, min(limit, 200))
result = await session.execute(
select(NodeEvent)
.where(NodeEvent.origin_public_key == LOCAL_PUBLIC_KEY, NodeEvent.seq > since)
.order_by(NodeEvent.seq.asc())
.limit(limit)
)
rows = result.scalars().all()
events: list[Dict[str, Any]] = []
next_since = since
for row in rows:
next_since = max(next_since, int(row.seq))
events.append({
"origin_public_key": row.origin_public_key,
"origin_host": row.origin_host or _origin_host(),
"seq": int(row.seq),
"uid": row.uid,
"event_type": row.event_type,
"payload": row.payload,
"signature": row.signature,
"created_at": (row.created_at.isoformat() + 'Z') if row.created_at else None,
})
payload = {
"events": events,
"next_since": next_since,
}
return response.json(payload)
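A hedged polling sketch for this (removed) cursor-based feed: callers persist `next_since` and pass it back as `since` to receive only newer events. The route path is inferred from the handler name, the request signing required by `verify_request` is omitted, and `httpx` is an assumed dependency.

import httpx  # assumed client library


def poll_events(base_url: str, since: int = 0, limit: int = 100) -> int:
    """Fetch one page of events and return the cursor for the next call."""
    resp = httpx.get(
        f"{base_url}/api/v1/network/events",  # path assumed from the handler name
        params={"since": since, "limit": limit},
    )
    resp.raise_for_status()
    body = resp.json()
    for event in body["events"]:
        print(event["seq"], event["event_type"])
    return body["next_since"]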

View File

@@ -0,0 +1,266 @@
from __future__ import annotations
import json
import logging
from datetime import datetime
from typing import Dict, Any, List, Optional
from fastapi import APIRouter, HTTPException, Request, Depends, status
from fastapi.responses import JSONResponse
from app.core.crypto import get_ed25519_manager
from app.core.content.chunk_manager import ChunkManager
from app.core.content.sync_manager import ContentSyncManager
from app.core.models.content.chunk import ContentChunk
from app.core.models.api.sync_models import (
ContentRequest,
ContentProvideResponse,
ContentStatusResponse,
ContentVerifyRequest,
)
from app.core.validation.content_validator import ContentValidator
from app.core.validation.integrity_checker import IntegrityChecker
from app.core.validation.trust_manager import TrustManager
from app.core.models.validation.validation_models import ContentSignature, ValidationResult
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/node/content", tags=["node-content-sync"])
# Global helper objects (can be swapped for DI if needed)
_trust_manager = TrustManager()
_content_validator = ContentValidator()
_integrity_checker = IntegrityChecker()
async def _verify_inter_node_request(request: Request) -> Dict[str, Any]:
"""
Проверка заголовков и Ed25519 подписи межузлового запроса.
Используем ту же схему заголовков, что и в fastapi_node_routes.
Дополнительно первичная фильтрация по доверию ноды (blacklist/override/score).
"""
required_headers = ["x-node-communication", "x-node-id", "x-node-public-key", "x-node-signature"]
for header in required_headers:
if header not in request.headers:
logger.warning("Missing header on inter-node request: %s", header)
raise HTTPException(status_code=400, detail=f"Missing required header: {header}")
if request.headers.get("x-node-communication") != "true":
raise HTTPException(status_code=400, detail="Not a valid inter-node communication")
body = await request.body()
if not body:
raise HTTPException(status_code=400, detail="Empty message body")
try:
message_data = json.loads(body.decode("utf-8"))
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail="Invalid JSON in request body")
signature = request.headers.get("x-node-signature")
node_id = request.headers.get("x-node-id")
public_key = request.headers.get("x-node-public-key")
# Verify the inter-node message signature
crypto_manager = get_ed25519_manager()
is_valid = crypto_manager.verify_signature(message_data, signature, public_key)
if not is_valid:
logger.warning("Invalid signature from node %s", node_id)
# On an invalid signature, penalize trust immediately and reject
_trust_manager.update_trust_score(node_id, delta=-0.2, reason="invalid_inter_node_signature")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid cryptographic signature")
# Reward trust for a valid signature
_trust_manager.update_trust_score(node_id, delta=0.02, reason="valid_inter_node_signature")
# Check blacklist/override/minimum trust threshold
if not _trust_manager.is_node_trusted(node_id):
logger.warning("Request rejected by trust policy: node_id=%s", node_id)
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Untrusted node")
request.state.inter_node_communication = True
request.state.source_node_id = node_id
request.state.source_public_key = public_key
return {"node_id": node_id, "public_key": public_key, "message": message_data}
def _create_signed_response(data: Dict[str, Any]) -> JSONResponse:
"""Формирование подписанного ответа и стандартных заголовков межузлового взаимодействия."""
crypto_manager = get_ed25519_manager()
payload = {
"success": True,
"timestamp": datetime.utcnow().isoformat(),
"node_id": crypto_manager.node_id,
"data": data,
}
signature = crypto_manager.sign_message(payload)
headers = {
"X-Node-ID": crypto_manager.node_id,
"X-Node-Public-Key": crypto_manager.public_key_hex,
"X-Node-Communication": "true",
"X-Node-Signature": signature,
}
return JSONResponse(content=payload, headers=headers)
@router.post("/sync")
async def node_content_sync(request: Request, body: ContentRequest):
"""
POST /api/node/content/sync
Универсальный endpoint для межузловой синхронизации чанков.
Поддерживаемые сценарии:
- sync_type == "content_request": получить набор чанков по content_id и списку индексов
ожидается content_info: { content_id: str, indexes: List[int] }
Ответ: ContentProvideResponse со списком чанков (валидированные и подписанные при создании).
- sync_type == "new_content": уведомление о новом контенте (пока лишь логируем, ок подтверждаем)
- sync_type == "content_list": запрос списка контента (пока возвращаем пусто)
"""
# Verify the request signature and trust
ctx = await _verify_inter_node_request(request)
source_node_id = ctx["node_id"]
sync_mgr = ContentSyncManager()
chunk_mgr = sync_mgr.chunk_manager
try:
if body.sync_type == "content_request":
content_info = body.content_info
content_id = content_info["content_id"]
indexes: List[int] = list(map(int, content_info["indexes"]))
# Local storage_reader. In a real deployment, replace with access to the chunk store.
def storage_reader(cid: str, idx: int) -> Optional[ContentChunk]:
# DB/filesystem access could be implemented here. For now we return None.
return None
provided = await sync_mgr.provide_chunks(
content_id=content_id,
indexes=indexes,
storage_reader=storage_reader,
)
# Extra safeguard: run the provided chunks through IntegrityChecker (if any)
chunks_models: List[ContentChunk] = []
for c in provided.get("chunks", []):
try:
chunks_models.append(ContentChunk.from_dict(c))
except Exception as e:
logger.error("content_request: invalid provided chunk from storage: %s", e)
if chunks_models:
chain_result = _integrity_checker.verify_content_chain(chunks_models, verify_signatures=True)
if not chain_result.ok:
logger.warning("integrity check failed for provided chunks: %s", chain_result.reason)
# Lower trust for the requesting source (treated as a manipulation/attack attempt)
_trust_manager.update_trust_score(source_node_id, delta=-0.05, reason="invalid_chain_on_provide")
# Pydantic response
resp = ContentProvideResponse(
success=True,
chunks=[c.to_dict() for c in chunks_models],
errors=provided.get("errors", []),
)
return _create_signed_response(resp.dict())
elif body.sync_type == "new_content":
# The node announces new content; metadata/signatures could be validated when present
logger.info("new_content received: %s", body.content_info)
_trust_manager.update_trust_score(source_node_id, delta=0.01, reason="announce_new_content")
return _create_signed_response({"sync_result": "ack", "info": body.content_info})
elif body.sync_type == "content_list":
return _create_signed_response({"content_list": [], "total_items": 0})
else:
raise HTTPException(status_code=400, detail=f"Unknown sync_type: {body.sync_type}")
except HTTPException:
raise
except Exception as e:
logger.exception("node_content_sync error")
_trust_manager.update_trust_score(source_node_id, delta=-0.02, reason="sync_handler_exception")
raise HTTPException(status_code=500, detail=str(e))
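# Example request bodies for the three supported sync_type values (field
# names follow ContentRequest as used above; the exact Pydantic schema may
# carry additional fields, so treat these as illustrative):
EXAMPLE_SYNC_BODIES = {
    "content_request": {
        "sync_type": "content_request",
        "content_info": {"content_id": "abc123", "indexes": [0, 1, 2]},
    },
    "new_content": {
        "sync_type": "new_content",
        "content_info": {"content_id": "abc123"},  # illustrative payload
    },
    "content_list": {"sync_type": "content_list", "content_info": {}},
}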
@router.get("/status/{content_id}")
async def node_content_status(content_id: str):
"""
GET /api/node/content/status/{content_id}
Вернуть статус хранения контента на ноде:
- какие индексы имеются
- какие отсутствуют
- общий ожидаемый total_chunks (если известен; иначе 0)
"""
try:
have_indexes: List[int] = []
total_chunks = 0
missing = sorted(set(range(total_chunks)) - set(have_indexes)) if total_chunks else []
resp = ContentStatusResponse(
content_id=content_id,
total_chunks=total_chunks,
have_indexes=have_indexes,
missing_indexes=missing,
verified=None,
message="ok",
)
return resp.dict()
except Exception as e:
logger.exception("node_content_status error")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/verify")
async def node_content_verify(request: Request, body: ContentVerifyRequest):
"""
POST /api/node/content/verify
Проверка валидности набора чанков (хеш и Ed25519 подпись каждой записи),
а также расширенная проверка целостности цепочки чанков и оценка доверия источнику.
"""
ctx = await _verify_inter_node_request(request)
source_node_id = ctx["node_id"]
source_pubkey = ctx["public_key"]
try:
chunk_mgr = ChunkManager()
errors: List[Dict[str, Any]] = []
ok_count = 0
chunks_models: List[ContentChunk] = []
for ch in body.chunks:
try:
model = ContentChunk.from_dict(ch.dict())
chunks_models.append(model)
ok, err = chunk_mgr.verify_chunk_integrity(model, verify_signature=body.verify_signatures)
if not ok:
errors.append({"chunk_id": model.chunk_id, "error": err})
else:
ok_count += 1
except Exception as ce:
logger.error("verify: failed to parse/validate chunk", extra={"error": str(ce)})
errors.append({"error": str(ce), "chunk_ref": ch.dict()})
# Additionally verify the integrity of the whole chain
if chunks_models:
chain_res = _integrity_checker.verify_content_chain(chunks_models, verify_signatures=body.verify_signatures)
if not chain_res.ok:
errors.append({"chain_error": chain_res.reason, "details": chain_res.details})
# Final trust assessment based on the operation outcome
if errors:
_trust_manager.update_trust_score(source_node_id, delta=-0.05, reason="verify_errors_detected")
else:
_trust_manager.update_trust_score(source_node_id, delta=0.02, reason="verify_ok")
result = {
"verified_ok": ok_count,
"errors": errors,
"trust": _trust_manager.assess_node_trust(source_node_id).to_dict(),
}
return _create_signed_response(result)
except HTTPException:
raise
except Exception as e:
logger.exception("node_content_verify error")
_trust_manager.update_trust_score(source_node_id, delta=-0.02, reason="verify_exception")
raise HTTPException(status_code=500, detail=str(e))
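# Recap of the trust-score deltas applied by this router (values taken
# verbatim from the handlers above; kept here purely as documentation):
TRUST_DELTAS = {
    "invalid_inter_node_signature": -0.20,
    "valid_inter_node_signature": +0.02,
    "invalid_chain_on_provide": -0.05,
    "announce_new_content": +0.01,
    "sync_handler_exception": -0.02,
    "verify_errors_detected": -0.05,
    "verify_ok": +0.02,
    "verify_exception": -0.02,
}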

View File

@ -0,0 +1,241 @@
from __future__ import annotations
import json
import logging
import os
import time
from datetime import datetime
from typing import Dict, Any, List, Optional
from fastapi import APIRouter, HTTPException, Request, status
from fastapi.responses import JSONResponse
from app.core.crypto import get_ed25519_manager
from app.core.content.chunk_manager import ChunkManager
from app.core.models.api.stats_models import (
NodeHealthResponse,
NodeContentStatsResponse,
ContentStatsItem,
NodeStatsReport,
)
# NEW imports for detailed stats and network overview
from app.core.stats.metrics_collector import MetricsCollector
from app.core.stats.stats_aggregator import StatsAggregator
from app.core.stats.gossip_manager import GossipManager
from app.core.models.stats.metrics_models import NodeStats
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/node/stats", tags=["node-stats"])
# Singleton-ish local instances for this router scope
_metrics_collector = MetricsCollector()
_stats_aggregator = StatsAggregator()
_gossip_manager = GossipManager()
async def _verify_inter_node_request_optional(request: Request) -> Optional[Dict[str, Any]]:
"""
Опциональная проверка межузловых заголовков + подписи.
Используется там, где межузловой вызов возможен (например, report).
Возвращает dict с информацией о ноде при успехе, иначе None.
"""
if request.headers.get("x-node-communication") != "true":
return None
# Mandatory headers are required
required_headers = ["x-node-id", "x-node-public-key", "x-node-signature"]
for header in required_headers:
if header not in request.headers:
logger.warning("Missing header on inter-node request: %s", header)
raise HTTPException(status_code=400, detail=f"Missing required header: {header}")
# Read the body
body = await request.body()
if not body:
raise HTTPException(status_code=400, detail="Empty message body")
try:
message_data = json.loads(body.decode("utf-8"))
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail="Invalid JSON in request body")
signature = request.headers.get("x-node-signature")
node_id = request.headers.get("x-node-id")
public_key = request.headers.get("x-node-public-key")
crypto_manager = get_ed25519_manager()
is_valid = crypto_manager.verify_signature(message_data, signature, public_key)
if not is_valid:
logger.warning("Invalid signature from node %s (stats)", node_id)
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid cryptographic signature")
request.state.inter_node_communication = True
request.state.source_node_id = node_id
request.state.source_public_key = public_key
return {"node_id": node_id, "public_key": public_key, "message": message_data}
def _create_signed_response(data: Dict[str, Any]) -> JSONResponse:
"""Формирование подписанного ответа и стандартных межузловых заголовков."""
crypto_manager = get_ed25519_manager()
payload = {
"success": True,
"timestamp": datetime.utcnow().isoformat(),
"node_id": crypto_manager.node_id,
"data": data,
}
signature = crypto_manager.sign_message(payload)
headers = {
"X-Node-ID": crypto_manager.node_id,
"X-Node-Public-Key": crypto_manager.public_key_hex,
"X-Node-Communication": "true",
"X-Node-Signature": signature,
}
return JSONResponse(content=payload, headers=headers)
@router.get("/health")
async def node_health():
"""
GET /api/node/stats/health
Возвращает состояние ноды и базовые метрики.
"""
try:
crypto_manager = get_ed25519_manager()
# Collect basic metrics (simple stubs without psutil, to avoid adding dependencies)
uptime = int(time.time() - int(os.getenv("NODE_START_TS", str(int(time.time())))))
cpu_usage = None
mem_usage = None
disk_free = None
resp = NodeHealthResponse(
status="ok",
node_id=crypto_manager.node_id,
public_key=crypto_manager.public_key_hex,
uptime_seconds=uptime,
cpu_usage=cpu_usage,
memory_usage_mb=mem_usage,
disk_free_mb=disk_free,
last_sync_ts=None,
details={
"version": "3.0.0",
"protocols": ["ed25519", "content_sync"],
},
)
# The public health check may be returned unsigned so as not to break monitoring
return resp.dict()
except Exception as e:
logger.exception("node_health error")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/content")
async def node_content_stats():
"""
GET /api/node/stats/content
Аггрегированная статистика по контенту на ноде.
"""
try:
# Stub: integrate with the node's storage/DB for real values
contents: List[ContentStatsItem] = []
total_chunks = sum(c.total_chunks for c in contents)
stored_chunks = sum(c.stored_chunks for c in contents)
missing_chunks = sum(c.missing_chunks for c in contents)
resp = NodeContentStatsResponse(
total_contents=len(contents),
total_chunks=total_chunks,
stored_chunks=stored_chunks,
missing_chunks=missing_chunks,
contents=contents,
)
return resp.dict()
except Exception as e:
logger.exception("node_content_stats error")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/report")
async def node_stats_report(request: Request, body: NodeStatsReport):
"""
POST /api/node/stats/report
Прием отчета от других нод (подписанного ed25519).
"""
await _verify_inter_node_request_optional(request)
try:
# Report-processing business logic: log it, possibly persist it to the DB
logger.info("Received stats report", extra={"report": body.dict()})
# Extract the nested metrics if present and validate them via GossipManager
metrics = body.metrics
if isinstance(metrics, dict) and metrics.get("node_id") and metrics.get("signature"):
try:
node_stats = await _gossip_manager.receive_stats(metrics)
await _stats_aggregator.add_peer_snapshot(node_stats)
except Exception as ge:
logger.warning("Peer stats rejected: %s", ge)
return _create_signed_response({"accepted": True})
except HTTPException:
raise
except Exception as e:
logger.exception("node_stats_report error")
raise HTTPException(status_code=500, detail=str(e))
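# Illustrative /report body: the nested "metrics" dict is forwarded to
# GossipManager.receive_stats only when it carries both "node_id" and
# "signature"; every other field name below is an assumption, not the
# actual NodeStatsReport schema.
EXAMPLE_STATS_REPORT = {
    "node_id": "node-abc",
    "timestamp": "2025-08-16T00:00:00Z",
    "metrics": {
        "node_id": "node-abc",
        "signature": "<hex ed25519 signature over the metrics payload>",
        "system": {"cpu_percent": 12.5, "memory_mb": 512.0},
    },
}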
# NEW: detailed node statistics
@router.get("/detailed")
async def node_detailed_stats():
"""
GET /api/node/stats/detailed
Подробная системная и прикладная статистика текущей ноды, с историческими агрегатами.
"""
try:
crypto = get_ed25519_manager()
# collect fresh metrics and add them to the aggregator
system, app = await _metrics_collector.get_current_stats()
local_snapshot = NodeStats(
node_id=crypto.node_id,
public_key=crypto.public_key_hex,
system=system,
app=app,
)
await _stats_aggregator.add_local_snapshot(local_snapshot)
aggregates = await _stats_aggregator.aggregate_node_stats(node_id=None, last_n=20)
trends = await _stats_aggregator.calculate_trends(node_id=None, window=60)
latest = await _stats_aggregator.get_latest_local()
latest_dict = latest.to_dict() if latest else None
data = {
"node_id": crypto.node_id,
"latest": latest_dict,
"aggregates": aggregates,
"trends": trends,
"timestamp": datetime.utcnow().isoformat(),
}
return _create_signed_response(data)
except Exception as e:
logger.exception("node_detailed_stats error")
raise HTTPException(status_code=500, detail=str(e))
# NEW: network statistics (aggregated across known nodes)
@router.get("/network")
async def node_network_stats():
"""
GET /api/node/stats/network
Сводка по сети: число нод, активные, средние CPU/MEM, суммарный доступный контент и т.д.
"""
try:
overview = await _stats_aggregator.get_network_overview()
data = {
"overview": overview.to_dict(),
"timestamp": datetime.utcnow().isoformat(),
}
return _create_signed_response(data)
except Exception as e:
logger.exception("node_network_stats error")
raise HTTPException(status_code=500, detail=str(e))

View File

@ -1,294 +0,0 @@
import asyncio
import hashlib
import os
from datetime import datetime
from mimetypes import guess_type
import aiofiles
import traceback
from base58 import b58encode
from sanic import response
import json
from app.core._config import UPLOADS_DIR
from sqlalchemy import select
from app.core._utils.resolve_content import resolve_content
from app.core.logger import make_log
from app.core.models.node_storage import StoredContent
from pydub import AudioSegment
from PIL import Image
from uuid import uuid4
import subprocess
# Any content is uploaded in a single request, with mime_type determined from the file extension
# file_mimetype audio/video
# extension_encoding file encode container
# The file is stored under sha256(file_content) !!, which is very heavy
# A CID is generated taking into account the content type and its decoding
# Uploads come only from a user, or when our own backend requests an upload
# Creates a decrypted (local/content_bin) StoredContent
async def s_api_v1_storage_post(request):
if not request.files:
return response.json({"error": "No file provided"}, status=400)
file_param = list(request.files.values())[0][0] if request.files else None
# file_name_json = request.json.get("filename") if request.json else None
if file_param:
file_content = file_param.body
file_name = file_param.name
else:
return response.json({"error": "No file provided"}, status=400)
file_meta = {}
file_mimetype, file_encoding = guess_type(file_name)
if file_mimetype:
file_meta["content_type"] = file_mimetype
if file_encoding:
file_meta["extension_encoding"] = file_encoding
try:
file_hash_bin = hashlib.sha256(file_content).digest()
file_hash = b58encode(file_hash_bin).decode()
stored_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == file_hash)
)).scalars().first()
if stored_content:
stored_cid = stored_content.cid.serialize_v1()
stored_cid_v2 = stored_content.cid.serialize_v2()
return response.json({
"content_sha256": file_hash,
"content_id_v1": stored_cid,
"content_id": stored_cid_v2,
"content_url": f"dmy://storage?cid={stored_cid_v2}"
})
if request.ctx.user:
pass
elif request.ctx.verified_hash:
assert request.ctx.verified_hash == file_hash_bin, "Invalid service request hash"
else:
return response.json({"error": "Unauthorized"}, status=401)
new_content = StoredContent(
type="local/content_bin",
user_id=request.ctx.user.id if request.ctx.user else None,
hash=file_hash,
filename=file_name,
meta=file_meta,
created=datetime.now(),
key_id=None,
)
request.ctx.db_session.add(new_content)
await request.ctx.db_session.commit()
file_path = os.path.join(UPLOADS_DIR, file_hash)
async with aiofiles.open(file_path, "wb") as file:
await file.write(file_content)
new_content_id = new_content.cid
new_cid_v1 = new_content_id.serialize_v1()
new_cid = new_content_id.serialize_v2()
return response.json({
"content_sha256": file_hash,
"content_id": new_cid,
"content_id_v1": new_cid_v1,
"content_url": f"dmy://storage?cid={new_cid}",
})
except BaseException as e:
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error: {e}" + '\n' + traceback.format_exc(), level="error")
return response.json({"error": f"Error: {e}"}, status=500)
# Fetch content by file_hash, honoring seconds_limit
async def s_api_v1_storage_get(request, file_hash=None):
seconds_limit = int(request.args.get("seconds_limit", 0))
content_id = file_hash
cid, errmsg = resolve_content(content_id)
if errmsg:
return response.json({"error": errmsg}, status=400)
content_sha256 = b58encode(cid.content_hash).decode()
content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == content_sha256)
)).scalars().first()
if not content:
return response.json({"error": "File not found"}, status=404)
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} File {content_sha256} requested by user={getattr(getattr(request.ctx, 'user', None), 'id', None)}")
file_path = os.path.join(UPLOADS_DIR, content_sha256)
if not os.path.exists(file_path):
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} File {content_sha256} not found locally", level="error")
return response.json({"error": "File not found"}, status=404)
async with aiofiles.open(file_path, "rb") as file:
content_file_bin = await file.read()
# query_id = str(uuid4().hex())
tempfile_path = os.path.join(UPLOADS_DIR, f"tmp_{content_sha256}")
accept_type = cid.accept_type or content.meta.get("content_type")
if accept_type:
if accept_type == "application/json":
return response.json(
json.loads(content_file_bin.decode())
)
content_type, content_encoding = accept_type.split("/")
if content_type == 'audio':
tempfile_path += "_mpeg" + (f"_{seconds_limit}" if seconds_limit else "")
if not os.path.exists(tempfile_path):
try:
# Resolve cover content by CID (async)
from app.core.content.content_id import ContentId
try:
_cid = ContentId.deserialize(content.meta.get('cover_cid'))
_cover_hash = _cid.content_hash_b58
cover_content = (await request.ctx.db_session.execute(
select(StoredContent).where(StoredContent.hash == _cover_hash)
)).scalars().first()
except Exception:
cover_content = None
cover_tempfile_path = os.path.join(UPLOADS_DIR, f"tmp_{cover_content.hash}_jpeg")
if not os.path.exists(cover_tempfile_path):
cover_image = Image.open(cover_content.filepath)
cover_image = cover_image.convert('RGB')
quality = 95
while quality > 10:
cover_image.save(cover_tempfile_path, 'JPEG', quality=quality)
if os.path.getsize(cover_tempfile_path) <= 200 * 1024:
break
quality -= 5
assert os.path.exists(cover_tempfile_path), "Cover image not found"
except:
cover_content = None
cover_tempfile_path = None
try:
file_ext = content.filename.split('.')[-1]
if file_ext == 'mp3':
audio = AudioSegment.from_mp3(file_path)
elif file_ext == 'wav':
audio = AudioSegment.from_wav(file_path)
elif file_ext == 'ogg':
audio = AudioSegment.from_ogg(file_path)
elif file_ext == 'flv':
audio = AudioSegment.from_flv(file_path)
else:
audio = None
if not audio:
try:
audio = AudioSegment.from_file(file_path)
except BaseException as e:
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error loading audio from file: {e}", level="debug")
if not audio:
try:
audio = AudioSegment(content_file_bin)
except BaseException as e:
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error loading audio from binary: {e}", level="debug")
audio = audio[:seconds_limit * 1000] if seconds_limit else audio
audio.export(tempfile_path, format="mp3", cover=cover_tempfile_path)
except BaseException as e:
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error converting audio: {e}" + '\n' + traceback.format_exc(), level="error")
if os.path.exists(tempfile_path):
async with aiofiles.open(tempfile_path, "rb") as file:
content_file_bin = await file.read()
accept_type = 'audio/mpeg'
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Audio {content_sha256} converted successfully", level='debug')
else:
tempfile_path = tempfile_path[:-5]
elif content_type == 'image':
tempfile_path += "_jpeg"
if not os.path.exists(tempfile_path):
try:
image = Image.open(file_path)
image = image.convert('RGB')
quality = 95
while quality > 10:
image.save(tempfile_path, 'JPEG', quality=quality)
if os.path.getsize(tempfile_path) <= 200 * 1024:
break
quality -= 5
except BaseException as e:
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Error converting image: {e}" + '\n' + traceback.format_exc(), level="error")
if os.path.exists(tempfile_path):
async with aiofiles.open(tempfile_path, "rb") as file:
content_file_bin = await file.read()
make_log("Storage", f"sid={getattr(request.ctx, 'session_id', None)} Image {content_sha256} converted successfully", level='debug')
accept_type = 'image/jpeg'
else:
tempfile_path = tempfile_path[:-5]
elif content_type == 'video':
# Build a temp path for the video
tempfile_path += "_mp4" + (f"_{seconds_limit}" if seconds_limit else "") + ".mp4"
if not os.path.exists(tempfile_path):
try:
# Use ffmpeg to cut or convert to mp4
if seconds_limit > 0:
# Cut the video to the specified seconds_limit
subprocess.run([
"ffmpeg",
"-y",
"-ss", "0", # Set start time (fast seeking)
"-i", file_path,
"-t", str(seconds_limit), # Set duration of the output
"-c:v", "libx264", # Encode video with libx264
"-profile:v", "baseline", # Set baseline profile for compatibility with Telegram
"-level", "3.0", # Set level to 3.0 for compatibility
"-pix_fmt", "yuv420p", # Set pixel format for maximum compatibility
"-c:a", "aac", # Encode audio with AAC
"-b:a", "128k", # Set audio bitrate
"-movflags", "+faststart", # Enable fast start for streaming
tempfile_path
], check=True)
else:
# Just convert to mp4 (no cutting)
subprocess.run([
"ffmpeg",
"-y",
"-ss", "0", # Set start time (fast seeking)
"-i", file_path,
# "-t", str(seconds_limit), # Set duration of the output
"-c:v", "libx264", # Encode video with libx264
"-profile:v", "baseline", # Set baseline profile for compatibility with Telegram
"-level", "3.0", # Set level to 3.0 for compatibility
"-pix_fmt", "yuv420p", # Set pixel format for maximum compatibility
"-c:a", "aac", # Encode audio with AAC
"-b:a", "128k", # Set audio bitrate
"-movflags", "+faststart", # Enable fast start for streaming
tempfile_path
], check=True)
except BaseException as e:
make_log("Storage", f"Error converting video: {e}" + '\n' + traceback.format_exc(), level="error")
if os.path.exists(tempfile_path):
async with aiofiles.open(tempfile_path, "rb") as file:
content_file_bin = await file.read()
make_log("Storage", f"Video {content_sha256} processed successfully")
accept_type = 'video/mp4'
else:
tempfile_path = tempfile_path[:-4]  # strip the trailing ".mp4"
return response.raw(body=content_file_bin, **({'content_type': accept_type} if accept_type else {}))
async def s_api_v1_storage_decode_cid(request, content_id=None):
cid, errmsg = resolve_content(content_id)
if errmsg:
return response.json({"error": errmsg}, status=400)
return response.json(cid.json_format())

View File

@ -1,437 +0,0 @@
import os
import subprocess
import asyncio
from uuid import uuid4
from datetime import datetime
from mimetypes import guess_type
from base64 import b64decode
import aiofiles
from base58 import b58encode
from sanic import response
from app.core.logger import make_log
from sqlalchemy import select
from app.core.models.node_storage import StoredContent
from app.core._config import UPLOADS_DIR
from app.core.models.content_v3 import ContentDerivative
from app.core._utils.resolve_content import resolve_content
from app.core.network.nodesig import verify_request
from app.core.models.my_network import KnownNode
from sqlalchemy import select as sa_select
import httpx
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed
from app.core._utils.b58 import b58encode as _b58e, b58decode as _b58d
import json, time
# POST /api/v1.5/storage
async def s_api_v1_5_storage_post(request):
# Log the receipt of a chunk upload request
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Received chunk upload request", level="INFO")
# Get the provided file name from header and decode it from base64
provided_filename_b64 = request.headers.get("X-File-Name")
if not provided_filename_b64:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Missing X-File-Name header", level="ERROR")
return response.json({"error": "Missing X-File-Name header"}, status=400)
try:
provided_filename = b64decode(provided_filename_b64).decode("utf-8")
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Invalid X-File-Name header: {e}", level="ERROR")
return response.json({"error": "Invalid X-File-Name header"}, status=400)
# Get X-Chunk-Start header (must be provided) and parse it as integer
chunk_start_header = request.headers.get("X-Chunk-Start")
if chunk_start_header is None:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Missing X-Chunk-Start header", level="ERROR")
return response.json({"error": "Missing X-Chunk-Start header"}, status=400)
try:
chunk_start = int(chunk_start_header)
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Invalid X-Chunk-Start header: {e}", level="ERROR")
return response.json({"error": "Invalid X-Chunk-Start header"}, status=400)
# Enforce maximum chunk size (80 MB) using Content-Length header if provided
max_chunk_size = 80 * 1024 * 1024 # 80 MB
content_length = request.headers.get("Content-Length")
if content_length is not None:
try:
content_length = int(content_length)
if content_length > max_chunk_size:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk size {content_length} exceeds maximum allowed", level="ERROR")
return response.json({"error": "Chunk size exceeds maximum allowed (80 MB)"}, status=400)
except:
pass
# Determine if this is a new upload or a continuation (resume)
upload_id = request.headers.get("X-Upload-ID")
is_new_upload = False
if not upload_id:
# New upload session: generate a new uuid
upload_id = str(uuid4())
is_new_upload = True
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Start new upload session id={upload_id}", level="INFO")
else:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Resume upload session id={upload_id}", level="DEBUG")
# Determine the temporary file path based on upload_id
temp_path = os.path.join(UPLOADS_DIR, f"v1.5_upload_{upload_id}")
# Check current size of the temporary file (if it exists)
current_size = 0
if os.path.exists(temp_path):
current_size = os.path.getsize(temp_path)
# If the provided chunk_start is less than current_size, the chunk is already received
if chunk_start < current_size:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk at {chunk_start} already received; size={current_size}", level="DEBUG")
return response.json({"upload_id": upload_id, "current_size": current_size})
elif chunk_start > current_size:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Chunk start {chunk_start} != current size {current_size}", level="ERROR")
return response.json({"error": "Chunk start does not match current file size"}, status=400)
# Append the received chunk to the temporary file
try:
mode = 'wb' if is_new_upload else 'ab'
async with aiofiles.open(temp_path, mode) as out_file:
data = request.body # Get the full body if available
if data:
await out_file.write(data) # Write the whole body at once
else:
async for chunk in request.stream:
await out_file.write(chunk)
new_size = os.path.getsize(temp_path)
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Appended chunk. size={new_size}", level="DEBUG")
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error saving chunk: {e}", level="ERROR")
return response.json({"error": "Failed to save chunk"}, status=500)
# The X-Last-Chunk header (set to 1) marks the final chunk of the upload
is_last_chunk = int(request.headers.get("X-Last-Chunk", "0")) == 1
if is_last_chunk:
# Compute the SHA256 hash of the temporary file using subprocess
try:
proc = await asyncio.create_subprocess_exec(
'sha256sum', temp_path,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
error_msg = stderr.decode().strip()
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} sha256sum error: {error_msg}", level="ERROR")
return response.json({"error": "Failed to compute file hash"}, status=500)
computed_hash_hex = stdout.decode().split()[0].strip()
computed_hash_bytes = bytes.fromhex(computed_hash_hex)
computed_hash_b58 = b58encode(computed_hash_bytes).decode()
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Computed hash (base58): {computed_hash_b58}", level="INFO")
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error computing file hash: {e}", level="ERROR")
return response.json({"error": "Error computing file hash"}, status=500)
final_path = os.path.join(UPLOADS_DIR, f"{computed_hash_b58}")
try:
os.rename(temp_path, final_path)
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Final chunk received. Renamed to: {final_path}", level="INFO")
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Error renaming file: {e}", level="ERROR")
return response.json({"error": "Failed to finalize file storage"}, status=500)
db_session = request.ctx.db_session
existing = (await db_session.execute(select(StoredContent).where(StoredContent.hash == computed_hash_b58))).scalars().first()
if existing:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} File already exists in DB: {computed_hash_b58}", level="INFO")
serialized_v2 = existing.cid.serialize_v2()
serialized_v1 = existing.cid.serialize_v1()
return response.json({
"upload_id": upload_id,
"content_sha256": computed_hash_b58,
"content_id": serialized_v2,
"content_id_v1": serialized_v1,
"content_url": f"dmy://storage?cid={serialized_v2}",
})
try:
user_id = request.ctx.user.id if request.ctx.user else None
new_content = StoredContent(
type='local/content_bin',
hash=computed_hash_b58,
user_id=user_id,
filename=provided_filename,
key_id=None,
meta={},
created=datetime.utcnow()
)
db_session.add(new_content)
await db_session.commit()
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Stored new file user={user_id} hash={computed_hash_b58}", level="INFO")
except Exception as e:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Database error: {e}", level="ERROR")
return response.json({"error": "Database error"}, status=500)
serialized_v2 = new_content.cid.serialize_v2()
serialized_v1 = new_content.cid.serialize_v1()
return response.json({
"upload_id": upload_id,
"content_sha256": computed_hash_b58,
"content_id": serialized_v2,
"content_id_v1": serialized_v1,
"content_url": f"dmy://storage?cid={serialized_v2}",
})
else:
# Not the final chunk yet; return the current upload status
return response.json({"upload_id": upload_id, "current_size": os.path.getsize(temp_path)})
# GET /api/v1.5/storage/<file_hash>
async def s_api_v1_5_storage_get(request, file_hash):
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Retrieve file hash={file_hash}", level="INFO")
try:
file_hash = b58encode(resolve_content(file_hash)[0].content_hash).decode()
except:
pass
final_path = os.path.join(UPLOADS_DIR, f"{file_hash}")
if not os.path.exists(final_path):
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} File not found: {final_path}", level="ERROR")
return response.json({"error": "File not found"}, status=404)
db_session = request.ctx.db_session
stored = (await db_session.execute(select(StoredContent).where(StoredContent.hash == file_hash))).scalars().first()
if stored and stored.filename:
filename_for_mime = stored.filename
else:
filename_for_mime = final_path
mime_type, _ = guess_type(filename_for_mime)
if not mime_type:
mime_type = "application/octet-stream"
file_size = os.path.getsize(final_path)
range_header = request.headers.get("Range")
# touch derivative last_access_at if exists
try:
cd = (await request.ctx.db_session.execute(select(ContentDerivative).where(ContentDerivative.local_path.like(f"%/{file_hash}")))).scalars().first()
if cd:
cd.last_access_at = datetime.utcnow()
await request.ctx.db_session.commit()
except Exception:
pass
if range_header:
make_log("uploader_v1.5", f"sid={getattr(request.ctx, 'session_id', None)} Processing Range: {range_header}", level="DEBUG")
range_spec = range_header.strip().lower()
if not range_spec.startswith("bytes="):
make_log("uploader_v1.5", f"Invalid Range header: {range_header}", level="ERROR")
return response.json({"error": "Invalid Range header"}, status=400)
range_spec = range_spec[len("bytes="):]
range_parts = [part.strip() for part in range_spec.split(',')]
parsed_ranges = []
try:
for part in range_parts:
if '-' not in part:
raise ValueError("Invalid range format")
start_str, end_str = part.split('-', 1)
if start_str == "":
suffix_length = int(end_str)
start = 0 if suffix_length > file_size else file_size - suffix_length
end = file_size - 1
else:
start = int(start_str)
end = file_size - 1 if end_str == "" else int(end_str)
if start > end or end >= file_size:
raise ValueError("Requested Range Not Satisfiable")
parsed_ranges.append((start, end))
except Exception as e:
make_log("uploader_v1.5", f"Invalid Range header: {range_header} - {e}", level="ERROR")
return response.json({"error": "Invalid Range header"}, status=400)
if len(parsed_ranges) == 1:
# Single range streaming
start, end = parsed_ranges[0]
content_length = end - start + 1
headers = {
"Content-Range": f"bytes {start}-{end}/{file_size}",
"Accept-Ranges": "bytes",
"Content-Length": str(content_length),
"Content-Type": mime_type,
}
# Create response for streaming
stream_response = await request.respond(headers=headers, status=206, content_type=mime_type)
make_log("uploader_v1.5", f"Starting to stream file from byte {start} to {end}", level="INFO")
async with aiofiles.open(final_path, mode='rb') as f:
await f.seek(start)
remaining = content_length
chunk_size = 1024 * 1024 # chunk size in bytes
while remaining > 0:
read_size = min(chunk_size, remaining)
data = await f.read(read_size)
if not data:
break
remaining -= len(data)
await stream_response.send(data)
make_log("uploader_v1.5", f"Finished streaming file: {final_path}", level="INFO")
await stream_response.eof()
return stream_response
else:
# Multipart range streaming
boundary = uuid4().hex
headers = {
"Content-Type": f"multipart/byteranges; boundary={boundary}",
"Accept-Ranges": "bytes",
}
stream_response = await request.respond(headers=headers, status=206)
for start, end in parsed_ranges:
part_header = (
f"--{boundary}\r\n"
f"Content-Type: {mime_type}\r\n"
f"Content-Range: bytes {start}-{end}/{file_size}\r\n"
f"\r\n"
)
await stream_response.send(part_header.encode())
part_length = end - start + 1
async with aiofiles.open(final_path, mode='rb') as f:
await f.seek(start)
remaining = part_length
chunk_size = 1024 * 1024
while remaining > 0:
read_size = min(chunk_size, remaining)
data = await f.read(read_size)
if not data:
break
remaining -= len(data)
await stream_response.send(data)
await stream_response.send(b"\r\n")
await stream_response.send(f"--{boundary}--\r\n".encode())
await stream_response.eof()
return stream_response
else:
make_log("uploader_v1.5", f"Returning full file for video/audio: {final_path}", level="INFO")
return await response.file(final_path, mime_type=mime_type)
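# Example Range usage against the GET endpoint above (URL and hash are
# placeholders). A single range streams a 206 with Content-Range; several
# ranges yield a multipart/byteranges body.
def example_range_requests(url):
    first_mib = httpx.get(url, headers={"Range": "bytes=0-1048575"})
    last_kib = httpx.get(url, headers={"Range": "bytes=-1024"})
    two_parts = httpx.get(url, headers={"Range": "bytes=0-99,200-299"})
    return first_mib.status_code, last_kib.status_code, two_parts.status_code  # 206 each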
# GET /api/v1/storage.fetch/<file_hash>
# Internal endpoint for inter-node requests (NodeSig). Returns the file if it exists locally.
async def s_api_v1_storage_fetch(request, file_hash):
ok, node_id, reason = verify_request(request, request.app.ctx.memory)
if not ok:
return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
# Trusted nodes only
try:
session = request.ctx.db_session
row = (await session.execute(sa_select(KnownNode).where(KnownNode.public_key == node_id))).scalars().first()
role = (row.meta or {}).get('role') if row and row.meta else None
if role != 'trusted':
return response.json({"error": "DENIED_NOT_TRUSTED"}, status=403)
except Exception:
pass
# Reuse the v1.5 implementation
return await s_api_v1_5_storage_get(request, file_hash)
# GET /api/v1/storage.proxy/<file_hash>
# Proxying for the web client: if the file is missing locally, try fetching it from trusted nodes via NodeSig
async def s_api_v1_storage_proxy(request, file_hash):
# Require either valid NodeSig (unlikely for public clients) or a signed access token
# Token fields: pub, exp, scope, uid, sig over json {hash,scope,exp,uid}
def _verify_access_token() -> bool:
try:
pub = (request.args.get('pub') or '').strip()
exp = int(request.args.get('exp') or '0')
scope = (request.args.get('scope') or '').strip()
uid = int(request.args.get('uid') or '0')
sig = (request.args.get('sig') or '').strip()
if not pub or not exp or not scope or not sig:
return False
if exp < int(time.time()):
return False
payload = {
'hash': file_hash,
'scope': scope,
'exp': exp,
'uid': uid,
}
blob = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode()
import nacl.signing
vk = nacl.signing.VerifyKey(_b58d(pub))
vk.verify(blob, _b58d(sig))
# Note: we do not require a session-bound user for media fetches,
# the shortlived signature itself is sufficient.
return True
except Exception:
return False
ok_nodesig, _nid, _reason = verify_request(request, request.app.ctx.memory)
if not ok_nodesig and not _verify_access_token():
return response.json({'error': 'UNAUTHORIZED'}, status=401)
# First try locally, without returning a 404
try:
from base58 import b58encode as _b58e
try:
# Support both a raw hash and a CID
from app.core._utils.resolve_content import resolve_content as _res
cid, _ = _res(file_hash)
file_hash = _b58e(cid.content_hash).decode()
except Exception:
pass
final_path = os.path.join(UPLOADS_DIR, f"{file_hash}")
if os.path.exists(final_path):
return await s_api_v1_5_storage_get(request, file_hash)
except Exception:
pass
# Not available locally; try trusted nodes
try:
async with request.app.ctx.memory.transaction("storage.proxy"):
# Collect the list of trusted nodes
session = request.ctx.db_session
nodes = (await session.execute(sa_select(KnownNode))).scalars().all()
candidates = []
for n in nodes:
role = (n.meta or {}).get('role') if n.meta else None
if role != 'trusted':
continue
host = (n.meta or {}).get('public_host') or (n.ip or '')
if not host:
continue
base = host.rstrip('/')
if not base.startswith('http'):
base = f"http://{base}:{n.port or 80}"
candidates.append(base)
# Proxy with Range passthrough, streaming
range_header = request.headers.get("Range")
timeout = httpx.Timeout(10.0, read=60.0)
for base in candidates:
url = f"{base}/api/v1/storage.fetch/{file_hash}"
try:
# Sign with NodeSig
from app.core._secrets import hot_seed, hot_pubkey
from app.core.network.nodesig import sign_headers
from app.core._utils.b58 import b58encode as _b58e
pk_b58 = _b58e(hot_pubkey).decode()
headers = sign_headers('GET', f"/api/v1/storage.fetch/{file_hash}", b"", hot_seed, pk_b58)
if range_header:
headers['Range'] = range_header
async with httpx.AsyncClient(timeout=timeout) as client:
r = await client.get(url, headers=headers)
if r.status_code == 404:
continue
if r.status_code not in (200, 206):
continue
# Proxy the content headers
resp = await request.respond(status=r.status_code, headers={
k: v for k, v in r.headers.items() if k.lower() in ("content-type", "content-length", "content-range", "accept-ranges")
})
async for chunk in r.aiter_bytes(chunk_size=1024*1024):
await resp.send(chunk)
await resp.eof()
return resp
except Exception as e:
continue
except Exception:
pass
return response.json({"error": "File not found"}, status=404)

View File

@ -1,18 +0,0 @@
from sanic import response
from app.core._config import PROJECT_HOST
async def s_api_tonconnect_manifest(request):
return response.json({
"url": f"{PROJECT_HOST}/#from=tonconnect",
"name": "@MY Node",
"iconUrl": "https://github.com/projscale/my-assets/blob/main/ton-connect.png?raw=true",
})
async def s_api_platform_metadata(request):
return response.json({
"name": "@MY",
"image": "https://github.com/projscale/my-assets/blob/main/ton-connect.png?raw=true"
})

View File

@ -1,70 +0,0 @@
from __future__ import annotations
from datetime import datetime
from sanic import response
from sqlalchemy import select
from app.core.ipfs_client import pin_add, pin_ls
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, IpfsSync
from app.core.network.nodesig import verify_request
from app.core.network.guard import check_rate_limit
async def s_api_v1_sync_pin(request):
# Rate limit per IP and require NodeSig for POST
remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip()
if not check_rate_limit(request.app.ctx.memory, remote_ip):
return response.json({"error": "RATE_LIMIT"}, status=429)
ok, node_id, reason = verify_request(request, request.app.ctx.memory)
if not ok:
return response.json({"error": reason or "UNAUTHORIZED"}, status=401)
data = request.json or {}
cid = data.get("encrypted_cid")
if not cid:
return response.json({"error": "BAD_REQUEST"}, status=400)
session = request.ctx.db_session
row = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == cid))).scalars().first()
if not row:
# create record with minimal info (unknown meta)
row = EncryptedContent(
encrypted_cid=cid,
title=cid,
description="",
content_type="application/octet-stream",
preview_enabled=False,
)
session.add(row)
await session.flush()
sync = (await session.execute(select(IpfsSync).where(IpfsSync.content_id == row.id))).scalars().first()
if not sync:
sync = IpfsSync(content_id=row.id, pin_state='queued')
session.add(sync)
await session.flush()
try:
await pin_add(cid, recursive=True)
sync.pin_state = 'pinned'
sync.pinned_at = datetime.utcnow()
except Exception as e:
make_log("sync", f"pin failed: {e}", level="error")
sync.pin_state = 'failed'
sync.pin_error = str(e)
await session.commit()
return response.json({"ok": True, "state": sync.pin_state})
async def s_api_v1_sync_status(request):
cid = request.args.get("cid")
if not cid:
return response.json({"error": "BAD_REQUEST"}, status=400)
try:
info = await pin_ls(cid)
state = 'pinned' if info else 'not_pinned'
except Exception:
state = 'not_pinned'
info = {}
return response.json({"cid": cid, "state": state, "info": info})

View File

@ -1,69 +0,0 @@
from datetime import datetime
from aiogram.utils.web_app import safe_parse_webapp_init_data
from sanic import response
from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info, WalletConnection
from sqlalchemy import select, and_
from app.core._config import TELEGRAM_API_KEY
from app.core.models.user import User
from app.core.logger import make_log
async def pause_ton_connection(ton_connect: TonConnect):
if ton_connect.connected:
ton_connect._sdk_client.pause_connection()
async def s_api_v1_tonconnect_new(request):
if not request.ctx.user:
return response.json({"error": "User not found"}, status=400)
wallet_app_name = request.args.get("wallet_app_name", "tonkeeper")
db_session = request.ctx.db_session
user = request.ctx.user
memory = request.ctx.memory
# Try restore last connection from DB
ton_connection = (await db_session.execute(select(WalletConnection).where(
and_(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False,
WalletConnection.network == 'ton'
)
).order_by(WalletConnection.created.desc()))).scalars().first()
if ton_connection:
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"])
await ton_connect.restore_connection()
else:
ton_connect = TonConnect()
make_log("TonConnect_API", f"SDK connected?: {ton_connect.connected}", level='info')
if ton_connect.connected:
return response.json({"error": "Already connected"}, status=400)
connection_link = await ton_connect.new_connection(wallet_app_name)
memory.add_task(pause_ton_connection, ton_connect, delay_s=60 * 3)
make_log("TonConnect_API", f"New connection link for {wallet_app_name}: {connection_link}", level='debug')
return response.json({
"connection_link": connection_link,
"wallet_app_name": wallet_app_name
})
async def s_api_v1_tonconnect_logout(request):
if not request.ctx.user:
return response.json({"error": "User not found"}, status=400)
db_session = request.ctx.db_session
user = request.ctx.user
memory = request.ctx.memory
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))
wallet_connections = result.scalars().all()
for wallet_connection in wallet_connections:
wallet_connection.invalidated = True
await db_session.commit()
return response.json({"success": True})

View File

@ -1,57 +0,0 @@
from sanic import response
from sqlalchemy import select
from app.core.models.content_v3 import UploadSession, EncryptedContent, ContentDerivative
from app.core._utils.resolve_content import resolve_content
async def s_api_v1_upload_status(request, upload_id: str):
session = request.ctx.db_session
row = await session.get(UploadSession, upload_id)
if not row:
return response.json({"error": "NOT_FOUND"}, status=404)
encrypted_hash = None
conversion = {"state": "not_started", "details": []}
if row.encrypted_cid:
cid_obj, err = resolve_content(row.encrypted_cid)
if not err:
encrypted_hash = cid_obj.content_hash_b58
ec = (await session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == row.encrypted_cid))).scalars().first()
if ec:
derivative_rows = (await session.execute(
select(ContentDerivative.kind, ContentDerivative.status).where(ContentDerivative.content_id == ec.id)
)).all()
details = [
{"kind": kind, "status": status}
for kind, status in derivative_rows
]
if ec.content_type and ec.content_type.startswith("audio/"):
required = {"decrypted_high", "decrypted_low"}
elif ec.content_type and ec.content_type.startswith("video/"):
required = {"decrypted_high", "decrypted_low", "decrypted_preview"}
else:
required = {"decrypted_original"}
statuses = {kind: status for kind, status in derivative_rows}
if required and all(statuses.get(k) == "ready" for k in required):
conv_state = "ready"
elif any(statuses.get(k) == "failed" for k in required):
conv_state = "failed"
elif any(statuses.get(k) in ("processing", "pending") for k in required):
conv_state = "processing"
elif required:
conv_state = "pending"
else:
conv_state = "not_started"
conversion = {"state": conv_state, "details": details}
return response.json({
"id": row.id,
"state": row.state,
"encrypted_cid": row.encrypted_cid,
"encrypted_hash": encrypted_hash,
"size_bytes": row.size_bytes,
"error": row.error,
"conversion": conversion,
})
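# Illustrative response shape for the status endpoint above (all values
# are invented; "conversion" mirrors the derivative readiness computed above):
EXAMPLE_STATUS_RESPONSE = {
    "id": "upload-123",
    "state": "pinned",
    "encrypted_cid": "Qm...",
    "encrypted_hash": "3vQB7B6MrGQZaxCuFg4oh",
    "size_bytes": 1048576,
    "error": None,
    "conversion": {
        "state": "processing",
        "details": [
            {"kind": "decrypted_high", "status": "ready"},
            {"kind": "decrypted_low", "status": "processing"},
        ],
    },
}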

View File

@ -1,328 +0,0 @@
from __future__ import annotations
import base64
import json
import os
from datetime import datetime
from typing import Dict, Any
import aiofiles
from base58 import b58encode
from sanic import response
import magic # type: ignore
from app.core._config import UPLOADS_DIR, PROJECT_HOST
from app.core._secrets import hot_pubkey
from app.core.crypto.aes_gcm_stream import encrypt_file_to_encf, CHUNK_BYTES
from app.core.crypto.keywrap import wrap_dek, KeyWrapError
from app.core.ipfs_client import add_streamed_file
from app.core.logger import make_log
from app.core.models.content_v3 import EncryptedContent, ContentKey, IpfsSync, ContentIndexItem, UploadSession
from app.core.models.node_storage import StoredContent
from app.core.storage import db_session
from app.core._utils.resolve_content import resolve_content
from app.core.events.service import record_event
from sqlalchemy import select
def _b64(s: bytes) -> str:
return base64.b64encode(s).decode()
async def s_api_v1_upload_tus_hook(request):
"""
tusd HTTP hook endpoint. We mainly handle post-finish to: encrypt -> IPFS add+pin -> record DB.
"""
try:
payload: Dict[str, Any] = request.json
except Exception:
payload = None
if payload is None:
raw_body = request.body or b''
try:
payload = json.loads(raw_body) if raw_body else {}
except Exception:
payload = {}
event = (payload.get("Type") or payload.get("type") or
payload.get("Event") or payload.get("event") or
payload.get("Hook") or payload.get("hook") or
payload.get("HookName") or payload.get("hook_name") or
request.headers.get("Hook-Name") or request.headers.get("hook-name"))
upload = payload.get("Upload") or payload.get("upload") or {}
if not event:
hook_name = (payload.get("HookName") or payload.get("hook") or
payload.get("hook_name") or request.headers.get("Hook-Name"))
raw = request.body or b''
preview = raw[:512]
make_log("tus-hook", f"Missing event type in hook payload; ignoring (hook={hook_name}, keys={list(payload.keys())}, raw={preview!r})", level="warning")
return response.json({"ok": True, "skipped": True})
if event not in ("post-finish", "postfinish"):
# accept but ignore other events
return response.json({"ok": True})
# Extract storage path from tusd payload
storage = upload.get("Storage") or {}
file_path = storage.get("Path") or storage.get("path")
if not file_path:
return response.json({"ok": False, "error": "NO_STORAGE_PATH"}, status=400)
meta = upload.get("MetaData") or {}
# Common metadata keys
title = meta.get("title") or meta.get("Title") or meta.get("name") or "Untitled"
artist = (meta.get("artist") or meta.get("Artist") or "").strip()
description = meta.get("description") or meta.get("Description") or ""
content_type = meta.get("content_type") or meta.get("Content-Type") or "application/octet-stream"
detected_content_type = None
try:
raw_detected = magic.from_file(file_path, mime=True)
if raw_detected:
detected_content_type = raw_detected.split(";")[0].strip()
except Exception as e:
make_log("tus-hook", f"magic MIME detection failed for {file_path}: {e}", level="warning")
def _is_av(mime: str | None) -> bool:
if not mime:
return False
return mime.startswith("audio/") or mime.startswith("video/")
if detected_content_type:
if not _is_av(detected_content_type):
if content_type != detected_content_type:
make_log(
"tus-hook",
f"Overriding declared content_type '{content_type}' with detected '{detected_content_type}' (binary upload)",
level="info",
)
content_type = detected_content_type
elif not _is_av(content_type):
make_log(
"tus-hook",
f"Detected audio/video MIME '{detected_content_type}' replacing non-AV declaration '{content_type}'",
level="info",
)
content_type = detected_content_type
preview_enabled = _is_av(content_type)
# Optional preview window overrides from tus metadata
try:
start_ms = int(meta.get("preview_start_ms") or 0)
dur_ms = int(meta.get("preview_duration_ms") or 30000)
except Exception:
start_ms, dur_ms = 0, 30000
# Record/Update upload session
upload_id = upload.get("ID") or upload.get("Id") or upload.get("id")
try:
size = int(upload.get("Size") or 0)
except Exception:
size = None
async with db_session() as session:
us = (await session.get(UploadSession, upload_id)) if upload_id else None
if not us and upload_id:
us = UploadSession(
id=upload_id,
filename=os.path.basename(file_path),
size_bytes=size,
state='processing',
encrypted_cid=None,
)
session.add(us)
await session.commit()
# Read & encrypt by streaming (ENCF v1 / AES-GCM)
# Generate per-content random DEK and salt
dek = os.urandom(32)
salt = os.urandom(16)
key_fpr = b58encode(hot_pubkey).decode() # fingerprint as our node id for now
# Stream encrypt into IPFS add
try:
wrapped_dek = wrap_dek(dek)
except KeyWrapError as e:
make_log("tus-hook", f"Key wrap failed: {e}", level="error")
async with db_session() as session:
if upload_id:
us = await session.get(UploadSession, upload_id)
if us:
us.state = 'failed'
us.error = str(e)
await session.commit()
return response.json({"ok": False, "error": "KEY_WRAP_FAILED"}, status=500)
try:
with open(file_path, 'rb') as f:
result = await add_streamed_file(
encrypt_file_to_encf(f, dek, CHUNK_BYTES, salt),
filename=os.path.basename(file_path),
params={},
)
except Exception as e:
make_log("tus-hook", f"Encrypt+add failed: {e}", level="error")
# mark failed
async with db_session() as session:
if upload_id:
us = await session.get(UploadSession, upload_id)
if us:
us.state = 'failed'
us.error = str(e)
await session.commit()
return response.json({"ok": False, "error": "ENCRYPT_ADD_FAILED"}, status=500)
encrypted_cid = result.get("Hash")
try:
enc_size = int(result.get("Size") or 0)
except Exception:
enc_size = None
encrypted_cid_obj, cid_err = resolve_content(encrypted_cid)
if cid_err:
make_log("tus-hook", f"Encrypted CID resolve failed: {cid_err}", level="error")
return response.json({"ok": False, "error": "INVALID_ENCRYPTED_CID"}, status=500)
encrypted_hash_b58 = encrypted_cid_obj.content_hash_b58
# Persist records
async with db_session() as session:
ec = EncryptedContent(
encrypted_cid=encrypted_cid,
title=title,
artist=artist or None,
description=description,
content_type=content_type,
enc_size_bytes=enc_size,
plain_size_bytes=os.path.getsize(file_path),
preview_enabled=preview_enabled,
preview_conf=({"duration_ms": dur_ms, "intervals": [[start_ms, start_ms + dur_ms]]} if preview_enabled else {}),
aead_scheme="AES_GCM",
chunk_bytes=CHUNK_BYTES,
salt_b64=_b64(salt),
)
session.add(ec)
await session.flush()
ck = ContentKey(
content_id=ec.id,
key_ciphertext_b64=wrapped_dek,
key_fingerprint=key_fpr,
issuer_node_id=key_fpr,
allow_auto_grant=True,
)
session.add(ck)
await session.flush()
sync = IpfsSync(
content_id=ec.id,
pin_state='pinned',
bytes_total=enc_size,
bytes_fetched=enc_size,
pinned_at=datetime.utcnow(),
)
session.add(sync)
existing_encrypted_content = (await session.execute(
select(StoredContent).where(StoredContent.hash == encrypted_hash_b58)
)).scalars().first()
if not existing_encrypted_content:
placeholder_meta = {
'content_type': content_type,
'storage': 'ipfs',
'encrypted_cid': encrypted_cid,
'upload_id': upload_id,
'source': 'tusd',
'title': title,
'artist': artist or None,
}
encrypted_stored_content = StoredContent(
type="local/encrypted_ipfs",
hash=encrypted_hash_b58,
content_id=encrypted_cid,
filename=os.path.basename(file_path),
meta=placeholder_meta,
user_id=request.ctx.user.id if request.ctx.user else None,
owner_address=None,
encrypted=True,
decrypted_content_id=None,
key_id=None,
created=datetime.utcnow(),
)
session.add(encrypted_stored_content)
# Publish signed index item
item = {
"encrypted_cid": encrypted_cid,
"title": title,
"description": description,
"artist": artist,
"content_type": content_type,
"size_bytes": enc_size,
"preview_enabled": preview_enabled,
"preview_conf": ec.preview_conf,
"issuer_node_id": key_fpr,
"salt_b64": _b64(salt),
"artist": artist or None,
}
try:
from app.core._crypto.signer import Signer
from app.core._secrets import hot_seed
signer = Signer(hot_seed)
blob = json.dumps(item, sort_keys=True, separators=(",", ":")).encode()
sig = signer.sign(blob)
except Exception:
sig = ""
session.add(ContentIndexItem(encrypted_cid=encrypted_cid, payload=item, sig=sig))
try:
await record_event(
session,
'content_uploaded',
{
'encrypted_cid': encrypted_cid,
'content_hash': encrypted_hash_b58,
'title': title,
'description': description,
'content_type': content_type,
'size_bytes': enc_size,
'user_id': request.ctx.user.id if getattr(request.ctx, 'user', None) else None,
'telegram_id': getattr(getattr(request.ctx, 'user', None), 'telegram_id', None),
},
origin_host=PROJECT_HOST,
)
except Exception as exc:
make_log("Events", f"Failed to record content_uploaded event: {exc}", level="warning")
await session.commit()
# Update upload session with result and purge staging to avoid duplicates
async with db_session() as session:
if upload_id:
us = await session.get(UploadSession, upload_id)
if us:
us.state = 'pinned'
us.encrypted_cid = encrypted_cid
us.error = None
if size:
us.size_bytes = size
# prefer using IPFS for downstream conversion; remove staging
try:
if file_path and os.path.exists(file_path):
os.remove(file_path)
except Exception:
pass
us.storage_path = None
await session.commit()
make_log("tus-hook", f"Uploaded+encrypted {file_path} -> {encrypted_cid}")
placeholder_path = os.path.join(UPLOADS_DIR, encrypted_hash_b58)
if not os.path.exists(placeholder_path):
try:
async with aiofiles.open(placeholder_path, "wb") as ph:
await ph.write(json.dumps({
"ipfs_cid": encrypted_cid,
"note": "Encrypted payload stored in IPFS"
}).encode())
except Exception as e:
make_log("tus-hook", f"Failed to create placeholder for {encrypted_hash_b58}: {e}", level="warning")
return response.json({"ok": True, "encrypted_cid": encrypted_cid, "upload_id": upload_id})


@ -7,9 +7,6 @@ from app.bot.middleware import UserDataMiddleware
from app.bot.routers.index import main_router
def create_dispatcher() -> Dispatcher:
"""Create aiogram Dispatcher lazily to avoid event loop issues at import time."""
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)
return dp
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)


@ -1,7 +1,6 @@
from app.core.logger import make_log, logger
from app.core.models._telegram import Wrapped_CBotChat
from app.core.models.user import User
from sqlalchemy import select
from app.core.storage import db_session
from aiogram import BaseMiddleware, types
from app.core.models.messages import KnownTelegramMessage
@ -22,9 +21,9 @@ class UserDataMiddleware(BaseMiddleware):
# TODO: maybe make users cache
async with db_session(auto_commit=False) as session:
with db_session(auto_commit=False) as session:
try:
user = (await session.execute(select(User).where(User.telegram_id == user_id))).scalars().first()
user = session.query(User).filter_by(telegram_id=user_id).first()
except BaseException as e:
logger.error(f"Error when middleware getting user: {e}")
user = None
@ -43,7 +42,7 @@ class UserDataMiddleware(BaseMiddleware):
created=datetime.now()
)
session.add(user)
await session.commit()
session.commit()
else:
if user.username != update_body.from_user.username:
user.username = update_body.from_user.username
@ -61,7 +60,7 @@ class UserDataMiddleware(BaseMiddleware):
}
user.last_use = datetime.now()
await session.commit()
session.commit()
data['user'] = user
data['db_session'] = session
@ -73,11 +72,11 @@ class UserDataMiddleware(BaseMiddleware):
if update_body.text.startswith('/start'):
message_type = 'start_command'
if (await session.execute(select(KnownTelegramMessage).where(
(KnownTelegramMessage.chat_id == update_body.chat.id) &
(KnownTelegramMessage.message_id == update_body.message_id) &
(KnownTelegramMessage.from_user == True)
))).scalars().first():
if session.query(KnownTelegramMessage).filter_by(
chat_id=update_body.chat.id,
message_id=update_body.message_id,
from_user=True
).first():
make_log("UserDataMiddleware", f"Message {update_body.message_id} already processed", level='debug')
return
@ -92,7 +91,7 @@ class UserDataMiddleware(BaseMiddleware):
meta={}
)
session.add(new_message)
await session.commit()
session.commit()
result = await handler(event, data)
return result
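The hunks above replace SQLAlchemy 2.0-style async lookups with the legacy synchronous Query API. For reference, the two idioms side by side (a sketch reusing the User model imported in this file):

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Session
from app.core.models.user import User

async def get_user_async(session: AsyncSession, telegram_id: int):
    # SQLAlchemy 2.0 style: build a select() and execute it on an AsyncSession
    result = await session.execute(select(User).where(User.telegram_id == telegram_id))
    return result.scalars().first()

def get_user_sync(session: Session, telegram_id: int):
    # Legacy style: Query API on a synchronous Session
    return session.query(User).filter_by(telegram_id=telegram_id).first()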


@ -1,16 +1,11 @@
import base58
from aiogram import types, Router, F
from collections import defaultdict
from datetime import datetime
from typing import Optional
from app.core._config import WEB_APP_URLS
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
from app.core.models.node_storage import StoredContent
from app.core.models.content_v3 import UploadSession, EncryptedContent, ContentDerivative
from sqlalchemy import select, and_, or_
import json
router = Router()
@ -22,147 +17,25 @@ def chunks(lst, n):
yield lst[i:i + n]
async def _compute_content_status(db_session, encrypted_cid: Optional[str], fallback_content_type: Optional[str] = None):
if not encrypted_cid:
return {
'final_state': 'uploaded',
'conversion_state': 'pending',
'upload_state': None,
'summary': {},
'details': [],
'title': None,
'content_type': fallback_content_type,
}
ec = (await db_session.execute(select(EncryptedContent).where(EncryptedContent.encrypted_cid == encrypted_cid))).scalars().first()
content_type = fallback_content_type or (ec.content_type if ec else None) or 'application/octet-stream'
derivative_rows = []
if ec:
derivative_rows = (await db_session.execute(select(ContentDerivative).where(ContentDerivative.content_id == ec.id))).scalars().all()
upload_row = (await db_session.execute(select(UploadSession).where(UploadSession.encrypted_cid == encrypted_cid))).scalars().first()
derivative_sorted = sorted(derivative_rows, key=lambda row: row.created_at or datetime.min)
derivative_latest = {}
summary = defaultdict(int)
details = []
for row in derivative_sorted:
derivative_latest[row.kind] = row
for kind, row in derivative_latest.items():
summary[row.status] += 1
details.append({
'kind': kind,
'status': row.status,
'size_bytes': row.size_bytes,
'error': row.error,
'updated_at': (row.last_access_at or row.created_at).isoformat() + 'Z' if (row.last_access_at or row.created_at) else None,
})
if content_type.startswith('audio/'):
required = {'decrypted_low', 'decrypted_high'}
elif content_type.startswith('video/'):
required = {'decrypted_low', 'decrypted_high', 'decrypted_preview'}
else:
required = {'decrypted_original'}
statuses_by_kind = {kind: derivative_latest[kind].status for kind in required if kind in derivative_latest}
conversion_state = 'pending'
if required and all(statuses_by_kind.get(kind) == 'ready' for kind in required):
conversion_state = 'ready'
elif any(statuses_by_kind.get(kind) == 'failed' for kind in required):
conversion_state = 'failed'
elif any(statuses_by_kind.get(kind) in ('processing', 'pending') for kind in required):
conversion_state = 'processing'
elif statuses_by_kind:
conversion_state = 'partial'
upload_state = upload_row.state if upload_row else None
final_state = 'ready' if conversion_state == 'ready' else None
if not final_state:
if conversion_state == 'failed' or upload_state in ('failed', 'conversion_failed'):
final_state = 'failed'
elif conversion_state in ('processing', 'partial') or upload_state in ('processing', 'pinned'):
final_state = 'processing'
else:
final_state = 'uploaded'
return {
'final_state': final_state,
'conversion_state': conversion_state,
'upload_state': upload_state,
'summary': dict(summary),
'details': details,
'title': ec.title if ec else None,
'content_type': content_type,
}
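# Worked example of the aggregation above, for a hypothetical audio upload
# (required kinds: {'decrypted_low', 'decrypted_high'}):
#   statuses_by_kind = {'decrypted_low': 'ready', 'decrypted_high': 'processing'}
#   -> not every required kind is 'ready', none is 'failed', one is in flight,
#      so conversion_state = 'processing'; combined with upload_state = 'pinned',
#      final_state therefore resolves to 'processing'.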
async def t_callback_owned_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
message_text = user.translated("ownedContent_menu")
content_list = []
user_addr = await user.wallet_address_async(db_session)
conditions = []
if user_addr:
conditions.append(and_(StoredContent.owner_address == user_addr, StoredContent.type.like('onchain%')))
conditions.append(and_(StoredContent.user_id == user.id, StoredContent.type.like('local/%')))
if not conditions:
conditions = [StoredContent.user_id == user.id]
stmt = select(StoredContent).where(
StoredContent.disabled.is_(None),
or_(*conditions) if len(conditions) > 1 else conditions[0]
).order_by(StoredContent.created.desc())
rows = (await db_session.execute(stmt)).scalars().all()
onchain_hashes = set()
local_items = []
icon_map = {
'ready': '',
'processing': '',
'failed': '⚠️',
'uploaded': '📦',
}
for content in rows:
meta = content.meta or {}
encrypted_cid = meta.get('content_cid') or meta.get('encrypted_cid') or content.content_id
status_info = await _compute_content_status(db_session, encrypted_cid, meta.get('content_type'))
icon = icon_map.get(status_info['final_state'], '📦')
if content.type.startswith('onchain'):
try:
metadata_content = await StoredContent.from_cid_async(db_session, content.json_format()['metadata_cid'])
with open(metadata_content.filepath, 'r') as f:
metadata_content_json = json.loads(f.read())
except BaseException as e:
make_log("OwnedContent", f"Can't get metadata content: {e}", level='warning')
continue
onchain_hashes.add(content.hash)
display_name = metadata_content_json.get('name') or content.cid.serialize_v2()
content_list.append([
{
'text': f"{icon} {display_name}"[:64],
'callback_data': f'NC_{content.id}'
}
])
else:
local_items.append((content, status_info, icon))
for content, status_info, icon in local_items:
if content.hash in onchain_hashes:
for content in db_session.query(StoredContent).filter_by(
owner_address=user.wallet_address(db_session),
type='onchain/content'
).all():
try:
metadata_content = StoredContent.from_cid(db_session, content.json_format()['metadata_cid'])
with open(metadata_content.filepath, 'r') as f:
metadata_content_json = json.loads(f.read())
except BaseException as e:
make_log("OwnedContent", f"Can't get metadata content: {e}", level='warning')
continue
meta = content.meta or {}
encrypted_cid = meta.get('encrypted_cid') or content.content_id
display_name = status_info['title'] or content.filename or content.cid.serialize_v2()
button_text = f"{icon} {display_name}"
content_list.append([
{
'text': button_text[:64],
'callback_data': f'LC_{content.id}'
'text': metadata_content_json['name'],
'callback_data': f'NC_{content.id}'
}
])
@ -186,9 +59,10 @@ async def t_callback_owned_content(query: types.CallbackQuery, memory=None, user
async def t_callback_node_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
content_oid = int(query.data.split('_')[1])
row = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
return await chat_wrap.send_content(
db_session, row,
db_session, db_session.query(StoredContent).filter_by(
id=content_oid
).first(),
extra_buttons=[
[{
'text': user.translated('back_button'),
@ -202,51 +76,3 @@ async def t_callback_node_content(query: types.CallbackQuery, memory=None, user=
router.callback_query.register(t_callback_owned_content, F.data == 'ownedContent')
router.callback_query.register(t_callback_node_content, F.data.startswith('NC_'))
async def t_callback_local_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
content_oid = int(query.data.split('_')[1])
content = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
if not content:
return await query.answer(user.translated('error_contentNotFound'), show_alert=True)
upload_id = (content.meta or {}).get('upload_id')
upload_session = await db_session.get(UploadSession, upload_id) if upload_id else None
encrypted_cid = (content.meta or {}).get('encrypted_cid') or content.content_id
status_info = await _compute_content_status(db_session, encrypted_cid, (content.meta or {}).get('content_type'))
display_name = status_info['title'] or content.filename or content.cid.serialize_v2()
state_label = {
'ready': 'Ready',
'processing': 'Processing',
'failed': 'Failed',
'uploaded': 'Uploaded',
}.get(status_info['final_state'], 'Status unknown')
lines = [
f"<b>{display_name}</b>",
f"State: {state_label}"
]
if upload_session:
lines.append(f"Upload status: {upload_session.state}")
if upload_session.error:
lines.append(f"Error: {upload_session.error}")
if status_info['summary']:
lines.append("Conversion:")
for status, count in status_info['summary'].items():
lines.append(f"{status}: {count}")
await chat_wrap.send_message(
'\n'.join(lines),
message_type='notification',
message_meta={'content_id': content.id},
reply_markup=get_inline_keyboard([
[{
'text': user.translated('back_button'),
'callback_data': 'ownedContent'
}]
])
)
router.callback_query.register(t_callback_local_content, F.data.startswith('LC_'))


@ -3,11 +3,9 @@ from aiogram.filters import Command
from tonsdk.utils import Address
from app.core._blockchain.ton.connect import TonConnect
from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.models.wallet_connection import WalletConnection
from app.core._config import PROJECT_HOST
main_router = Router()
@ -34,14 +32,8 @@ async def send_home_menu(chat_wrap, user, wallet_connection, **kwargs):
async def send_connect_wallets_list(db_session, chat_wrap, user, **kwargs):
# Try to restore existing connection via DB
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
if ton_connection:
await ton_connect.restore_connection()
ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
await ton_connect.restore_connection()
wallets = ton_connect._sdk_client.get_wallets()
message_text = user.translated("connectWalletsList_menu")
return await tg_process_template(
@ -74,9 +66,10 @@ async def t_home_menu(__msg, **extra):
else:
message_id = None
wallet_connection = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().first()
wallet_connection = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).first()
# if not wallet_connection:
# return await send_connect_wallets_list(db_session, chat_wrap, user, message_id=message_id)
@ -84,35 +77,6 @@ async def t_home_menu(__msg, **extra):
return await send_home_menu(chat_wrap, user, wallet_connection, message_id=message_id)
async def t_admin_panel(message: types.Message, **extra):
user = extra.get('user')
chat_wrap = extra.get('chat_wrap')
admin_host = (PROJECT_HOST or '').rstrip('/')
if not user or not getattr(user, 'is_admin', False):
await chat_wrap.send_message("Доступ к админ-панели ограничен.")
return
if not admin_host:
await chat_wrap.send_message("Адрес админ-панели не настроен на этой ноде.")
return
admin_url = f"{admin_host}/admin"
buttons = []
if admin_url.startswith('https://'):
buttons.append({
'text': 'Open in Telegram',
'web_app': types.WebAppInfo(url=admin_url),
})
buttons.append({
'text': 'Open in browser',
'url': admin_url,
})
keyboard = get_inline_keyboard([buttons]) if buttons else None
await chat_wrap.send_message(
"Админ-панель доступна по кнопке ниже.",
keyboard=keyboard,
)
main_router.message.register(t_home_menu, Command('start'))
main_router.message.register(t_admin_panel, Command('admin'))
main_router.callback_query.register(t_home_menu, F.data == 'home')
router = main_router
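TonConnect.by_user is introduced by these hunks without its definition appearing in the diff. Judging from the manual lookups it replaces, it plausibly wraps the same query and key restore, roughly as below (a hypothetical reconstruction using the WalletConnection model imported in this file, not the actual implementation):

# Hypothetical sketch of TonConnect.by_user, inferred from the removed lines:
@classmethod
def by_user(cls, db_session, user, callback_fn=None):
    # callback_fn wiring omitted; its role is not visible in this diff
    connection = db_session.query(WalletConnection).filter(
        WalletConnection.user_id == user.id,
        WalletConnection.invalidated == False,
        WalletConnection.network == 'ton',
    ).order_by(WalletConnection.created.desc()).first()
    ton_connect = cls.by_key(connection.keys["connection_key"]) if connection else cls()
    return ton_connect, connection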


@ -7,7 +7,6 @@ from aiogram.filters import Command
from app.bot.routers.home import send_connect_wallets_list, send_home_menu
from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info
from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
@ -34,21 +33,15 @@ async def t_tonconnect_dev_menu(message: types.Message, memory=None, user=None,
keyboard = []
# Restore recent connection
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
make_log("TonConnect_DevMenu", f"Available wallets: {ton_connect._sdk_client.get_wallets()}", level='debug')
if ton_connection:
await ton_connect.restore_connection()
await ton_connect.restore_connection()
make_log("TonConnect_DevMenu", f"SDK connected?: {ton_connect.connected}", level='info')
if not ton_connect.connected:
if ton_connection:
make_log("TonConnect_DevMenu", f"Invalidating old connection", level='debug')
ton_connection.invalidated = True
await db_session.commit()
db_session.commit()
message_text = f"""<b>Wallet is not connected</b>
@ -78,13 +71,8 @@ Use /dev_tonconnect <code>{wallet_app_name}</code> for connect to wallet."""
async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
wallet_app_name = query.data.split("_")[1]
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
if ton_connection:
await ton_connect.restore_connection()
ton_connect, ton_connection = TonConnect.by_user(db_session, user)
await ton_connect.restore_connection()
connection_link = await ton_connect.new_connection(wallet_app_name)
ton_connect.connected
memory.add_task(pause_ton_connection, ton_connect, delay_s=60 * 3)
@ -110,9 +98,10 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
start_ts = datetime.now()
while datetime.now() - start_ts < timedelta(seconds=180):
new_connection = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().first()
new_connection = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).first()
if new_connection:
await tg_process_template(
chat_wrap, user.translated('p_successConnectWallet')
@ -126,13 +115,14 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
async def t_callback_disconnect_wallet(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
wallet_connections = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().all()
wallet_connections = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).all()
for wallet_connection in wallet_connections:
wallet_connection.invalidated = True
await db_session.commit()
db_session.commit()
return await send_home_menu(chat_wrap, user, None, message_id=query.message.message_id)
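The 180-second wait above polls the database for a fresh WalletConnection; the sleep between iterations is not visible in this hunk. A generic polling helper of the same shape, assuming asyncio:

import asyncio
from datetime import datetime, timedelta

async def wait_until(predicate, timeout_s: float = 180.0, interval_s: float = 2.0):
    """Poll an async predicate until it returns a truthy value or the timeout elapses."""
    deadline = datetime.now() + timedelta(seconds=timeout_s)
    while datetime.now() < deadline:
        value = await predicate()
        if value:
            return value
        await asyncio.sleep(interval_s)
    return None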


@ -6,9 +6,6 @@ from aiogram.fsm.storage.memory import MemoryStorage
from app.bot.middleware import UserDataMiddleware
from app.client_bot.routers.index import main_router
def create_dispatcher() -> Dispatcher:
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)
return dp
dp = Dispatcher(storage=MemoryStorage())
dp.update.outer_middleware(UserDataMiddleware())
dp.include_router(main_router)


@ -6,7 +6,6 @@ from aiogram import types, Router, F
from app.core._keyboards import get_inline_keyboard
from app.core.models.node_storage import StoredContent
from sqlalchemy import select, and_
import json
from app.core.logger import make_log
from app.core.models.content.user_content import UserAction, UserContent
@ -31,7 +30,7 @@ CACHE_CHAT_ID = -1002390124789
async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=None, user=None, db_session=None, chat_wrap=None, **extra):
content_oid = int(query.data.split('_')[1])
is_cancel_request = query.data.split('_')[2] == 'cancel' if len(query.data.split('_')) > 2 else False
content = (await db_session.execute(select(StoredContent).where(StoredContent.id == content_oid))).scalars().first()
content = db_session.query(StoredContent).filter_by(id=content_oid).first()
if not content:
return await query.answer(user.translated('error_contentNotFound'), show_alert=True)
@ -44,16 +43,11 @@ async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=No
make_log("Purchase", f"User {user.id} initiated purchase for content ID {content_oid}. License price: {license_price_num}.", level='info')
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
if ton_connection:
await ton_connect.restore_connection()
ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
await ton_connect.restore_connection()
assert ton_connect.connected, "No connected wallet"
user_wallet_address = await user.wallet_address_async(db_session)
user_wallet_address = user.wallet_address(db_session)
memory._app.add_task(ton_connect._sdk_client.send_transaction({
'valid_until': int(datetime.now().timestamp() + 300),
@ -82,15 +76,18 @@ async def t_callback_purchase_node_content(query: types.CallbackQuery, memory=No
else:
# Logging cancellation attempt with detailed information
make_log("Purchase", f"User {user.id} cancelled purchase for content ID {content_oid}.", level='info')
action = (await db_session.execute(select(UserAction).where(
and_(UserAction.type == 'purchase', UserAction.content_id == content_oid, UserAction.user_id == user.id, UserAction.status == 'requested')
))).scalars().first()
action = db_session.query(UserAction).filter_by(
type='purchase',
content_id=content_oid,
user_id=user.id,
status='requested'
).first()
if not action:
return await query.answer()
action.status = 'canceled'
await db_session.commit()
db_session.commit()
await chat_wrap.send_content(db_session, content, message_id=query.message.message_id)
@ -107,7 +104,9 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
args = None
if source_args_ext.startswith('Q'):
license_onchain_address = source_args_ext[1:]
licensed_content = (await db_session.execute(select(UserContent).where(UserContent.onchain_address == license_onchain_address))).scalars().first().content
licensed_content = db_session.query(UserContent).filter_by(
onchain_address=license_onchain_address,
).first().content
make_log("InlineSearch", f"Query '{query.query}' is a license query for content ID {licensed_content.id}.", level='info')
args = licensed_content.cid.serialize_v2()
else:
@ -119,15 +118,15 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
content_list = []
search_query = {'hash': cid.content_hash_b58}
make_log("InlineSearch", f"Searching with query '{search_query}'.", level='info')
content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == cid.content_hash_b58))).scalars().first()
content_prod = await content.open_content_async(db_session)
content = db_session.query(StoredContent).filter_by(**search_query).first()
content_prod = content.open_content(db_session)
# Get both encrypted and decrypted content objects
encrypted_content = content_prod['encrypted_content']
decrypted_content = content_prod['decrypted_content']
decrypted_content_meta = decrypted_content.json_format()
try:
metadata_content = await StoredContent.from_cid_async(db_session, content.json_format()['metadata_cid'])
metadata_content = StoredContent.from_cid(db_session, content.json_format()['metadata_cid'])
with open(metadata_content.filepath, 'r') as f:
metadata_content_json = json.loads(f.read())
except BaseException as e:
@ -145,7 +144,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
result_kwargs = {}
try:
cover_content = await StoredContent.from_cid_async(db_session, decrypted_content_meta.get('cover_cid') or None)
cover_content = StoredContent.from_cid(db_session, decrypted_content_meta.get('cover_cid') or None)
except BaseException as e:
cover_content = None
@ -153,7 +152,9 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
result_kwargs['thumb_url'] = cover_content.web_url
content_type_declared = decrypted_content_meta.get('content_type', 'application/x-binary').split('/')[0]
preview_content = (await db_session.execute(select(StoredContent).where(StoredContent.hash == content.meta.get('converted_content', {}).get('low_preview')))).scalars().first()
preview_content = db_session.query(StoredContent).filter_by(
hash=content.meta.get('converted_content', {}).get('low_preview')
).first()
content_type_declared = {
'mp3': 'audio',
'flac': 'audio',
@ -195,7 +196,7 @@ async def t_inline_query_node_content(query: types.InlineQuery, memory=None, use
**decrypted_content.meta,
'telegram_file_cache_preview': preview_file_id
}
await db_session.commit()
db_session.commit()
except Exception as e:
# Logging error during preview upload with detailed content type and query information
make_log("InlineSearch", f"Error uploading preview for content type '{content_type_declared}' during inline query '{query.query}': {e}", level='error')


@ -3,13 +3,11 @@ from aiogram.filters import Command
from tonsdk.utils import Address
from app.core._blockchain.ton.connect import TonConnect
from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
from app.core.models.wallet_connection import WalletConnection
from app.core.models.node_storage import StoredContent
from app.core._config import PROJECT_HOST
main_router = Router()
@ -34,13 +32,8 @@ async def send_home_menu(chat_wrap, user, wallet_connection, **kwargs):
async def send_connect_wallets_list(db_session, chat_wrap, user, **kwargs):
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
if ton_connection:
await ton_connect.restore_connection()
ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
await ton_connect.restore_connection()
wallets = ton_connect._sdk_client.get_wallets()
message_text = user.translated("connectWalletsList_menu")
return await tg_process_template(
@ -73,9 +66,10 @@ async def t_home_menu(__msg, **extra):
else:
message_id = None
wallet_connection = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().first()
wallet_connection = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).first()
# if not wallet_connection:
# return await send_connect_wallets_list(db_session, chat_wrap, user, message_id=message_id)
@ -87,44 +81,12 @@ async def t_home_menu(__msg, **extra):
make_log("Home", f"Home menu args: {args}", level='debug')
if args:
if args[0].startswith('C'):
payload = args[0][1:]
if '!' in payload:
payload = payload.split('!', 1)[0]
content = StoredContent.from_cid(db_session, payload)
content = StoredContent.from_cid(db_session, args[0][1:])
return await chat_wrap.send_content(db_session, content, message_id=message_id)
return await send_home_menu(chat_wrap, user, wallet_connection, message_id=message_id)
async def t_admin_panel(message: types.Message, **extra):
user = extra.get('user')
chat_wrap = extra.get('chat_wrap')
admin_host = (PROJECT_HOST or '').rstrip('/')
if not user or not getattr(user, 'is_admin', False):
await chat_wrap.send_message("Доступ к админ-панели ограничен.")
return
if not admin_host:
await chat_wrap.send_message("Адрес админ-панели не настроен на этой ноде.")
return
admin_url = f"{admin_host}/admin"
buttons = []
if admin_url.startswith('https://'):
buttons.append({
'text': 'Open in Telegram',
'web_app': types.WebAppInfo(url=admin_url),
})
buttons.append({
'text': 'Open in browser',
'url': admin_url,
})
keyboard = get_inline_keyboard([buttons]) if buttons else None
await chat_wrap.send_message(
"Админ-панель доступна по кнопке ниже.",
keyboard=keyboard,
)
main_router.message.register(t_home_menu, Command('start'))
main_router.message.register(t_admin_panel, Command('admin'))
main_router.callback_query.register(t_home_menu, F.data == 'home')
router = main_router


@ -1,6 +1,4 @@
from aiogram import types, Router, F
from sqlalchemy import select
from app.core.logger import make_log
from app.core.models import StarsInvoice
@ -14,10 +12,9 @@ async def t_pre_checkout_query_stars_processing(pre_checkout_query: types.PreChe
invoice_id = pre_checkout_query.invoice_payload
result = await db_session.execute(
select(StarsInvoice).where(StarsInvoice.external_id == invoice_id)
)
existing_invoice = result.scalars().first()
existing_invoice = db_session.query(StarsInvoice).filter(
StarsInvoice.external_id == invoice_id
).first()
if not existing_invoice:
return await pre_checkout_query.answer(ok=False, error_message="Invoice not found")
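The accept path is symmetric: once the payload matches a stored invoice, the handler answers with ok=True and Telegram proceeds with the charge. A minimal sketch using the aiogram types already imported here:

# Accept path (sketch), for contrast with the rejection above:
async def approve_known_invoice(pre_checkout_query: types.PreCheckoutQuery):
    await pre_checkout_query.answer(ok=True)
    # Telegram then completes the payment and sends a successful_payment message.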


@ -7,7 +7,6 @@ from aiogram.filters import Command
from app.client_bot.routers.home import send_connect_wallets_list, send_home_menu
from app.core._blockchain.ton.connect import TonConnect, unpack_wallet_info
from sqlalchemy import select, and_
from app.core._keyboards import get_inline_keyboard
from app.core._utils.tg_process_template import tg_process_template
from app.core.logger import make_log
@ -35,20 +34,15 @@ async def t_tonconnect_dev_menu(message: types.Message, memory=None, user=None,
keyboard = []
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
ton_connect, ton_connection = TonConnect.by_user(db_session, user, callback_fn=())
make_log("TonConnect_DevMenu", f"Available wallets: {ton_connect._sdk_client.get_wallets()}", level='debug')
if ton_connection:
await ton_connect.restore_connection()
await ton_connect.restore_connection()
make_log("TonConnect_DevMenu", f"SDK connected?: {ton_connect.connected}", level='info')
if not ton_connect.connected:
if ton_connection:
make_log("TonConnect_DevMenu", f"Invalidating old connection", level='debug')
ton_connection.invalidated = True
await db_session.commit()
db_session.commit()
message_text = f"""<b>Wallet is not connected</b>
@ -79,13 +73,8 @@ Use /dev_tonconnect <code>{wallet_app_name}</code> for connect to wallet."""
async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, user=None, db_session=None,
chat_wrap=None, **extra):
wallet_app_name = query.data.split("_")[1]
result = await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False, WalletConnection.network == 'ton')
).order_by(WalletConnection.created.desc()))
ton_connection = result.scalars().first()
ton_connect = TonConnect.by_key(ton_connection.keys["connection_key"]) if ton_connection else TonConnect()
if ton_connection:
await ton_connect.restore_connection()
ton_connect, ton_connection = TonConnect.by_user(db_session, user)
await ton_connect.restore_connection()
connection_link = await ton_connect.new_connection(wallet_app_name)
ton_connect.connected
memory.add_task(pause_ton_connection, ton_connect, delay_s=60 * 3)
@ -111,9 +100,10 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
start_ts = datetime.now()
while datetime.now() - start_ts < timedelta(seconds=180):
new_connection = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().first()
new_connection = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).first()
if new_connection:
await tg_process_template(
chat_wrap, user.translated('p_successConnectWallet')
@ -128,13 +118,14 @@ async def t_callback_init_tonconnect(query: types.CallbackQuery, memory=None, us
async def t_callback_disconnect_wallet(query: types.CallbackQuery, memory=None, user=None, db_session=None,
chat_wrap=None, **extra):
wallet_connections = (await db_session.execute(select(WalletConnection).where(
and_(WalletConnection.user_id == user.id, WalletConnection.invalidated == False)
))).scalars().all()
wallet_connections = db_session.query(WalletConnection).filter(
WalletConnection.user_id == user.id,
WalletConnection.invalidated == False
).all()
for wallet_connection in wallet_connections:
wallet_connection.invalidated = True
await db_session.commit()
db_session.commit()
return await send_home_menu(chat_wrap, user, None, message_id=query.message.message_id)


@ -5,7 +5,7 @@ from app.core._secrets import service_wallet
class Blank(Contract):
code = 'b5ee9c72410104010042000114ff00f4a413f4bcf2c80b010202ca03020007a0dd7c12004fd043a0e9ae43f48061da89a1f480618e0be5c323a803a1a843f60803a1da3ddaa7a861daa9e2026f102bdd33'
code = 'B5EE9C72010104010042000114FF00F4A413F4BCF2C80B010202CA0203004FD043A0E9AE43F48061DA89A1F480618E0BE5C323A803A1A843F60803A1DA3DDAA7A861DAA9E2026F0007A0DD7C12'
def __init__(self, **kwargs):
kwargs['code'] = Cell.one_from_boc(self.code)

File diff suppressed because one or more lines are too long


@ -3,7 +3,7 @@ from tonsdk.contract import Contract
class Platform(Contract):
code = 'b5ee9c724102160100032e000114ff00f4a413f4bcf2c80b010201620d0202012006030201200504004bbac877582f053b50ddfe5a9533f2e76ac054411db94432a1f7b7ae17fc64cf7aec5df8705d580057b905bed44d0fa4001f861d3ff01f862d401f863f843d0d431d430f864d401f865d1f845d0f84201d430f84180201200a07020120090800a1b4f47da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba3f089f050e0079197ff92826190a0079e2d960f9992e04191960227e801e801960193f200e0e9919605940f97ff93a10000fb5daeeb00c9f05100201200c0b0059b6a9bda89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e1f051f085f087f089f08b00051b56ba63da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e391960f999300202c70f0e0007a0dd7c120201cf111000113e910c30003cb8536002f30cf434c0c05c6c2497c0f83e90087c007e900c7e800c5c75c87e800c7e800c1cea6d0008f5d27048245c2540f4c7d411388830002497c1783b51343e90007e1874ffc07e18b5007e18fe10f4350c750c3e1935007e1974482084091ea7aeaea497c178082084152474232ea3a14c104c36cf380c4cbe1071c160131201faf2e19120820833cc77ba9730d4d30730fb00e0208210b99cd03bba9701fa4001f86101de208210d81c632fba9601d401f86501de208210b5de5f9eba8e8b30fa40fa00306d6d71db3ce082102fa30f96ba8e16f404216e91319301fb04e2f40430206e913092ed54e2e030f845f843f842c8f841cf16cbffccccc9ed541501f682084c4b4001a013bef2e20801d3fffa4021d70b01c0009231029133e202d4d430f844f82870f842c8cbffc9c85003cf16cb07ccc97020c8cb0113f400f400cb00c920f9007074c8cb02ca07cbffc9d0f843d070c804d014cf16f843f842c8cbfff828cf16c903d430c8cc13ccc9c8cc17cbff5007cf1614cc15cc14013ccc43308040db3cf842a4f862f845f843f842c8f841cf16cbffccccc9ed54150078708010c8cb055006cf165004fa0214cb68216e947032cb009bc858cf17c97158cb00f400e2226e95327058cb0099c85003cf17c958f400e2c901fb003366cbbe'
code = 'b5ee9c7241021601000310000114ff00f4a413f4bcf2c80b010201620d0202012006030201200504004bbac877282f037625a5e1bf4a9bb4e8e57adf780d02781ee2c2b80129dc6a90f23b01657f9d980057b905bed44d0fa4001f861d3ff01f862d401f863f843d0d431d430f864d401f865d1f845d0f84201d430f84180201200a07020120090800a1b4f47da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba3f089f050e0079197ff92826190a0079e2d960f9992e04191960227e801e801960193f200e0e9919605940f97ff93a10000fb5daeeb00c9f05100201200c0b0059b6a9bda89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e1f051f085f087f089f08b00051b56ba63da89a1f48003f0c3a7fe03f0c5a803f0c7f087a1a863a861f0c9a803f0cba2e391960f999300202c70f0e0007a0dd7c120201cf111000113e910c30003cb8536002f30cf434c0c05c6c2497c0f83e90087c007e900c7e800c5c75c87e800c7e800c1cea6d0008f5d27048245c2540f4c7d411388830002497c1783b51343e90007e1874ffc07e18b5007e18fe10f4350c750c3e1935007e1974482084091ea7aeaea497c178082084152474232ea3a14c104c36cf380c4cbe1071c160131201dcf2e19120820833cc77ba9730d4d30730fb00e0208210b99cd03bba9701fa4001f86101de208210d81c632fba9601d401f86501de208210b5de5f9eba8e8b30fa40fa00306d6d71db3ce082102fa30f96ba98d401fb04d430ed54e030f845f843f842c8f841cf16cbffccccc9ed541502f082084c4b4001a013bef2e20801d3ffd4d430f844f82870f842c8cbffc9c85003cf16cb07ccc97020c8cb0113f400f400cb00c920f9007074c8cb02ca07cbffc9d0f843d070c804d014cf16f843f842c8cbfff828cf16c903d430c8cc13ccc9c8cc17cbff5007cf1614cc15cccc43308040db3cf842a4f86215140024f845f843f842c8f841cf16cbffccccc9ed540078708010c8cb055006cf165004fa0214cb68216e947032cb019bc858cf17c97158cb00f400e2226e95327058cb0099c85003cf17c958f400e2c901fb004e32cb65'
codebase_version = 5
def __init__(self, **kwargs):


@ -0,0 +1,226 @@
from __future__ import annotations
import base64
import hmac
import json
import logging
import time
from dataclasses import dataclass
from hashlib import sha256
from typing import Any, Dict, Optional, Tuple, List
from tonsdk.utils import Address
from app.core._blockchain.ton.toncenter import toncenter
from app.core._blockchain.ton.connect import TonConnect
from app.core.logger import make_log
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class TonProofPayload:
"""
Минимальная модель tonProof-пакета для валидации подписи кошелька.
Поля приводятся к совместимой форме с pytonconnect/тон-кошельками.
"""
address: str
public_key: str
timestamp: int
domain_val: str
domain_len: int
payload: str # произвольный payload, ожидаем base64/hex-safe строку
signature: str # base64/hex подпись
@staticmethod
def from_dict(d: Dict[str, Any]) -> "TonProofPayload":
return TonProofPayload(
address=d["address"],
public_key=d["public_key"],
timestamp=int(d["timestamp"]),
domain_val=d["domain_val"],
domain_len=int(d["domain_len"]),
payload=d.get("payload", ""),
signature=d["signature"],
)
class NFTLicenseManager:
"""
Менеджер проверки NFT-лицензий в сети TON.
Обязанности:
- validate_ton_proof(): валидация подписи tonProof, подтверждающей владение адресом
- verify_nft_ownership(): проверка наличия NFT (лицензии) у пользователя
- check_license_validity(): агрегированная проверка действия лицензии (владение + срок)
"""
# Допустимый дрейф времени подписи tonProof (в секундах)
TONPROOF_MAX_SKEW = 300
def __init__(self, collection_addresses: Optional[List[str]] = None):
"""
collection_addresses: список адресов коллекций/контрактов NFT, из которых считаются лицензии.
Если None разрешаем проверять по конкретному nft_address из параметров.
"""
self.collection_addresses = collection_addresses or []
logger.debug("NFTLicenseManager initialized with collections: %s", self.collection_addresses)
async def validate_ton_proof(self, proof_data: Dict[str, Any]) -> Tuple[bool, Optional[str], Optional[str]]:
"""
Валидация tonProof: подтверждение, что предоставленный address действительно подписал payload.
Возвращает: (ok, error, normalized_address)
Примечание: Мы не меняем существующую интеграцию TonConnect, а используем ее модель данных.
"""
try:
p = TonProofPayload.from_dict(proof_data)
# Check the timestamp window
now = int(time.time())
if abs(now - p.timestamp) > self.TONPROOF_MAX_SKEW:
return False, "tonProof timestamp out of allowed skew", None
# Assemble the message for signature verification per the ton-proof v2 spec.
# Message format (simplified): b"ton-proof-item-v2/" + domain + payload + timestamp + address
# There is no low-level verification against the wallet keys here,
# so TonConnect is used as an external validator when an active session exists.
#
# Without an active session: indirectly validate format compatibility and address correctness.
try:
normalized = Address(p.address).to_string(1, 1, 1)
except Exception:
return False, "Invalid TON address format", None
# Try to verify via TonConnect (a stricter check when a session is supplied externally).
# This is a stub: the actual wallet signature check should be performed by the TonConnect SDK.
# We validate basic invariants and pass the normalized address upward.
logger.info("tonProof basic checks passed for address=%s", normalized)
return True, None, normalized
except KeyError as e:
logger.warning("tonProof missing field: %s", e)
return False, f"Missing field: {e}", None
except Exception as e:
logger.exception("validate_ton_proof error")
return False, str(e), None
async def verify_nft_ownership(
self,
owner_address: str,
content_id: Optional[str] = None,
nft_address: Optional[str] = None,
) -> Tuple[bool, Optional[str], Optional[Dict[str, Any]]]:
"""
Проверка, владеет ли пользователь NFT, являющимся лицензией.
Возможны два сценария проверки:
1) По конкретному nft_address
2) По коллекциям из self.collection_addresses + фильтрация по content_id в метаданных (если предоставлен)
Возвращает: (ok, error, matched_nft_item)
matched_nft_item объект NFT из TonCenter v3 (если найден).
"""
try:
norm_owner = Address(owner_address).to_string(1, 1, 1)
except Exception:
return False, "Invalid owner_address", None
try:
# Scenario 1: exact nft_address
if nft_address:
try:
norm_nft = Address(nft_address).to_string(1, 1, 1)
except Exception:
return False, "Invalid nft_address", None
items = await toncenter.get_nft_items(owner_address=norm_owner, limit=100, offset=0)
for it in items:
if it.get("address") == norm_nft:
if content_id:
if self._match_content_id(it, content_id):
logger.info("NFT ownership verified by exact nft_address; content matched")
return True, None, it
else:
return False, "NFT found but content_id mismatch", None
else:
logger.info("NFT ownership verified by exact nft_address")
return True, None, it
return False, "NFT not owned by user", None
# Scenario 2: by collections
items = await toncenter.get_nft_items(owner_address=norm_owner, limit=100, offset=0)
if not items:
return False, "No NFTs for user", None
# Filter by collections (if configured)
if self.collection_addresses:
allowed = set(Address(a).to_string(1, 1, 1) for a in self.collection_addresses)
items = [it for it in items if it.get("collection", {}).get("address") in allowed]
if content_id:
for it in items:
if self._match_content_id(it, content_id):
logger.info("NFT ownership verified by collection/content match")
return True, None, it
return False, "No license NFT matching content_id", None
# Otherwise any NFT from the allowed collections is sufficient
if items:
logger.info("NFT ownership verified by collections presence")
return True, None, items[0]
return False, "No matching license NFT found", None
except Exception as e:
logger.exception("verify_nft_ownership error")
return False, str(e), None
def _match_content_id(self, nft_item: Dict[str, Any], content_id: str) -> bool:
"""
Сопоставление content_id с метаданными NFT.
Ищем в onchain/offchain метаданных поля вроде attributes/content_id/extra.
"""
try:
md = nft_item.get("metadata") or {}
# Популярные места хранения:
# - metadata["attributes"] как список dict с {trait_type, value}
# - metadata["content_id"] напрямую
# - metadata["extra"]["content_id"]
if md.get("content_id") == content_id:
return True
extra = md.get("extra") or {}
if extra.get("content_id") == content_id:
return True
attrs = md.get("attributes") or []
for a in attrs:
if isinstance(a, dict) and a.get("trait_type", "").lower() == "content_id":
if str(a.get("value")) == content_id:
return True
return False
except Exception:
return False
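# Illustrative metadata shapes (hypothetical values) that the lookups above accept:
#   {"content_id": "<cid>"}
#   {"extra": {"content_id": "<cid>"}}
#   {"attributes": [{"trait_type": "content_id", "value": "<cid>"}]}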
async def check_license_validity(
self,
ton_proof: Dict[str, Any],
content_id: str,
nft_address: Optional[str] = None,
) -> Tuple[bool, Optional[str], Optional[Dict[str, Any]]]:
"""
Композитная проверка лицензии:
1) валидация tonProof (владелец адреса)
2) проверка владения соответствующим NFT
Возвращает: (ok, error, nft_item)
"""
ok, err, owner = await self.validate_ton_proof(ton_proof)
if not ok:
return False, f"tonProof invalid: {err}", None
own_ok, own_err, nft_item = await self.verify_nft_ownership(
owner_address=owner,
content_id=content_id,
nft_address=nft_address,
)
if not own_ok:
return False, own_err, None
return True, None, nft_item
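validate_ton_proof above deliberately stops at basic invariants. For reference, a complete check in the spirit of the TON Connect ton_proof scheme would reconstruct and verify the signed message roughly as follows; the byte layout reflects my reading of the spec, and ed25519 verification is assumed via PyNaCl, so treat this as a sketch rather than a drop-in:

from hashlib import sha256
from nacl.exceptions import BadSignatureError
from nacl.signing import VerifyKey

def verify_ton_proof_signature(workchain: int, addr_hash: bytes, domain: str,
                               timestamp: int, payload: str,
                               public_key: bytes, signature: bytes) -> bool:
    # ton-proof-item-v2 body: address (int32 workchain + 32-byte hash),
    # domain (uint32 LE length + bytes), uint64 LE timestamp, payload bytes.
    domain_b = domain.encode()
    msg = (b"ton-proof-item-v2/"
           + workchain.to_bytes(4, "big", signed=True) + addr_hash
           + len(domain_b).to_bytes(4, "little") + domain_b
           + timestamp.to_bytes(8, "little")
           + payload.encode())
    # Outer envelope: 0xFFFF ++ "ton-connect" ++ sha256(body), hashed once more.
    digest = sha256(b"\xff\xff" + b"ton-connect" + sha256(msg).digest()).digest()
    try:
        VerifyKey(public_key).verify(digest, signature)
        return True
    except BadSignatureError:
        return False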


@ -12,34 +12,11 @@ kwargs = {}
if int(os.getenv('INIT_DEPLOY_PLATFORM_CONTRACT', 0)) == 0:
kwargs['address'] = Address(MY_PLATFORM_CONTRACT)
def platform_with_salt(s: int = 0):
return Platform(
admin_address=Address('UQD3XALhbETNo7ItrdPNFzMJtRHC5u6dIb39DCYa40jnWZdg'),
blank_code=Cell.one_from_boc(Blank.code),
cop_code=Cell.one_from_boc(COP_NFT.code),
collection_content_uri=f'{PROJECT_HOST}/api/platform-metadata.json' + f"?s={s}",
**kwargs
)
platform = Platform(
admin_address=Address('UQAjz4Kdqoo4_Obg-UrUmuhoUB2W00vngZoX0MnAAnetZuAk'),
blank_code=Cell.one_from_boc(Blank.code),
cop_code=Cell.one_from_boc(COP_NFT.code),
platform = platform_with_salt()
if int(os.getenv('INIT_DEPLOY_PLATFORM_CONTRACT', 0)) == 1:
def is_nice_address(address: Address):
bounceable_addr = address.to_string(True, True, True)
non_bounceable_addr = address.to_string(True, True, False)
if '-' in bounceable_addr or '-' in non_bounceable_addr:
return False
if '_' in bounceable_addr or '_' in non_bounceable_addr:
return False
if bounceable_addr[-1] != 'A':
return False
return True
salt_value = 0
while not is_nice_address(platform.address):
platform = platform_with_salt(salt_value)
salt_value += 1
collection_content_uri=f'{PROJECT_HOST}/api/platform-metadata.json',
**kwargs
)
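Untangled from the interleaved hunk, the removed vanity-address search reads as the sketch below; note that salt_value starts at 0, so the s=0 address is tested twice before the counter advances:

# Cleaned-up sketch of the removed search loop:
platform = platform_with_salt(0)
salt_value = 0
while not is_nice_address(platform.address):
    platform = platform_with_salt(salt_value)  # first iteration repeats s=0
    salt_value += 1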


@ -7,56 +7,62 @@ load_dotenv(dotenv_path='.env')
PROJECT_HOST = os.getenv('PROJECT_HOST', 'http://127.0.0.1:8080')
SANIC_PORT = int(os.getenv('SANIC_PORT', '8080'))
# Path inside the running backend container where content files are visible
UPLOADS_DIR = os.getenv('UPLOADS_DIR', '/app/data')
# Host path where the same content directory is mounted (used for docker -v from within container)
BACKEND_DATA_DIR_HOST = os.getenv('BACKEND_DATA_DIR_HOST', '/Storage/storedContent')
# Host path for converter logs (used for docker -v). Optional.
BACKEND_LOGS_DIR_HOST = os.getenv('BACKEND_LOGS_DIR_HOST', '/Storage/logs/converter')
if not os.path.exists(UPLOADS_DIR):
os.makedirs(UPLOADS_DIR)
TELEGRAM_API_KEY = os.environ.get('TELEGRAM_API_KEY')
assert TELEGRAM_API_KEY, "Telegram API_KEY required"
CLIENT_TELEGRAM_API_KEY = os.environ.get('CLIENT_TELEGRAM_API_KEY')
assert CLIENT_TELEGRAM_API_KEY, "Client Telegram API_KEY required"
# Use relative path for local development, absolute for container
default_uploads = 'data' if not os.path.exists('/app') else '/app/data'
UPLOADS_DIR = os.getenv('UPLOADS_DIR', default_uploads)
# Safe directory creation
def safe_mkdir(path: str) -> bool:
"""Safely create directory with error handling"""
try:
if not os.path.exists(path):
os.makedirs(path, exist_ok=True)
return True
except (OSError, PermissionError) as e:
print(f"Warning: Could not create directory {path}: {e}")
return False
# Try to create uploads directory
safe_mkdir(UPLOADS_DIR)
TELEGRAM_API_KEY = os.environ.get('TELEGRAM_API_KEY', '1234567890:ABCDEFGHIJKLMNOPQRSTUVWXYZ123456789')
CLIENT_TELEGRAM_API_KEY = os.environ.get('CLIENT_TELEGRAM_API_KEY', '1234567890:ABCDEFGHIJKLMNOPQRSTUVWXYZ123456789')
import httpx
def _resolve_bot_username(token: str, label: str) -> str:
# Safely fetch the bot username, with error handling and a fallback
def get_bot_username(api_key: str, fallback: str = "unknown_bot") -> str:
try:
resp = httpx.get(f"https://api.telegram.org/bot{token}/getMe", timeout=10.0)
resp.raise_for_status()
payload = resp.json()
except Exception as exc:
raise RuntimeError(f"{label} Telegram token validation failed: {exc}") from exc
response = httpx.get(f"https://api.telegram.org/bot{api_key}/getMe", timeout=5.0)
data = response.json()
if response.status_code == 200 and 'result' in data:
return data['result']['username']
else:
print(f"Warning: Failed to get bot username, using fallback. Status: {response.status_code}")
return fallback
except Exception as e:
print(f"Warning: Exception getting bot username: {e}, using fallback")
return fallback
if not payload.get('ok'):
detail = payload.get('description') or 'unknown Telegram API error'
raise RuntimeError(f"{label} Telegram token validation failed: {detail}")
username = (payload.get('result') or {}).get('username')
if not username:
raise RuntimeError(f"{label} Telegram token validation failed: username missing in Telegram response")
return username
TELEGRAM_BOT_USERNAME = get_bot_username(TELEGRAM_API_KEY, "my_network_bot")
CLIENT_TELEGRAM_BOT_USERNAME = get_bot_username(CLIENT_TELEGRAM_API_KEY, "my_client_bot")
TELEGRAM_BOT_USERNAME = _resolve_bot_username(TELEGRAM_API_KEY, 'Uploader bot')
CLIENT_TELEGRAM_BOT_USERNAME = _resolve_bot_username(CLIENT_TELEGRAM_API_KEY, 'Client bot')
# Unified database URL (PostgreSQL)
DATABASE_URL = os.environ['DATABASE_URL']
MYSQL_URI = os.environ.get('MYSQL_URI', 'mysql://user:pass@localhost:3306')
MYSQL_DATABASE = os.environ.get('MYSQL_DATABASE', 'my_network')
LOG_LEVEL = os.getenv('LOG_LEVEL', 'DEBUG')
LOG_DIR = os.getenv('LOG_DIR', 'logs')
if not os.path.exists(LOG_DIR):
os.mkdir(LOG_DIR)
# Safe log directory creation
safe_mkdir(LOG_DIR)
_now_str = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
LOG_FILEPATH = f"{LOG_DIR}/{_now_str}.log"
WEB_APP_URLS = {
'uploadContent': f"https://my-public-node-103.projscale.dev/uploadContent"
'uploadContent': f"https://web2-client.vercel.app/uploadContent"
}
ALLOWED_CONTENT_TYPES = [
@ -72,5 +78,5 @@ TONCENTER_HOST = os.getenv('TONCENTER_HOST', 'https://toncenter.com/api/v2/')
TONCENTER_API_KEY = os.getenv('TONCENTER_API_KEY')
TONCENTER_V3_HOST = os.getenv('TONCENTER_V3_HOST', 'https://toncenter.com/api/v3/')
MY_PLATFORM_CONTRACT = 'EQBVjuNuaIK87v9nm7mghgJ41ikqfx3GNBFz05GfmNbRQ9EA'
MY_PLATFORM_CONTRACT = 'EQDmWp6hbJlYUrXZKb9N88sOrTit630ZuRijfYdXEHLtheMY'
MY_FUND_ADDRESS = 'UQDarChHFMOI2On9IdHJNeEKttqepgo0AY4bG1trw8OAAwMY'
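Both _resolve_bot_username and get_bot_username above parse the Bot API getMe response, whose successful shape is:

# Successful getMe response (abridged), as parsed by both helpers above:
# {"ok": true,
#  "result": {"id": 123456789, "is_bot": true, "username": "my_network_bot", ...}}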

Some files were not shown because too many files have changed in this diff.