Merge remote-tracking branch 'origin/master'

This commit is contained in:
user 2025-08-15 19:31:47 +03:00
commit 8b5a9f3dc2
7 changed files with 252 additions and 326 deletions

View File

@@ -1,47 +1,36 @@
FROM python:3.11-slim FROM python:3.11-slim
# Установка системных зависимостей для PostgreSQL и приложения
RUN apt-get update && apt-get install -y \
build-essential \
curl \
git \
libpq-dev \
postgresql-client \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Создание рабочей директории
WORKDIR /app WORKDIR /app
# Копирование файлов зависимостей # Установка системных зависимостей
COPY pyproject.toml ./ RUN apt-get update && apt-get install -y \
COPY requirements.txt ./ gcc \
g++ \
curl \
&& rm -rf /var/lib/apt/lists/*
# Установка Python зависимостей # Копирование requirements и установка Python зависимостей
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
# Копирование исходного кода # Копирование кода приложения
COPY . . COPY app/ ./app/
COPY alembic/ ./alembic/
COPY alembic.ini .
COPY bootstrap.json .
# Создание директорий для данных и логов # Создание директорий
RUN mkdir -p /app/data /app/logs RUN mkdir -p /app/storage /app/logs
# Создание пользователя для безопасности # Права доступа
RUN groupadd -r myapp && useradd -r -g myapp myapp RUN chmod +x /app/app/main.py
RUN chown -R myapp:myapp /app
USER myapp # Переменные окружения для корректного запуска
ENV UVICORN_HOST=0.0.0.0
ENV UVICORN_PORT=8000
ENV API_HOST=0.0.0.0
ENV API_PORT=8000
# Порт приложения
EXPOSE 8000 EXPOSE 8000
# Переменные окружения CMD ["uvicorn", "app.fastapi_main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "1"]
ENV PYTHONPATH=/app
ENV PYTHONUNBUFFERED=1
ENV USE_FASTAPI=true
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
CMD curl -f http://localhost:8000/api/system/health || exit 1
# Команда запуска FastAPI с uvicorn
CMD ["uvicorn", "app.fastapi_main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "1"]

View File

@@ -163,8 +163,8 @@ def create_fastapi_app() -> FastAPI:
title="MY Network Uploader Bot - FastAPI", title="MY Network Uploader Bot - FastAPI",
description="Decentralized content uploader with web2-client compatibility", description="Decentralized content uploader with web2-client compatibility",
version="3.0.0", version="3.0.0",
docs_url="/docs" if getattr(settings, 'DEBUG', False) else None, docs_url="/docs",
redoc_url="/redoc" if getattr(settings, 'DEBUG', False) else None, redoc_url="/redoc",
lifespan=lifespan lifespan=lifespan
) )

View File

@@ -1,131 +1,23 @@
{ {
"version": "2.0", "version": "3.0.0",
"checksum": "bootstrap-config-v2.0", "network_id": "my-network-1754810662",
"signature": "signed-by-my-network-core", "created_at": "2025-08-10T07:24:22Z",
"last_updated": "2025-07-11T01:59:00Z",
"bootstrap_nodes": [ "bootstrap_nodes": [
{ {
"id": "my-public-node-3", "id": "node-7fd144167286645c",
"address": "my-public-node-3.projscale.dev:15100", "node_id": "node-7fd144167286645c",
"region": "europe", "address": "2a02:6b40:2000:16b1::1",
"priority": 1, "port": 8000,
"public_key": "bootstrap-node-public-key-1", "public_key": "7fd144167286645c3b01cd16d87a775f57ec134dbad412e13f3016c33e936177",
"capabilities": ["replication", "monitoring", "consensus"] "trusted": true,
}, "node_type": "bootstrap"
{
"id": "local-dev-node",
"address": "localhost:15100",
"region": "local",
"priority": 2,
"public_key": "local-dev-node-public-key",
"capabilities": ["development", "testing"]
} }
], ],
"network_settings": { "network_settings": {
"protocol_version": "2.0", "protocol_version": "3.0",
"max_peers": 50, "max_peers": 50,
"connection_timeout": 30,
"heartbeat_interval": 60,
"discovery_interval": 300,
"replication_factor": 3,
"consensus_threshold": 0.66
},
"sync_settings": {
"sync_interval": 300, "sync_interval": 300,
"batch_size": 100, "individual_decisions": true,
"max_concurrent_syncs": 5, "no_consensus": true
"retry_attempts": 3,
"retry_delay": 10,
"workers_count": 4,
"chunk_size": 1048576
},
"content_settings": {
"max_file_size": 104857600,
"allowed_types": ["*"],
"compression": true,
"encryption": false,
"deduplication": true,
"retention_days": 365
},
"security_settings": {
"require_authentication": false,
"rate_limiting": true,
"max_requests_per_minute": 1000,
"allowed_origins": ["*"],
"encryption_enabled": false,
"signature_verification": false
},
"api_settings": {
"port": 15100,
"host": "0.0.0.0",
"cors_enabled": true,
"documentation_enabled": true,
"monitoring_endpoint": "/api/my/monitor",
"health_endpoint": "/health",
"metrics_endpoint": "/metrics"
},
"monitoring_settings": {
"enabled": true,
"real_time_updates": true,
"websocket_enabled": true,
"metrics_collection": true,
"log_level": "INFO",
"dashboard_theme": "matrix",
"update_interval": 30
},
"storage_settings": {
"base_path": "./storage/my-network",
"database_url": "sqlite+aiosqlite:///app/data/my_network.db",
"backup_enabled": false,
"cleanup_enabled": true,
"max_storage_gb": 100
},
"consensus": {
"algorithm": "raft",
"leader_election_timeout": 150,
"heartbeat_timeout": 50,
"log_compaction": true,
"snapshot_interval": 1000
},
"feature_flags": {
"experimental_features": false,
"advanced_monitoring": true,
"websocket_support": true,
"real_time_sync": true,
"load_balancing": true,
"auto_scaling": false,
"content_caching": true
},
"regional_settings": {
"europe": {
"primary_nodes": ["my-public-node-3.projscale.dev:15100"],
"fallback_nodes": ["backup-eu.projscale.dev:15100"],
"latency_threshold": 100
},
"local": {
"primary_nodes": ["localhost:15100"],
"fallback_nodes": [],
"latency_threshold": 10
}
},
"emergency_settings": {
"emergency_mode": false,
"failover_enabled": true,
"backup_bootstrap_urls": [
"https://raw.githubusercontent.com/mynetwork/bootstrap/main/bootstrap.json"
],
"emergency_contacts": []
} }
} }

View File

@@ -1,93 +1,62 @@
version: '3.8'
services: services:
# MY Network v2.0 Application app:
my-network: build: .
build: container_name: my-network-app
context: .
dockerfile: Dockerfile
container_name: my-network-node
restart: unless-stopped restart: unless-stopped
ports: ports:
- "8000:8000" - "8000:8000"
- "3000:8000" # Альтернативный порт для nginx
environment:
# Database - PostgreSQL для production
- DATABASE_URL=postgresql+asyncpg://mynetwork:password@postgres:5432/mynetwork
# Application
- API_HOST=0.0.0.0
- API_PORT=8000
- DEBUG=false
- ENVIRONMENT=production
# Security
- SECRET_KEY=${SECRET_KEY:-my-network-secret-key-change-this}
- JWT_SECRET_KEY=${JWT_SECRET_KEY:-jwt-secret-change-this}
# MY Network specific
- MY_NETWORK_MODE=main-node
- MY_NETWORK_PORT=8000
- MY_NETWORK_HOST=0.0.0.0
- BOOTSTRAP_NODE=my-public-node-3.projscale.dev:8000
# Monitoring
- MONITORING_ENABLED=true
- METRICS_ENABLED=true
# Storage
- STORAGE_PATH=/app/data/storage
- LOGS_PATH=/app/logs
# Cache (Redis optional)
- REDIS_ENABLED=false
- CACHE_ENABLED=false
depends_on:
postgres:
condition: service_healthy
volumes: volumes:
- ./data:/app/data - ${STORAGE_PATH:-./storage}:/app/storage
- ${DOCKER_SOCK_PATH:-/var/run/docker.sock}:/var/run/docker.sock
- ./logs:/app/logs - ./logs:/app/logs
- ./sqlStorage:/app/sqlStorage - ./config/keys:/app/keys:ro
- ./storedContent:/app/storedContent environment:
- ./bootstrap.json:/app/bootstrap.json:ro - DATABASE_URL=${DATABASE_URL}
- ./.env:/app/.env:ro - REDIS_URL=${REDIS_URL}
healthcheck: - NODE_ID=${NODE_ID}
test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - NODE_TYPE=${NODE_TYPE}
interval: 30s - NODE_VERSION=${NODE_VERSION}
timeout: 10s - NETWORK_MODE=${NETWORK_MODE}
retries: 3 - ALLOW_INCOMING_CONNECTIONS=${ALLOW_INCOMING_CONNECTIONS}
start_period: 40s - SECRET_KEY=${SECRET_KEY}
- JWT_SECRET_KEY=${JWT_SECRET_KEY}
- ENCRYPTION_KEY=${ENCRYPTION_KEY}
- STORAGE_PATH=/app/storage
- API_HOST=${API_HOST}
- API_PORT=${API_PORT}
- DOCKER_SOCK_PATH=/var/run/docker.sock
- NODE_PRIVATE_KEY_PATH=/app/keys/node_private_key
- NODE_PUBLIC_KEY_PATH=/app/keys/node_public_key
- NODE_PUBLIC_KEY_HEX=${NODE_PUBLIC_KEY_HEX}
- TELEGRAM_API_KEY=${TELEGRAM_API_KEY}
- CLIENT_TELEGRAM_API_KEY=${CLIENT_TELEGRAM_API_KEY}
- LOG_LEVEL=${LOG_LEVEL}
- LOG_PATH=/app/logs
- BOOTSTRAP_CONFIG=${BOOTSTRAP_CONFIG}
- MAX_PEER_CONNECTIONS=${MAX_PEER_CONNECTIONS}
- SYNC_INTERVAL=${SYNC_INTERVAL}
- CONVERT_MAX_PARALLEL=${CONVERT_MAX_PARALLEL}
- CONVERT_TIMEOUT=${CONVERT_TIMEOUT}
depends_on:
- postgres
- redis
networks: networks:
- my-network - my-network
profiles:
- main-node
# PostgreSQL (for production setups)
postgres: postgres:
image: postgres:15-alpine image: postgres:15-alpine
container_name: my-network-postgres container_name: my-network-postgres
restart: unless-stopped restart: unless-stopped
environment: environment:
- POSTGRES_USER=${POSTGRES_USER:-mynetwork} - POSTGRES_DB=${POSTGRES_DB}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password} - POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_DB=${POSTGRES_DB:-mynetwork} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
volumes: volumes:
- postgres_data:/var/lib/postgresql/data - postgres_data:/var/lib/postgresql/data
ports: - ./init_db.sql:/docker-entrypoint-initdb.d/init_db.sql
- "5432:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-mynetwork} -d ${POSTGRES_DB:-mynetwork}"]
interval: 10s
timeout: 5s
retries: 5
start_period: 30s
networks: networks:
- my-network - my-network
profiles:
- postgres
# Redis (for caching and sessions)
redis: redis:
image: redis:7-alpine image: redis:7-alpine
container_name: my-network-redis container_name: my-network-redis
@@ -95,35 +64,13 @@ services:
command: redis-server --appendonly yes command: redis-server --appendonly yes
volumes: volumes:
- redis_data:/data - redis_data:/data
ports:
- "6379:6379"
networks: networks:
- my-network - my-network
profiles:
- redis
# Nginx Reverse Proxy volumes:
nginx: postgres_data:
image: nginx:alpine redis_data:
container_name: my-network-nginx
restart: unless-stopped
ports:
- "80:80"
- "443:443"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
- ./ssl:/etc/nginx/ssl:ro
depends_on:
- my-network
networks:
- my-network
profiles:
- nginx
networks: networks:
my-network: my-network:
driver: bridge driver: bridge
volumes:
postgres_data:
redis_data:

139
init_db.sql Normal file
View File

@@ -0,0 +1,139 @@
-- MY Network v3.0 Database Initialization
-- Mounted into /docker-entrypoint-initdb.d, so it runs once when the
-- PostgreSQL container initializes a fresh data directory.
-- Every statement below is written to be idempotent (IF NOT EXISTS /
-- exception-swallowing DO block) so re-running the script is safe.
-- Extension for UUID generation
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Create enum types
-- CREATE TYPE has no IF NOT EXISTS; the DO block swallows duplicate_object
-- so the enum can be re-declared harmlessly on subsequent runs.
DO $$ BEGIN
CREATE TYPE content_status AS ENUM ('pending', 'processing', 'completed', 'failed');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Create stored_content table (compatible with DEPRECATED-uploader-bot)
-- Primary content registry: one row per stored file, addressed by its hash.
CREATE TABLE IF NOT EXISTS stored_content (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
-- Content-addressable identifier; UNIQUE makes the hash a natural key.
hash VARCHAR(255) UNIQUE NOT NULL,
original_filename VARCHAR(255) NOT NULL,
file_type VARCHAR(100) NOT NULL,
file_size BIGINT NOT NULL,
content_type VARCHAR(255),
-- Filesystem locations: storage and encrypted copies are mandatory,
-- decrypted and thumbnail paths are optional.
storage_path TEXT NOT NULL,
decrypted_path TEXT,
encrypted_path TEXT NOT NULL,
thumbnail_path TEXT,
-- Per-format conversion outputs and free-form metadata (JSONB, default empty).
converted_formats JSONB DEFAULT '{}',
metadata JSONB DEFAULT '{}',
-- NOTE(review): the encryption key is stored in plaintext next to the content
-- row — confirm this is acceptable for the deployment's threat model.
encryption_key TEXT NOT NULL,
upload_date TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
last_accessed TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
access_count INTEGER DEFAULT 0,
-- Lifecycle state; content_status enum is declared at the top of this script.
status content_status DEFAULT 'pending',
uploader_id VARCHAR(255),
tags TEXT[],
description TEXT,
is_public BOOLEAN DEFAULT false,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Create indexes for performance
-- Cover the common lookup paths: hash, status, upload time, uploader, file type.
CREATE INDEX IF NOT EXISTS idx_stored_content_hash ON stored_content(hash);
CREATE INDEX IF NOT EXISTS idx_stored_content_status ON stored_content(status);
CREATE INDEX IF NOT EXISTS idx_stored_content_upload_date ON stored_content(upload_date);
CREATE INDEX IF NOT EXISTS idx_stored_content_uploader_id ON stored_content(uploader_id);
CREATE INDEX IF NOT EXISTS idx_stored_content_file_type ON stored_content(file_type);
-- Create nodes table for network management
-- Known peers in the network, keyed by their string node_id.
CREATE TABLE IF NOT EXISTS nodes (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
node_id VARCHAR(255) UNIQUE NOT NULL,
-- INET accepts both IPv4 and IPv6 host addresses (the bundled bootstrap
-- config in this commit uses a bare IPv6 address).
address INET NOT NULL,
port INTEGER NOT NULL,
public_key TEXT,
node_type VARCHAR(50) NOT NULL,
version VARCHAR(20) NOT NULL,
last_seen TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
-- DECIMAL(3,2) bounds the score to (-9.99, 9.99); presumably 1.0 = full
-- trust — TODO confirm the intended scale against the application code.
trust_score DECIMAL(3,2) DEFAULT 1.0,
is_active BOOLEAN DEFAULT true,
metadata JSONB DEFAULT '{}',
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Create indexes for nodes
CREATE INDEX IF NOT EXISTS idx_nodes_node_id ON nodes(node_id);
CREATE INDEX IF NOT EXISTS idx_nodes_address ON nodes(address);
CREATE INDEX IF NOT EXISTS idx_nodes_is_active ON nodes(is_active);
CREATE INDEX IF NOT EXISTS idx_nodes_last_seen ON nodes(last_seen);
-- Create content_sync table for decentralized synchronization
-- Tracks replication of one content item (by hash) to one node.
-- NOTE(review): there is no UNIQUE(content_hash, node_id) constraint, so
-- duplicate sync rows for the same (content, node) pair are possible —
-- confirm whether the application deduplicates these itself.
CREATE TABLE IF NOT EXISTS content_sync (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
content_hash VARCHAR(255) NOT NULL,
node_id VARCHAR(255) NOT NULL,
-- Free-text status (VARCHAR, not the content_status enum used elsewhere).
sync_status VARCHAR(50) DEFAULT 'pending',
attempts INTEGER DEFAULT 0,
last_attempt TIMESTAMP WITH TIME ZONE,
error_message TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Create indexes for content_sync
CREATE INDEX IF NOT EXISTS idx_content_sync_hash ON content_sync(content_hash);
CREATE INDEX IF NOT EXISTS idx_content_sync_node_id ON content_sync(node_id);
CREATE INDEX IF NOT EXISTS idx_content_sync_status ON content_sync(sync_status);
-- Create conversion_jobs table
-- Work queue for media/format conversions; reuses the content_status enum.
CREATE TABLE IF NOT EXISTS conversion_jobs (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
-- FK to the source content row; nullable, default referential action
-- (NO ACTION) — deleting content with pending jobs will fail.
content_id UUID REFERENCES stored_content(id),
target_format VARCHAR(50) NOT NULL,
status content_status DEFAULT 'pending',
-- Lower/higher priority ordering is decided by the consumer — the schema
-- only stores the number (default 5).
priority INTEGER DEFAULT 5,
attempts INTEGER DEFAULT 0,
max_attempts INTEGER DEFAULT 3,
error_message TEXT,
conversion_params JSONB DEFAULT '{}',
output_path TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
started_at TIMESTAMP WITH TIME ZONE,
completed_at TIMESTAMP WITH TIME ZONE
);
-- Create indexes for conversion_jobs
CREATE INDEX IF NOT EXISTS idx_conversion_jobs_content_id ON conversion_jobs(content_id);
CREATE INDEX IF NOT EXISTS idx_conversion_jobs_status ON conversion_jobs(status);
CREATE INDEX IF NOT EXISTS idx_conversion_jobs_priority ON conversion_jobs(priority);
-- Update trigger for updated_at columns
-- Trigger function: stamps NEW.updated_at with the current time on every
-- row UPDATE. Shared by the per-table triggers created below.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Apply update triggers
-- Each trigger is dropped first so this script stays idempotent on re-runs
-- (CREATE TRIGGER has no IF NOT EXISTS form in PostgreSQL 15).
DROP TRIGGER IF EXISTS update_stored_content_updated_at ON stored_content;
CREATE TRIGGER update_stored_content_updated_at
BEFORE UPDATE ON stored_content
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
DROP TRIGGER IF EXISTS update_nodes_updated_at ON nodes;
CREATE TRIGGER update_nodes_updated_at
BEFORE UPDATE ON nodes
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
DROP TRIGGER IF EXISTS update_content_sync_updated_at ON content_sync;
CREATE TRIGGER update_content_sync_updated_at
BEFORE UPDATE ON content_sync
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
DROP TRIGGER IF EXISTS update_conversion_jobs_updated_at ON conversion_jobs;
CREATE TRIGGER update_conversion_jobs_updated_at
BEFORE UPDATE ON conversion_jobs
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

View File

@@ -34,6 +34,8 @@ ton = "^0.24"
validators = "^0.22.0" validators = "^0.22.0"
python-dateutil = "^2.8.2" python-dateutil = "^2.8.2"
typing-extensions = "^4.8.0" typing-extensions = "^4.8.0"
tonsdk = "^1.0.15"
pytonconnect = "^0.3.0"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
pytest = "^7.4.3" pytest = "^7.4.3"

View File

@@ -1,80 +1,37 @@
# FastAPI Migration Requirements - MY Network v3.0
# Полная миграция от Sanic к FastAPI с совместимостью
# === Core FastAPI Stack ===
fastapi==0.104.1 fastapi==0.104.1
uvicorn[standard]==0.24.0 uvicorn[standard]==0.24.0
python-multipart==0.0.6 pydantic==2.4.2
pydantic-settings==2.0.3
# === Authentication & Security ===
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
python-jwt==4.0.0
pyjwt==2.8.0
bcrypt==4.1.2
# === Database & ORM ===
sqlalchemy==2.0.23 sqlalchemy==2.0.23
alembic==1.12.1 alembic==1.12.1
asyncpg==0.29.0 asyncpg==0.29.0
# psycopg2-binary==2.9.9 # Removed - conflicts with asyncpg
# === Caching & Redis ===
redis==5.0.1 redis==5.0.1
aioredis==2.0.1 aioredis==2.0.1
aiofiles==23.2.1
# === Cryptography === cryptography==41.0.7
python-jose[cryptography]==3.3.0
python-multipart==0.0.6
httpx==0.25.2
websockets==12.0
docker==6.1.3
base58==2.1.1
passlib[bcrypt]==1.7.4
python-telegram-bot==20.7
APScheduler==3.10.4
psutil==5.9.6
requests==2.31.0
PyYAML==6.0.1
python-dotenv==1.0.0
Pillow==10.1.0
ffmpeg-python==0.2.0
python-magic==0.4.27
jinja2==3.1.2
starlette==0.27.0
structlog==23.2.0
aiogram==3.3.0
sanic==23.12.1
PyJWT==2.8.0
cryptography==41.0.7 cryptography==41.0.7
ed25519==1.5 ed25519==1.5
pynacl==1.5.0 tonsdk==1.0.15
PyNaCl==1.5.0 pytonconnect==0.3.0
base58==2.1.1
# === HTTP & API ===
httpx==0.25.2
aiohttp==3.9.0
requests==2.31.0
# === Data Processing ===
pydantic==2.5.0
pydantic-settings==2.1.0
# === Validation & Parsing ===
email-validator==2.1.0
python-dateutil==2.8.2
# === File Handling ===
python-magic==0.4.27
pillow==10.1.0
aiofiles==23.2.1
# === Monitoring & Logging ===
structlog==23.2.0
psutil==5.9.6
prometheus-client==0.19.0
# === WebSocket Support ===
websockets==12.0
# === Audio Processing ===
pydub==0.25.1
# === Development & Testing ===
pytest==7.4.3
pytest-asyncio==0.21.1
pytest-cov==4.1.0
black==23.11.0
isort==5.12.0
flake8==6.1.0
# === Environment & Configuration ===
python-dotenv==1.0.0
pyyaml==6.0.1
# === Compatibility Libraries ===
typing-extensions==4.8.0
starlette==0.27.0
# === Optional: Production Deployment ===
# uvloop==0.19.0 # Для лучшей производительности (разкомментировать в production)
# gunicorn==21.2.0 # Для production deployment