new startup script

This commit is contained in:
parent cf1b715ab8
commit 1b94882b45

start.sh | 339
@@ -1238,6 +1238,322 @@ patch_app_routes() {
    fi
}

# Compatibility with the DEPRECATED v1/system API: add compat routes and basic anti-replay protection
add_compatibility_layer() {
    log_info "Adding the API backward-compatibility layer..."

    local app_dir="$PROJECT_DIR/my-network/app"
    local api_dir="$app_dir/api"
    mkdir -p "$api_dir"

    # 1) Create fastapi_compat_routes.py with the legacy endpoints
    cat > "$api_dir/fastapi_compat_routes.py" << 'EOF'
"""
Compatibility routes to preserve the deprecated uploader-bot API surface (v1/system).

These endpoints mirror the legacy paths so that older clients continue to work
while the new v3 sync API runs in parallel.
"""

import base64
import os
from typing import Optional

from fastapi import APIRouter, UploadFile, File, HTTPException, Query
from fastapi.responses import StreamingResponse, PlainTextResponse
from sqlalchemy import text
import aiofiles

from app.core.logging import get_logger
from app.core.config import get_settings
from app.core.database import db_manager
from app.core.storage import LocalStorageBackend

router = APIRouter(prefix="", tags=["compat-v1"])
logger = get_logger(__name__)
settings = get_settings()


def _is_table_missing_error(exc: Exception) -> bool:
    """Heuristically detect a missing-table (UndefinedTable) database error."""
    try:
        msg = str(exc)
        return 'UndefinedTable' in msg or 'does not exist' in msg
    except Exception:
        return False


@router.get("/api/system.version")
async def system_version():
    codebase_hash = os.getenv("CODEBASE_HASH", "unknown")
    codebase_branch = os.getenv("CODEBASE_BRANCH", os.getenv("GIT_BRANCH", "main"))
    return {"codebase_hash": codebase_hash, "codebase_branch": codebase_branch}


@router.post("/api/system.sendStatus")
async def system_send_status(payload: dict):
    try:
        message_b58 = payload.get("message")
        signature = payload.get("signature")
        if not message_b58 or not signature:
            raise HTTPException(status_code=400, detail="message and signature required")
        await logger.ainfo("Compat system.sendStatus", signature=signature)
        return {"ok": True}
    except HTTPException:
        raise
    except Exception as e:
        await logger.aerror("sendStatus failed", error=str(e))
        raise HTTPException(status_code=500, detail="sendStatus failed")


@router.get("/api/tonconnect-manifest.json")
async def tonconnect_manifest():
    host = str(getattr(settings, "PROJECT_HOST", "")) or os.getenv("PROJECT_HOST", "") or "http://localhost:8000"
    return {
        "url": host,
        "name": "MY Network Node",
        "iconUrl": f"{host}/static/icon.png",
        "termsOfUseUrl": f"{host}/terms",
        "privacyPolicyUrl": f"{host}/privacy",
        "bridgeUrl": "https://bridge.tonapi.io/bridge",
        "manifestVersion": 2
    }


@router.get("/api/platform-metadata.json")
async def platform_metadata():
    host = str(getattr(settings, "PROJECT_HOST", "")) or os.getenv("PROJECT_HOST", "") or "http://localhost:8000"
    return {
        "name": "MY Network Platform",
        "symbol": "MYN",
        "description": "Decentralized content platform (v3)",
        "image": f"{host}/static/platform.png",
        "external_url": host,
        "version": "3.0.0"
    }


@router.get("/")
async def index_root():
    return PlainTextResponse("MY Network Node", status_code=200)


@router.get("/favicon.ico")
async def favicon():
    return PlainTextResponse("", status_code=204)


@router.get("/api/v1/node")
async def v1_node():
    from app.core.crypto import get_ed25519_manager
    cm = get_ed25519_manager()
    return {"id": cm.node_id, "node_address": "", "master_address": "", "indexer_height": 0, "services": {}}


@router.get("/api/v1/nodeFriendly")
async def v1_node_friendly():
    from app.core.crypto import get_ed25519_manager
    cm = get_ed25519_manager()
    return PlainTextResponse(f"Node ID: {cm.node_id}\nIndexer height: 0\nServices: none\n")


@router.post("/api/v1/auth.twa")
async def v1_auth_twa(payload: dict):
    user_ref = payload.get("user") or {}
    token = base64.b64encode(f"twa:{user_ref}".encode()).decode()
    return {"token": token}


@router.get("/api/v1/auth.me")
async def v1_auth_me():
    return {"user": None, "status": "guest"}


@router.post("/api/v1/auth.selectWallet")
async def v1_auth_select_wallet(payload: dict):
    return {"ok": True}


@router.get("/api/v1/tonconnect.new")
async def v1_tonconnect_new():
    return {"ok": True}


@router.post("/api/v1/tonconnect.logout")
async def v1_tonconnect_logout(payload: dict):
    return {"ok": True}


@router.post("/api/v1/storage")
async def v1_storage_upload(file: UploadFile = File(...)):
    try:
        data = await file.read()
        if not data:
            raise HTTPException(status_code=400, detail="empty file")
        backend = LocalStorageBackend()
        from hashlib import sha256
        # Content-addressed storage: the file is saved under its SHA-256 digest
        file_hash = sha256(data).hexdigest()
        file_path = os.path.join(backend.files_path, file_hash)
        async with aiofiles.open(file_path, 'wb') as f:
            await f.write(data)
        return {"hash": file_hash}
    except HTTPException:
        raise
    except Exception as e:
        await logger.aerror("v1 upload failed", error=str(e))
        raise HTTPException(status_code=500, detail="upload failed")


@router.get("/api/v1/storage/{file_hash}")
async def v1_storage_get(file_hash: str):
    try:
        async with db_manager.get_session() as session:
            result = await session.execute(text("SELECT file_path FROM my_network_content WHERE hash=:h LIMIT 1"), {"h": file_hash})
            row = result.first()
            if not row or not row[0]:
                raise HTTPException(status_code=404, detail="not found")
            backend = LocalStorageBackend()
            return StreamingResponse(backend.get_file_stream(row[0]))
    except HTTPException:
        raise
    except Exception as e:
        if _is_table_missing_error(e):
            raise HTTPException(status_code=404, detail="not found")
        await logger.aerror("v1 storage get failed", error=str(e))
        raise HTTPException(status_code=500, detail="failed")


@router.get("/api/v1/storage.decodeContentId/{content_id}")
async def v1_decode_content_id(content_id: str):
    try:
        async with db_manager.get_session() as session:
            result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:i LIMIT 1"), {"i": content_id})
            row = result.first()
            if not row:
                raise HTTPException(status_code=404, detail="not found")
            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4]}
    except HTTPException:
        raise
    except Exception as e:
        if _is_table_missing_error(e):
            raise HTTPException(status_code=404, detail="not found")
        await logger.aerror("decodeContentId failed", error=str(e))
        raise HTTPException(status_code=500, detail="failed")


@router.get("/api/v1/content.list")
async def v1_content_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
    try:
        async with db_manager.get_session() as session:
            result = await session.execute(
                text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content ORDER BY created_at DESC LIMIT :lim OFFSET :off"),
                {"lim": limit, "off": offset}
            )
            rows = result.fetchall() or []
            return {
                "items": [
                    {"id": str(r[0]), "hash": r[1], "filename": r[2], "size": r[3], "mime_type": r[4]} for r in rows
                ],
                "limit": limit,
                "offset": offset
            }
    except Exception as e:
        if _is_table_missing_error(e):
            return {"items": [], "limit": limit, "offset": offset}
        await logger.aerror("content.list failed", error=str(e))
        raise HTTPException(status_code=500, detail="failed")


@router.get("/api/v1/content.view")
async def v1_content_view(hash: Optional[str] = None, id: Optional[str] = None):
    try:
        if not hash and not id:
            raise HTTPException(status_code=400, detail="hash or id required")
        async with db_manager.get_session() as session:
            if hash:
                result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE hash=:h LIMIT 1"), {"h": hash})
            else:
                result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:i LIMIT 1"), {"i": id})
            row = result.first()
            if not row:
                raise HTTPException(status_code=404, detail="not found")
            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4], "created_at": None}
    except HTTPException:
        raise
    except Exception as e:
        if _is_table_missing_error(e):
            raise HTTPException(status_code=404, detail="not found")
        await logger.aerror("content.view failed", error=str(e))
        raise HTTPException(status_code=500, detail="failed")


@router.get("/api/v1/content.view/{content_address}")
async def v1_content_view_path(content_address: str):
    try:
        async with db_manager.get_session() as session:
            result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:v OR hash=:v LIMIT 1"), {"v": content_address})
            row = result.first()
            if not row:
                raise HTTPException(status_code=404, detail="not found")
            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4], "created_at": None}
    except HTTPException:
        raise
    except Exception as e:
        if _is_table_missing_error(e):
            raise HTTPException(status_code=404, detail="not found")
        await logger.aerror("content.view(path) failed", error=str(e))
        raise HTTPException(status_code=500, detail="failed")


@router.get("/api/v1/content.friendlyList")
async def v1_content_friendly_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
    return await v1_content_list(limit, offset)


@router.get("/api/v1.5/content.list")
async def v1_5_content_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
    return await v1_content_list(limit, offset)


@router.post("/api/v1/blockchain.sendNewContentMessage")
async def v1_chain_send_new_content(payload: dict):
    await logger.ainfo("compat blockchain.sendNewContentMessage", payload=payload)
    return {"ok": True}


@router.post("/api/v1/blockchain.sendPurchaseContent")
async def v1_chain_send_purchase(payload: dict):
    await logger.ainfo("compat blockchain.sendPurchaseContent", payload=payload)
    return {"ok": True}


@router.post("/api/v1/blockchain.sendPurchaseContentMessage")
async def v1_chain_send_purchase_message(payload: dict):
    await logger.ainfo("compat blockchain.sendPurchaseContentMessage", payload=payload)
    return {"ok": True}


@router.get("/api/v1/account")
async def v1_account():
    return {"ok": True}
EOF

    # 2) Register the compat routes in FastAPI
    local fm="$app_dir/fastapi_main.py"
    if [ -f "$fm" ]; then
        if ! grep -q "fastapi_compat_routes" "$fm"; then
            sed -i "/from app.api.fastapi_system_routes/a from app.api.fastapi_compat_routes import router as compat_router" "$fm"
            sed -i "/app.include_router(node_stats_router)/a \ app.include_router(compat_router) # legacy compat routes" "$fm"
        fi
    fi

    # 3) Basic anti-replay protection for inter-node requests
    local fnr="$api_dir/fastapi_node_routes.py"
    if [ -f "$fnr" ]; then
        # Import the cache manager
        if ! grep -q "get_cache_manager" "$fnr"; then
            sed -i "/from app.core.logging/a from app.core.database import get_cache_manager" "$fnr"
        fi
        # Insert the nonce/timestamp check right after message_data is parsed
        sed -i "/message_data = json.loads/a \
        # Anti-replay: validate timestamp and nonce\n try:\n ts = message_data.get(\"timestamp\")\n nonce = message_data.get(\"nonce\")\n if ts:\n from datetime import datetime, timezone\n now = datetime.now(timezone.utc).timestamp()\n if abs(float(ts) - float(now)) > 300:\n raise HTTPException(status_code=400, detail=\"stale timestamp\")\n if nonce:\n cache = await get_cache_manager()\n cache_key = f\"replay:{node_id}:{nonce}\"\n if await cache.get(cache_key):\n raise HTTPException(status_code=400, detail=\"replay detected\")\n await cache.set(cache_key, True, ttl=600)\n except HTTPException:\n raise\n except Exception as _e:\n # For backward compatibility, do not fail hard if fields missing\n pass" "$fnr"
    fi
}
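
After step 2, the relevant region of fastapi_main.py would look roughly like the sketch below. It is illustrative only: the real module already defines the app and includes its own routers, and the node_stats_router line used as the sed anchor is assumed to exist there.

from fastapi import FastAPI
# Line appended by the first sed command, next to the existing route imports:
from app.api.fastapi_compat_routes import router as compat_router

app = FastAPI()
# Line appended by the second sed command, after node_stats_router is included:
app.include_router(compat_router)  # legacy compat routes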
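
Unescaped for readability, the guard that the step 3 sed splices in right after message_data = json.loads(...) amounts to the following. Indentation here is illustrative; message_data, node_id, and HTTPException come from the surrounding handler, and get_cache_manager from the import added above.

# Anti-replay: validate timestamp and nonce
try:
    ts = message_data.get("timestamp")
    nonce = message_data.get("nonce")
    if ts:
        from datetime import datetime, timezone
        now = datetime.now(timezone.utc).timestamp()
        # Reject messages whose signed timestamp is more than 5 minutes off
        if abs(float(ts) - float(now)) > 300:
            raise HTTPException(status_code=400, detail="stale timestamp")
    if nonce:
        cache = await get_cache_manager()
        cache_key = f"replay:{node_id}:{nonce}"
        # A nonce already seen within the TTL window means a replayed request
        if await cache.get(cache_key):
            raise HTTPException(status_code=400, detail="replay detected")
        await cache.set(cache_key, True, ttl=600)
except HTTPException:
    raise  # re-raise rejections so the blanket except below cannot swallow them
except Exception as _e:
    # For backward compatibility, do not fail hard if the fields are missing
    pass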
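
Once the layer is installed and the node restarted, the legacy surface can be smoke-tested from any machine that can reach the node. A minimal sketch using httpx, assuming the node listens on http://localhost:8000 (the same fallback the routes above use for PROJECT_HOST):

# Smoke test for the compat endpoints; the base URL is an assumption.
import httpx

with httpx.Client(base_url="http://localhost:8000", timeout=10) as client:
    # Legacy version probe: echoes the CODEBASE_HASH / CODEBASE_BRANCH env vars
    print(client.get("/api/system.version").json())
    # Node identity, JSON and human-readable variants
    print(client.get("/api/v1/node").json())
    print(client.get("/api/v1/nodeFriendly").text)
    # Paged listing; returns empty items if the content table is absent
    print(client.get("/api/v1/content.list", params={"limit": 5, "offset": 0}).json())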

# Function that checks Docker registry availability
check_docker_registry() {
    log_info "Checking Docker registry availability..."

@@ -2063,30 +2379,44 @@ create_systemd_service() {
        compose_service_cmd="docker compose" # default
    fi

    # Create the unit with hardened dependencies and preflight checks
    cat > /etc/systemd/system/my-network.service << EOF
[Unit]
Description=MY Network v3.0 Node
Wants=network-online.target
After=network-online.target docker.service
Requires=docker.service

[Service]
Type=oneshot
RemainAfterExit=yes
WorkingDirectory=$PROJECT_DIR/my-network
Environment=COMPOSE_PROJECT_NAME=my-network
ExecStartPre=/bin/sh -lc 'which docker >/dev/null'
ExecStartPre=/bin/sh -lc "$compose_service_cmd version"
ExecStartPre=/bin/sh -lc "$compose_service_cmd -f docker-compose.yml config -q"
ExecStartPre=/bin/sh -lc "$compose_service_cmd pull --quiet || true"
ExecStart=/bin/sh -lc "$compose_service_cmd up -d"
ExecStop=/bin/sh -lc "$compose_service_cmd down"
TimeoutStartSec=300
TimeoutStopSec=120
User=root

[Install]
WantedBy=multi-user.target
EOF

    # Enable the service and start it immediately
    systemctl daemon-reload
    systemctl enable --now my-network || true

    # Verify that the service actually started
    if systemctl is-active my-network >/dev/null 2>&1; then
        log_success "Systemd service created, enabled, and started"
    else
        log_warn "Systemd service created but not active. Checking logs and attempting to start..."
        systemctl start my-network || true
    fi
}

# Final report

@@ -2305,6 +2635,7 @@ main() {
    create_directories
    setup_project
    patch_app_routes
    add_compatibility_layer
    build_converter_image
    setup_nginx
    generate_config