Merge remote-tracking branch 'origin/master'

commit 4b06cd8a77
@@ -6,17 +6,16 @@ while new v3 sync API works in parallel.
 
 import base64
 import os
-from typing import Optional, List
+from typing import Optional
 
-import aiofiles
 from fastapi import APIRouter, UploadFile, File, HTTPException, Query
 from fastapi.responses import JSONResponse, StreamingResponse, PlainTextResponse
-from sqlalchemy import select
+from sqlalchemy import text
+import aiofiles
 
 from app.core.logging import get_logger
 from app.core.config import get_settings
 from app.core.database import db_manager
-from app.core.models.content_models import StoredContent as Content
 from app.core.storage import LocalStorageBackend
 
 router = APIRouter(prefix="", tags=["compat-v1"])
@@ -24,14 +23,12 @@ logger = get_logger(__name__)
 settings = get_settings()
 
 
-@router.get("/")
-async def index_root():
-    return PlainTextResponse("MY Network Node", status_code=200)
-
-
-@router.get("/favicon.ico")
-async def favicon():
-    return PlainTextResponse("", status_code=204)
+def _is_table_missing_error(exc: Exception) -> bool:
+    try:
+        msg = str(exc)
+        return 'UndefinedTable' in msg or 'does not exist' in msg or ('relation' in msg and 'does not exist' in msg)
+    except Exception:
+        return False
 
 
 @router.get("/api/system.version")
@@ -84,18 +81,22 @@ async def platform_metadata():
     }
 
 
+# Legacy index and favicon
+@router.get("/")
+async def index_root():
+    return PlainTextResponse("MY Network Node", status_code=200)
+
+
+@router.get("/favicon.ico")
+async def favicon():
+    return PlainTextResponse("", status_code=204)
+
+
+# Legacy node endpoints
 @router.get("/api/v1/node")
 async def v1_node():
     from app.core.crypto import get_ed25519_manager
     cm = get_ed25519_manager()
-    return {
-        "id": cm.node_id,
-        "node_address": "",
-        "master_address": "",
-        "indexer_height": 0,
-        "services": {}
-    }
+    return {"id": cm.node_id, "node_address": "", "master_address": "", "indexer_height": 0, "services": {}}
 
 
 @router.get("/api/v1/nodeFriendly")
 async def v1_node_friendly():
@@ -104,69 +105,43 @@ async def v1_node_friendly():
     return PlainTextResponse(f"Node ID: {cm.node_id}\nIndexer height: 0\nServices: none\n")
 
 
+# Legacy auth endpoints
 @router.post("/api/v1/auth.twa")
 async def v1_auth_twa(payload: dict):
     user_ref = payload.get("user") or {}
     token = base64.b64encode(f"twa:{user_ref}".encode()).decode()
     return {"token": token}
 
 
 @router.get("/api/v1/auth.me")
 async def v1_auth_me():
     return {"user": None, "status": "guest"}
 
 
 @router.post("/api/v1/auth.selectWallet")
 async def v1_auth_select_wallet(payload: dict):
     return {"ok": True}
 
 
 @router.get("/api/v1/tonconnect.new")
 async def v1_tonconnect_new():
     return {"ok": True}
 
 
 @router.post("/api/v1/tonconnect.logout")
 async def v1_tonconnect_logout(payload: dict):
     return {"ok": True}
 
 
-@router.post("/api/v1.5/storage")
-async def v1_5_storage_upload(file: UploadFile = File(...)):
-    return await v1_storage_upload(file)
-
-
-@router.get("/api/v1.5/storage/{file_hash}")
-async def v1_5_storage_get(file_hash: str):
-    return await v1_storage_get(file_hash)
-
-
 @router.post("/api/v1/storage")
 async def v1_storage_upload(file: UploadFile = File(...)):
     try:
         data = await file.read()
         if not data:
             raise HTTPException(status_code=400, detail="empty file")
 
         backend = LocalStorageBackend()
+        from hashlib import sha256
         file_hash = sha256(data).hexdigest()
         file_path = os.path.join(backend.files_path, file_hash)
         async with aiofiles.open(file_path, 'wb') as f:
             await f.write(data)
-        async with db_manager.get_session() as session:
-            existing = await session.execute(select(Content).where(Content.hash == file_hash))
-            if existing.scalars().first() is None:
-                content = Content(
-                    hash=file_hash,
-                    filename=file.filename or file_hash,
-                    file_size=len(data),
-                    mime_type=file.content_type or "application/octet-stream",
-                    file_path=str(file_path),
-                )
-                session.add(content)
-                await session.commit()
-
+        # Return the hash without an ORM write, to avoid schema conflicts
         return {"hash": file_hash}
     except HTTPException:
         raise
@@ -179,16 +154,17 @@ async def v1_storage_upload(file: UploadFile = File(...)):
 async def v1_storage_get(file_hash: str):
     try:
         async with db_manager.get_session() as session:
-            result = await session.execute(select(Content).where(Content.hash == file_hash))
-            content = result.scalars().first()
-            if not content or not content.file_path:
+            result = await session.execute(text("SELECT file_path FROM my_network_content WHERE hash=:h LIMIT 1"), {"h": file_hash})
+            row = result.first()
+            if not row or not row[0]:
                 raise HTTPException(status_code=404, detail="not found")
 
             backend = LocalStorageBackend()
-            return StreamingResponse(backend.get_file_stream(content.file_path))
+            return StreamingResponse(backend.get_file_stream(row[0]))
     except HTTPException:
         raise
     except Exception as e:
+        if _is_table_missing_error(e):
+            raise HTTPException(status_code=404, detail="not found")
         await logger.aerror("v1 storage get failed", error=str(e))
         raise HTTPException(status_code=500, detail="failed")
@@ -197,20 +173,16 @@ async def v1_storage_get(file_hash: str):
 async def v1_decode_content_id(content_id: str):
     try:
         async with db_manager.get_session() as session:
-            result = await session.execute(select(Content).where(Content.id == content_id))
-            content = result.scalars().first()
-            if not content:
+            result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:i LIMIT 1"), {"i": content_id})
+            row = result.first()
+            if not row:
                 raise HTTPException(status_code=404, detail="not found")
-            return {
-                "id": content.id,
-                "hash": content.hash,
-                "filename": content.filename,
-                "size": content.file_size,
-                "mime_type": content.mime_type,
-            }
+            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4]}
     except HTTPException:
         raise
     except Exception as e:
+        if _is_table_missing_error(e):
+            raise HTTPException(status_code=404, detail="not found")
         await logger.aerror("decodeContentId failed", error=str(e))
         raise HTTPException(status_code=500, detail="failed")
@@ -219,22 +191,21 @@ async def v1_decode_content_id(content_id: str):
 async def v1_content_list(limit: int = Query(50, ge=1, le=200), offset: int = Query(0, ge=0)):
     try:
         async with db_manager.get_session() as session:
-            result = await session.execute(select(Content).offset(offset).limit(limit))
-            items: List[Content] = result.scalars().all()
+            result = await session.execute(
+                text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content ORDER BY created_at DESC LIMIT :lim OFFSET :off"),
+                {"lim": limit, "off": offset}
+            )
+            rows = result.fetchall() or []
             return {
                 "items": [
-                    {
-                        "id": it.id,
-                        "hash": it.hash,
-                        "filename": it.filename,
-                        "size": it.file_size,
-                        "mime_type": it.mime_type,
-                    } for it in items
+                    {"id": str(r[0]), "hash": r[1], "filename": r[2], "size": r[3], "mime_type": r[4]} for r in rows
                 ],
                 "limit": limit,
                 "offset": offset
             }
     except Exception as e:
+        if _is_table_missing_error(e):
+            return {"items": [], "limit": limit, "offset": offset}
         await logger.aerror("content.list failed", error=str(e))
         raise HTTPException(status_code=500, detail="failed")
@@ -245,26 +216,19 @@ async def v1_content_view(hash: Optional[str] = None, id: Optional[str] = None):
         if not hash and not id:
             raise HTTPException(status_code=400, detail="hash or id required")
         async with db_manager.get_session() as session:
-            stmt = select(Content)
             if hash:
-                stmt = stmt.where(Content.hash == hash)
-            if id:
-                stmt = stmt.where(Content.id == id)
-            result = await session.execute(stmt)
-            content = result.scalars().first()
-            if not content:
+                result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE hash=:h LIMIT 1"), {"h": hash})
+            else:
+                result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:i LIMIT 1"), {"i": id})
+            row = result.first()
+            if not row:
                 raise HTTPException(status_code=404, detail="not found")
-            return {
-                "id": content.id,
-                "hash": content.hash,
-                "filename": content.filename,
-                "size": content.file_size,
-                "mime_type": content.mime_type,
-                "created_at": getattr(content, "created_at", None)
-            }
+            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4], "created_at": None}
     except HTTPException:
         raise
     except Exception as e:
+        if _is_table_missing_error(e):
+            raise HTTPException(status_code=404, detail="not found")
         await logger.aerror("content.view failed", error=str(e))
         raise HTTPException(status_code=500, detail="failed")
@@ -273,21 +237,16 @@ async def v1_content_view(hash: Optional[str] = None, id: Optional[str] = None):
 async def v1_content_view_path(content_address: str):
     try:
         async with db_manager.get_session() as session:
-            result = await session.execute(select(Content).where((Content.id == content_address) | (Content.hash == content_address)))
-            content = result.scalars().first()
-            if not content:
+            result = await session.execute(text("SELECT id, hash, filename, file_size, mime_type FROM my_network_content WHERE id=:v OR hash=:v LIMIT 1"), {"v": content_address})
+            row = result.first()
+            if not row:
                 raise HTTPException(status_code=404, detail="not found")
-            return {
-                "id": content.id,
-                "hash": content.hash,
-                "filename": content.filename,
-                "size": content.file_size,
-                "mime_type": content.mime_type,
-                "created_at": getattr(content, "created_at", None)
-            }
+            return {"id": str(row[0]), "hash": row[1], "filename": row[2], "size": row[3], "mime_type": row[4], "created_at": None}
     except HTTPException:
         raise
     except Exception as e:
+        if _is_table_missing_error(e):
+            raise HTTPException(status_code=404, detail="not found")
         await logger.aerror("content.view(path) failed", error=str(e))
         raise HTTPException(status_code=500, detail="failed")
@@ -323,4 +282,3 @@ async def v1_chain_send_purchase_message(payload: dict):
 @router.get("/api/v1/account")
 async def v1_account():
     return {"ok": True}
-

@@ -20,32 +20,33 @@ router = APIRouter(prefix="/api/node", tags=["node-communication"])
 
 async def validate_node_request(request: Request) -> Dict[str, Any]:
     """Validate an inter-node request with mandatory signature verification"""
-    # Check that the required headers are present
+    # Headers
     required_headers = ["x-node-communication", "x-node-id", "x-node-public-key", "x-node-signature"]
     for header in required_headers:
         if header not in request.headers:
             raise HTTPException(status_code=400, detail=f"Missing required header: {header}")
 
-    # Check that this is inter-node communication
     if request.headers.get("x-node-communication") != "true":
         raise HTTPException(status_code=400, detail="Not a valid inter-node communication")
 
     try:
         crypto_manager = get_ed25519_manager()
 
-        # Read the headers
         signature = request.headers.get("x-node-signature")
         node_id = request.headers.get("x-node-id")
         public_key = request.headers.get("x-node-public-key")
 
-        # Read the request body
+        # Request body
         body = await request.body()
         if not body:
             raise HTTPException(status_code=400, detail="Empty message body")
 
+        # JSON
         try:
             message_data = json.loads(body.decode())
-            # Anti-replay: validate timestamp and nonce
+        except json.JSONDecodeError:
+            raise HTTPException(status_code=400, detail="Invalid JSON in request body")
+
+        # Anti-replay (optional, for backward compatibility)
         try:
             ts = message_data.get("timestamp")
             nonce = message_data.get("nonce")
@@ -61,25 +62,17 @@ async def validate_node_request(request: Request) -> Dict[str, Any]:
                     raise HTTPException(status_code=400, detail="replay detected")
                 await cache.set(cache_key, True, ttl=600)
         except Exception:
-            # Backward compatible: missing fields
+            # ignore for backward compatibility
             pass
-        except json.JSONDecodeError:
-            raise HTTPException(status_code=400, detail="Invalid JSON in request body")
 
-        # Verify the signature
+        # Signature
         is_valid = crypto_manager.verify_signature(message_data, signature, public_key)
 
         if not is_valid:
             logger.warning(f"Invalid signature from node {node_id}")
             raise HTTPException(status_code=403, detail="Invalid cryptographic signature")
 
         logger.debug(f"Valid signature verified for node {node_id}")
-        return {
-            "node_id": node_id,
-            "public_key": public_key,
-            "message": message_data
-        }
+        return {"node_id": node_id, "public_key": public_key, "message": message_data}
 
     except HTTPException:
         raise
@@ -88,6 +81,7 @@ async def validate_node_request(request: Request) -> Dict[str, Any]:
         raise HTTPException(status_code=500, detail="Cryptographic verification failed")
 
 
+
 async def create_node_response(data: Dict[str, Any], request: Request) -> JSONResponse:
     """Create a signed response for inter-node communication"""
     try:

@@ -314,8 +314,6 @@ def setup_exception_handlers(app: FastAPI):
 
         # Increment the error counter for monitoring
         from app.api.fastapi_system_routes import increment_error_counter
-        from app.api.fastapi_compat_routes import router as compat_router
-        from app.api.fastapi_v3_routes import router as v3_router
         await increment_error_counter()
 
         return JSONResponse(
@@ -340,8 +338,6 @@ def setup_middleware_hooks(app: FastAPI):
 
         # Increment the request counter
         from app.api.fastapi_system_routes import increment_request_counter
-        from app.api.fastapi_compat_routes import router as compat_router
-        from app.api.fastapi_v3_routes import router as v3_router
         await increment_request_counter()
 
         # Check maintenance mode
@@ -387,8 +383,6 @@ from app.api.fastapi_v3_routes import router as v3_router
         )
 
         from app.api.fastapi_system_routes import increment_error_counter
-        from app.api.fastapi_compat_routes import router as compat_router
-        from app.api.fastapi_v3_routes import router as v3_router
         await increment_error_counter()
 
         raise

@@ -1,14 +1,14 @@
 {
   "version": "3.0.0",
-  "network_id": "my-network-1755263533",
-  "created_at": "2025-08-15T13:12:13Z",
+  "network_id": "my-network-1755317385",
+  "created_at": "2025-08-16T04:09:45Z",
   "bootstrap_nodes": [
     {
-      "id": "node-e3ebfd8e2444dd4f",
-      "node_id": "node-e3ebfd8e2444dd4f",
+      "id": "node-3a2c6a21e3401fce",
+      "node_id": "node-3a2c6a21e3401fce",
       "address": "2a02:6b40:2000:16b1::1",
       "port": 8000,
-      "public_key": "e3ebfd8e2444dd4f8747472a3c753708e45a47b16f33401790caa5c5ca67534d",
+      "public_key": "3a2c6a21e3401fceed1fb63c45d068f20e21b48159db3a961a2c43e8701071d4",
       "trusted": true,
       "node_type": "bootstrap"
     }