chunks streaming, my network models
This commit is contained in:
parent
a2c3fe4f98
commit
dba359bb6a
@@ -0,0 +1,55 @@
1. GET /api/v1/node
{
    node_address: service_wallet,
    master_address: platform collection,
    indexer_height: int,
    services: {
        ..status & delay only
    }
}

2. POST /api/system.sendStatus

3. GET /api/system.version
{
    codebase_hash: string,
    codebase_branch: string
}

4. GET /api/tonconnect-manifest.json
used for TON Connect on the bot frontend (centralization)

5. GET /api/platform-metadata.json
used when creating the platform, if one was not created earlier

6. POST /api/v1/auth.twa
stores information about the user (their wallet) and returns a token for operations with this node

7. GET /api/v1/tonconnect.new [! DEPRECATED !]

8. POST /api/v1/tonconnect.logout [! DEPRECATED !] but possibly still needed when the wallet is disconnected in the web app

9. POST /api/v1/storage
uploads content to the node and returns its hash. TODO: uploads must be chunked so the server does not fall over

10. GET /api/v1/storage/<file_hash>
returns the file in one piece. TODO: look into real file streaming
TODO: implement looking up a file on other nodes

11. GET /api/v1/storage.decodeContentId/<content_id>
just a toy

12. GET /api/v1/account [! DEPRECATED !]
returns information about the user; not needed at all

13. POST /api/v1/blockchain.sendNewContentMessage

14. POST /api/v1/blockchain.sendPurchaseContent

15. GET /api/v1/content.list

16. GET /api/v1/content.view
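For orientation, a minimal sketch of calling the two read-only endpoints above. The host and port are assumptions for illustration; field names follow the response shapes documented above.

```python
# Sketch only: NODE_URL is a hypothetical address of a running node.
import requests

NODE_URL = "http://localhost:8080"  # assumption

node_info = requests.get(f"{NODE_URL}/api/v1/node").json()
print(node_info["indexer_height"], node_info["node_address"])

version = requests.get(f"{NODE_URL}/api/system.version").json()
print(version["codebase_hash"], version["codebase_branch"])
```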
@@ -5,6 +5,7 @@
```shell
cd sanic-telegram-bot
# edit .env file
+# build media_converter git.projscale.dev/my-dev/converter-module
docker-compose up --build
```
@@ -17,6 +17,7 @@ from app.api.routes.auth import s_api_v1_auth_twa
from app.api.routes.statics import s_api_tonconnect_manifest, s_api_platform_metadata
from app.api.routes.node_storage import s_api_v1_storage_post, s_api_v1_storage_get, \
    s_api_v1_storage_decode_cid
+from app.api.routes.progressive_storage import s_api_v1_5_storage_get, s_api_v1_5_storage_post
from app.api.routes.account import s_api_v1_account_get
from app.api.routes._blockchain import s_api_v1_blockchain_send_new_content_message, \
    s_api_v1_blockchain_send_purchase_content_message
@@ -39,6 +40,9 @@ app.add_route(s_api_v1_auth_twa, "/api/v1/auth.twa", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_tonconnect_new, "/api/v1/tonconnect.new", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_tonconnect_logout, "/api/v1/tonconnect.logout", methods=["POST", "OPTIONS"])

+app.add_route(s_api_v1_5_storage_post, "/api/v1.5/storage", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_5_storage_get, "/api/v1.5/storage/<file_hash>", methods=["GET", "OPTIONS"])
+
app.add_route(s_api_v1_storage_post, "/api/v1/storage", methods=["POST", "OPTIONS"])
app.add_route(s_api_v1_storage_get, "/api/v1/storage/<file_hash>", methods=["GET", "OPTIONS"])
app.add_route(s_api_v1_storage_decode_cid, "/api/v1/storage.decodeContentId/<content_id>", methods=["GET", "OPTIONS"])
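The two new v1.5 routes point at handlers that consume the request body incrementally via `request.stream` (see the new file further down). As a rough sketch of that pattern, assuming a Sanic version where `request.stream` is async-iterable as in this commit (some versions also require registering the route with `stream=True`):

```python
from sanic import Sanic, response

app = Sanic("stream_sketch")  # hypothetical app, for illustration only

@app.post("/sketch/upload", stream=True)  # stream=True: assumption, version-dependent
async def sketch_upload(request):
    # Count bytes chunk by chunk without buffering the whole body in memory
    total = 0
    async for chunk in request.stream:
        total += len(chunk)
    return response.json({"received_bytes": total})
```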
@@ -75,6 +75,7 @@ async def try_service_authorization(request):
    if not signature:
        return

+    # TODO: what is the point of this check if it can be spoofed?
    message_hash_b58 = request.headers.get('X-Message-Hash')
    if not message_hash_b58:
        return
@@ -77,9 +77,12 @@ async def s_api_v1_blockchain_send_new_content_message(request):
    image_content_cid = None
    image_content = None

+    content_title = f"{', '.join(request.json['authors'])} - {request.json['title']}" if request.json['authors'] else request.json['title']
+
    metadata_content = await create_metadata_for_item(
        request.ctx.db_session,
-        title=f"{', '.join(request.json['authors'])} - {request.json['title']}" if request.json['authors'] else request.json['title'],
+        title=content_title,
        cover_url=f"{PROJECT_HOST}/api/v1/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None,
        authors=request.json['authors'],
        hashtags=request.json['hashtags']
@@ -96,6 +99,16 @@ async def s_api_v1_blockchain_send_new_content_message(request):
        )
        i += 1

+    await request.ctx.user_uploader_wrapper.send_message(
+        request.ctx.user.translated('p_uploadContentTxRequested').format(
+            title=content_title,
+        ), message_type='hint', message_meta={
+            'encrypted_content_hash': encrypted_content_cid.content_hash,
+            'hint_type': 'uploadContentTxRequested'
+        }
+    )
+
    return response.json({
        'address': platform.address.to_string(1, 1, 1),
        'amount': str(int(0.15 * 10 ** 9)),
@@ -20,6 +20,14 @@ from uuid import uuid4
import subprocess


+# Any content is uploaded in a single request, with mime_type determined from the file extension
+# file_mimetype audio/video
+# extension_encoding file encode container
+# The file is stored under sha256(file_content) !!, very heavy
+# A CID is generated that accounts for the content type and how to decode it
+# Uploads come only from a user, or when our own backend asks to upload something
+# Creates a decrypted (local/content_bin) StoredContent
+
async def s_api_v1_storage_post(request):
    if not request.files:
        return response.json({"error": "No file provided"}, status=400)
@@ -93,6 +101,8 @@ async def s_api_v1_storage_post(request):
        return response.json({"error": f"Error: {e}"}, status=500)


+# Fetches content by file_hash, honoring the seconds_limit query parameter
+
async def s_api_v1_storage_get(request, file_hash=None):
    seconds_limit = int(request.args.get("seconds_limit", 0))
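A one-line illustration of the `seconds_limit` parameter read above; the host is a placeholder and `file_hash` stands in for a real base58 hash.

```python
import requests

NODE_URL = "http://localhost:8080"   # assumption
file_hash = "<base58 file hash>"     # hypothetical

# Ask the node for only the first 30 seconds of the media file
preview = requests.get(f"{NODE_URL}/api/v1/storage/{file_hash}",
                       params={"seconds_limit": 30})
print(preview.status_code, len(preview.content))
```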
@@ -0,0 +1,278 @@
import os
import subprocess
import asyncio
from uuid import uuid4
from datetime import datetime
from mimetypes import guess_type

import aiofiles
from base58 import b58encode
from sanic import response

from app.core.logger import make_log
from app.core.models.node_storage import StoredContent
from app.core._config import UPLOADS_DIR
from app.core._utils.resolve_content import resolve_content


# POST /api/v1.5/storage
async def s_api_v1_5_storage_post(request):
    # Log the start of the file upload process
    make_log("uploader_v1.5", "Received file upload request", level="INFO")

    # Get the provided file hash from header (hex format)
    provided_hash_hex = request.headers.get("X-Content-SHA256")
    if not provided_hash_hex:
        make_log("uploader_v1.5", "Missing X-Content-SHA256 header", level="ERROR")
        return response.json({"error": "Missing X-Content-SHA256 header"}, status=400)
    try:
        provided_hash_bytes = bytes.fromhex(provided_hash_hex)
        provided_hash_b58 = b58encode(provided_hash_bytes).decode()
        make_log("uploader_v1.5", f"Provided hash (base58): {provided_hash_b58}", level="INFO")
    except Exception as e:
        make_log("uploader_v1.5", f"Invalid X-Content-SHA256 header format: {e}", level="ERROR")
        return response.json({"error": "Invalid X-Content-SHA256 header format"}, status=400)

    provided_filename = request.headers.get("X-File-Name")

    # Check if the file already exists in the database
    db_session = request.ctx.db_session
    existing = db_session.query(StoredContent).filter_by(hash=provided_hash_b58).first()
    if existing:
        make_log("uploader_v1.5", f"File with hash {provided_hash_b58} already exists in DB", level="INFO")
        serialized_v2 = existing.serialize_v2()  # returns a string
        serialized_v1 = existing.serialize_v1()  # returns a string
        return response.json({
            "content_sha256": provided_hash_b58,
            "content_id": serialized_v2,
            "content_id_v1": serialized_v1,
            "content_url": f"dmy://storage?cid={serialized_v2}",
        })

    # Save uploaded file to a temporary location using streaming
    temp_filename = f"v1.5_upload_{uuid4()}"
    temp_path = os.path.join(UPLOADS_DIR, temp_filename)
    make_log("uploader_v1.5", f"Saving file to temporary location: {temp_path}", level="INFO")
    try:
        async with aiofiles.open(temp_path, 'wb') as out_file:
            async for chunk in request.stream:
                await out_file.write(chunk)
        make_log("uploader_v1.5", f"Finished saving file to temporary location: {temp_path}", level="INFO")
    except Exception as e:
        make_log("uploader_v1.5", f"Error saving uploaded file: {e}", level="ERROR")
        return response.json({"error": "Failed to save uploaded file"}, status=500)

    # Compute file SHA256 using subprocess to avoid loading the file into memory
    make_log("uploader_v1.5", f"Computing file hash using subprocess for file: {temp_path}", level="INFO")
    try:
        proc = await asyncio.create_subprocess_exec(
            'sha256sum', temp_path,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await proc.communicate()
        if proc.returncode != 0:
            error_msg = stderr.decode().strip()
            make_log("uploader_v1.5", f"sha256sum error: {error_msg}", level="ERROR")
            return response.json({"error": "Failed to compute file hash"}, status=500)
        # Parse output: "<hash>  <filename>"
        computed_hash_hex = stdout.decode().split()[0].strip()
        computed_hash_bytes = bytes.fromhex(computed_hash_hex)
        computed_hash_b58 = b58encode(computed_hash_bytes).decode()
        make_log("uploader_v1.5", f"Computed hash (base58): {computed_hash_b58}", level="INFO")
    except Exception as e:
        make_log("uploader_v1.5", f"Error computing file hash: {e}", level="ERROR")
        return response.json({"error": "Error computing file hash"}, status=500)

    # Verify that the computed hash matches the provided hash
    if computed_hash_b58 != provided_hash_b58:
        make_log("uploader_v1.5", f"Hash mismatch: provided {provided_hash_b58} vs computed {computed_hash_b58}", level="ERROR")
        try:
            os.remove(temp_path)
            make_log("uploader_v1.5", f"Temporary file removed due to hash mismatch: {temp_path}", level="INFO")
        except Exception as e:
            make_log("uploader_v1.5", f"Error removing temp file: {e}", level="ERROR")
        return response.json({"error": "Hash mismatch"}, status=400)

    # Determine the final file path
    final_path = os.path.join(UPLOADS_DIR, f"v1.5_{computed_hash_b58}")
    if os.path.exists(final_path):
        make_log("uploader_v1.5", f"File already exists on disk: {final_path}", level="INFO")
        try:
            os.remove(temp_path)
            make_log("uploader_v1.5", f"Temporary file removed: {temp_path}", level="INFO")
        except Exception as e:
            make_log("uploader_v1.5", f"Error removing temp file: {e}", level="ERROR")
        existing = db_session.query(StoredContent).filter_by(hash=computed_hash_b58).first()
        if existing:
            serialized_v2 = existing.serialize_v2()
            serialized_v1 = existing.serialize_v1()
            return response.json({
                "content_sha256": computed_hash_b58,
                "content_id": serialized_v2,
                "content_id_v1": serialized_v1,
                "content_url": f"dmy://storage?cid={serialized_v2}",
            })
    else:
        try:
            os.rename(temp_path, final_path)
            make_log("uploader_v1.5", f"Renamed temporary file to final location: {final_path}", level="INFO")
        except Exception as e:
            make_log("uploader_v1.5", f"Error renaming file: {e}", level="ERROR")
            return response.json({"error": "Failed to finalize file storage"}, status=500)

    # Create a new StoredContent record with user_id from request.ctx.user and commit to DB
    try:
        new_content = StoredContent(
            type='local/content_bin',
            hash=computed_hash_b58,
            user_id=request.ctx.user.id,  # 'user_id' is added to StoredContent
            filename=provided_filename,
            key_id=None,
            meta={},
            created=datetime.utcnow()
        )
        db_session.add(new_content)
        db_session.commit()
        make_log("uploader_v1.5", f"New file stored and indexed for user {request.ctx.user.id} with hash {computed_hash_b58}", level="INFO")
    except Exception as e:
        make_log("uploader_v1.5", f"Database error: {e}", level="ERROR")
        return response.json({"error": "Database error"}, status=500)

    serialized_v2 = new_content.serialize_v2()
    serialized_v1 = new_content.serialize_v1()
    return response.json({
        "content_sha256": computed_hash_b58,
        "content_id": serialized_v2,
        "content_id_v1": serialized_v1,
        "content_url": f"dmy://storage?cid={serialized_v2}",
    })


# GET /api/v1.5/storage/<file_hash>
async def s_api_v1_5_storage_get(request, file_hash):
    # Log the file retrieval request
    make_log("uploader_v1.5", f"Received file retrieval request for hash: {file_hash}", level="INFO")

    # Determine the file path based on the provided file_hash
    final_path = os.path.join(UPLOADS_DIR, f"v1.5_{file_hash}")
    if not os.path.exists(final_path):
        make_log("uploader_v1.5", f"File not found: {final_path}", level="ERROR")
        return response.json({"error": "File not found"}, status=404)

    # Retrieve the StoredContent record from the database
    db_session = request.ctx.db_session
    stored = db_session.query(StoredContent).filter_by(hash=file_hash).first()
    if stored and stored.filename:
        filename_for_mime = stored.filename
    else:
        # If the record is not found or filename is not set, fall back to the file path
        filename_for_mime = final_path

    # Determine MIME type using the filename from StoredContent
    mime_type, _ = guess_type(filename_for_mime)
    if not mime_type:
        mime_type = "application/octet-stream"

    file_size = os.path.getsize(final_path)
    range_header = request.headers.get("Range")

    if range_header:
        make_log("uploader_v1.5", f"Processing Range header: {range_header}", level="INFO")
        range_spec = range_header.strip().lower()
        if not range_spec.startswith("bytes="):
            make_log("uploader_v1.5", f"Invalid Range header: {range_header}", level="ERROR")
            return response.json({"error": "Invalid Range header"}, status=400)
        range_spec = range_spec[len("bytes="):]
        # Split by comma to handle multiple ranges
        range_parts = [part.strip() for part in range_spec.split(',')]
        parsed_ranges = []
        try:
            for part in range_parts:
                if '-' not in part:
                    raise ValueError("Invalid range format")
                start_str, end_str = part.split('-', 1)
                if start_str == "":
                    # Suffix byte range: last N bytes
                    suffix_length = int(end_str)
                    if suffix_length > file_size:
                        start = 0
                    else:
                        start = file_size - suffix_length
                    end = file_size - 1
                else:
                    start = int(start_str)
                    if end_str == "":
                        end = file_size - 1
                    else:
                        end = int(end_str)
                if start > end or end >= file_size:
                    raise ValueError("Requested Range Not Satisfiable")
                parsed_ranges.append((start, end))
        except Exception as e:
            make_log("uploader_v1.5", f"Invalid Range header: {range_header} - {e}", level="ERROR")
            return response.json({"error": "Invalid Range header"}, status=400)

        # If only one range is requested, use a single range response
        if len(parsed_ranges) == 1:
            start, end = parsed_ranges[0]
            content_length = end - start + 1
            headers = {
                "Content-Range": f"bytes {start}-{end}/{file_size}",
                "Accept-Ranges": "bytes",
                "Content-Length": str(content_length),
                "Content-Type": mime_type,
            }

            async def stream_file_range():
                # Stream single range content
                make_log("uploader_v1.5", f"Starting to stream file from byte {start} to {end}", level="INFO")
                async with aiofiles.open(final_path, mode='rb') as f:
                    await f.seek(start)
                    remaining = content_length
                    chunk_size = 1024 * 1024  # 1MB chunks
                    while remaining > 0:
                        read_size = min(chunk_size, remaining)
                        data = await f.read(read_size)
                        if not data:
                            break
                        remaining -= len(data)
                        yield data
                make_log("uploader_v1.5", f"Finished streaming file: {final_path}", level="INFO")

            return response.stream(stream_file_range, status=206, headers=headers)
        else:
            # Multiple ranges requested: create a multipart/byteranges response
            boundary = uuid4().hex  # Generate a random boundary string
            headers = {
                "Content-Type": f"multipart/byteranges; boundary={boundary}",
                "Accept-Ranges": "bytes",
            }

            async def stream_multipart():
                # For each range, yield the boundary, part headers, and the file content
                for start, end in parsed_ranges:
                    part_header = (
                        f"--{boundary}\r\n"
                        f"Content-Type: {mime_type}\r\n"
                        f"Content-Range: bytes {start}-{end}/{file_size}\r\n"
                        f"\r\n"
                    )
                    yield part_header.encode()
                    part_length = end - start + 1
                    async with aiofiles.open(final_path, mode='rb') as f:
                        await f.seek(start)
                        remaining = part_length
                        chunk_size = 1024 * 1024  # 1MB chunks
                        while remaining > 0:
                            read_size = min(chunk_size, remaining)
                            data = await f.read(read_size)
                            if not data:
                                break
                            remaining -= len(data)
                            yield data
                    yield b"\r\n"
                # Final boundary marker
                yield f"--{boundary}--\r\n".encode()

            return response.stream(stream_multipart, status=206, headers=headers)
    else:
        # No Range header: return the full file
        make_log("uploader_v1.5", f"Returning full file for video/audio: {final_path}", level="INFO")
        return await response.file(final_path, mime_type=mime_type)
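To exercise the POST handler above end to end, here is a hedged client sketch: it pre-hashes the file (the server re-hashes and compares), then streams the body with the two headers the handler reads. `NODE_URL` is taken from the test page later in this commit; everything else is illustrative.

```python
import hashlib
import os

import requests  # assumption: available in the client environment

NODE_URL = "https://my-public-node-1.projscale.dev"  # host from the test page

def upload_v1_5(path: str) -> dict:
    # Hash incrementally so large files never sit fully in memory
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            sha256.update(chunk)

    # requests streams file objects, matching the server's
    # `async for chunk in request.stream` consumption
    with open(path, "rb") as f:
        r = requests.post(
            f"{NODE_URL}/api/v1.5/storage",
            data=f,
            headers={
                "X-Content-SHA256": sha256.hexdigest(),
                "X-File-Name": os.path.basename(path),
                "Content-Type": "application/octet-stream",
            },
        )
    r.raise_for_status()
    return r.json()  # content_sha256, content_id, content_id_v1, content_url
```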
@@ -55,6 +55,7 @@ async def main_fn(memory):
        await asyncio.sleep(10)
        return await main_fn(memory)

+    # TODO: do not deploy if master_address is set and we have verified that the account exists. Right now each node will end up with a different platform
    platform_state = await toncenter.get_account(platform.address.to_string(1, 1, 1))
    if not platform_state.get('code'):
        make_log("TON", "Platform contract is not deployed, send deploy transaction..", level="info")
@@ -10,3 +10,4 @@ from app.core.models.user_activity import UserActivity
from app.core.models.content.user_content import UserContent, UserAction
from app.core.models._config import ServiceConfigValue
from app.core.models.asset import Asset
+from app.core.models.my_network import KnownNode, KnownNodeIncident, RemoteContentIndex
@@ -12,6 +12,12 @@ class KnownTelegramMessage(AlchemyBase):

    id = Column(Integer, autoincrement=True, primary_key=True)
    type = Column(String(64), nullable=True)
+    # common: all messages (constantly deleted)
+    # start_command: /start (only one is kept)
+    # notification (not deleted): licenseWasBought, contentWasIndexed
+    # content/audio, content/video (deleted only if the same content arrives again in the DM)
+    # hint
+
    bot_id = Column(Integer, nullable=False, default=1)  # 0 – uploader, 1 – client
    chat_id = Column(BigInteger, nullable=False)
    message_id = Column(BigInteger, nullable=False)
@@ -20,5 +26,7 @@ class KnownTelegramMessage(AlchemyBase):
    created = Column(DateTime, nullable=False, default=0)
    deleted = Column(Boolean, nullable=True, default=False)
    meta = Column(JSON, nullable=False, default={})
+    # anything at all about the messages can be recorded here
+
    content_id = Column(Integer, ForeignKey('node_storage.id'), nullable=True)
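A hedged sketch of how the type taxonomy above might be used when recording a sent message. The import path and `db_session` are assumptions based on the surrounding code, not shown in this commit.

```python
from datetime import datetime

# Hypothetical import path; the model lives somewhere under app/core/models in this repo
from app.core.models.telegram_message import KnownTelegramMessage

msg = KnownTelegramMessage(
    type='hint',                  # one of: common, start_command, notification, content/audio, content/video, hint
    bot_id=0,                     # 0 – uploader, 1 – client
    chat_id=123456789,            # illustrative IDs
    message_id=42,
    created=datetime.utcnow(),
    meta={'hint_type': 'uploadContentTxRequested'},  # meta is free-form
)
db_session.add(msg)  # assumes an open SQLAlchemy session
db_session.commit()
```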
@@ -0,0 +1,47 @@
from .base import AlchemyBase
from sqlalchemy import Column, BigInteger, Integer, String, ForeignKey, DateTime, JSON, Boolean
from datetime import datetime


class KnownNode(AlchemyBase):
    __tablename__ = 'known_nodes'

    id = Column(Integer, autoincrement=True, primary_key=True)
    ip = Column(String(64), nullable=False, unique=True)
    port = Column(Integer, nullable=False)
    public_key = Column(String(256), nullable=False)
    codebase_hash = Column(String(512), nullable=True)  # Node software version
    reputation = Column(Integer, nullable=False, default=0)
    last_sync = Column(DateTime, nullable=False, default=datetime.now)
    meta = Column(JSON, nullable=False, default={})
    located_at = Column(DateTime, nullable=False, default=datetime.now)


class KnownNodeIncident(AlchemyBase):
    __tablename__ = 'known_nodes_incidents'

    id = Column(Integer, autoincrement=True, primary_key=True)
    node_id = Column(Integer, ForeignKey('known_nodes.id'), nullable=False)  # Reference to the node
    incident_type = Column(String(64), nullable=False)  # Type of incident, e.g., 'sync_failure', 'unreachable'
    description = Column(String(512), nullable=True)  # Detailed description of the incident
    occurred_at = Column(DateTime, nullable=False, default=datetime.utcnow)  # Timestamp when the incident occurred
    severity = Column(Integer, nullable=False, default=1)  # Severity level (1-low to 5-critical)
    resolved = Column(Boolean, nullable=False, default=False)  # Whether the incident has been resolved
    resolved_at = Column(DateTime, nullable=True)  # Timestamp when the incident was resolved
    meta = Column(JSON, nullable=False, default={})  # Additional metadata if needed


class RemoteContentIndex(AlchemyBase):
    __tablename__ = 'remote_content_index'

    id = Column(Integer, autoincrement=True, primary_key=True)
    remote_node_id = Column(Integer, ForeignKey('known_nodes.id'), nullable=False)  # Reference to the remote node
    content_type = Column(String(64), nullable=False)  # Type of content (e.g., music, video, document)
    encrypted_hash = Column(String(128), nullable=False)  # Encrypted content hash provided initially
    decrypted_hash = Column(String(128), nullable=True)  # Decrypted content hash, available once permission is granted
    ton_address = Column(String(128), nullable=True)  # TON network address for the content
    onchain_index = Column(Integer, nullable=True)  # Onchain index or reference on a blockchain
    meta = Column(JSON, nullable=False, default={})  # Flexible extra description; renamed from 'metadata', which is reserved by SQLAlchemy's declarative base
    last_updated = Column(DateTime, nullable=False, default=datetime.utcnow)  # Timestamp of the last update
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)  # Record creation timestamp
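A short usage sketch for the models above, assuming an open SQLAlchemy session `db_session`; all values are illustrative.

```python
from app.core.models.my_network import KnownNode, KnownNodeIncident

node = KnownNode(ip="203.0.113.7", port=8080, public_key="<b58 pubkey>")
db_session.add(node)
db_session.flush()  # assigns node.id so the incident can reference it

incident = KnownNodeIncident(
    node_id=node.id,
    incident_type='unreachable',           # e.g. 'sync_failure', 'unreachable'
    description='node did not answer GET /api/v1/node',
    severity=2,                            # 1 (low) .. 5 (critical)
)
db_session.add(incident)
db_session.commit()
```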
@@ -32,7 +32,7 @@ services:
      maria_db:
        condition: service_healthy

-  indexer:
+  indexer:  # Sends a notification when a new NFT-listen appears. Sets the CID field on all content. Checks the collection item that follows the last indexed one, looks for new content, and tells the uploader that their content was found. Assigns onchain_index to encrypted_content
    build:
      context: .
      dockerfile: Dockerfile
@@ -48,23 +48,7 @@ services:
      maria_db:
        condition: service_healthy

-  uploader:
+  ton_daemon:  # Works with the TON network: service-wallet tasks and contract deployment
-    build:
-      context: .
-      dockerfile: Dockerfile
-    command: python -m app uploader
-    env_file:
-      - .env
-    links:
-      - maria_db
-    volumes:
-      - /Storage/logs:/app/logs
-      - /Storage/storedContent:/app/data
-    depends_on:
-      maria_db:
-        condition: service_healthy
-
-  ton_daemon:
    build:
      context: .
      dockerfile: Dockerfile
@@ -80,7 +64,7 @@ services:
      maria_db:
        condition: service_healthy

-  license_index:
+  license_index:  # Checks user wallets for new NFTs. Polls those NFTs via a well-known GET method through which we determine that this is a particular license, and stores its information
    build:
      context: .
      dockerfile: Dockerfile
||||||
Binary file not shown.
|
|
@@ -171,3 +171,6 @@ msgstr "Open in app"
msgid "p_licenseWasBought"
msgstr "💶 {username} just purchased a <a href={nft_address}>license</a> for <b>{content_title}</b>. "
"👏 Your content is gaining more appreciation! Keep creating and inspiring! 🚀"
+
+msgid "p_uploadContentTxRequested"
+msgstr "Transaction to upload {title} requested. Confirm it and wait for a notification with the result. Content conversion may take up to 20 min"
@@ -0,0 +1,199 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>File upload and streaming</title>
    <style>
        body { font-family: Arial, sans-serif; margin: 20px; }
        section { margin-bottom: 40px; }
        label { display: block; margin-bottom: 5px; }
        input, button { margin-bottom: 10px; }
        #log { border: 1px solid #ccc; padding: 10px; max-height: 200px; overflow-y: auto; background: #f9f9f9; }
    </style>
    <!-- Including jsSHA library from CDN -->
    <script src="https://cdnjs.cloudflare.com/ajax/libs/jsSHA/3.2.0/sha256.js"></script>
</head>
<body>
<h1>File upload and streaming</h1>

<section id="uploadSection">
    <h2>File upload</h2>
    <label for="uploadFile">Choose a file to upload:</label>
    <input type="file" id="uploadFile">
    <br>
    <button id="uploadBtn">Upload file</button>
    <div id="uploadResult"></div>
</section>

<section id="streamSection">
    <h2>File streaming</h2>
    <label for="fileHashInput">Enter file_hash:</label>
    <input type="text" id="fileHashInput" placeholder="Enter hash">
    <br>
    <button id="loadFileBtn">Load file for streaming</button>
    <div id="mediaContainer" style="margin-top:20px;"></div>
</section>

<section id="logSection">
    <h2>Log</h2>
    <div id="log"></div>
</section>

<script>
    // Base URL for endpoints
    const BASE_URL = "https://my-public-node-1.projscale.dev/api/v1.5/storage";

    // Function to append messages to log div
    function appendLog(message) {
        const logDiv = document.getElementById('log');
        const p = document.createElement('p');
        p.textContent = message;
        logDiv.appendChild(p);
    }

    // Function to compute SHA-256 hash of a file in hex using jsSHA library with incremental reading
    function computeSHA256(file) {
        return new Promise((resolve, reject) => {
            const chunkSize = 2097152; // 2MB per chunk
            let offset = 0;
            const reader = new FileReader();
            const shaObj = new jsSHA("SHA-256", "ARRAYBUFFER");

            reader.onload = function(e) {
                // Update the hash object with the current chunk data
                shaObj.update(e.target.result);
                offset += chunkSize;
                appendLog(`Processed ${Math.min(offset, file.size)} of ${file.size} bytes`);
                if (offset < file.size) {
                    readNextChunk();
                } else {
                    try {
                        const hash = shaObj.getHash("HEX");
                        resolve(hash);
                    } catch (err) {
                        reject(err);
                    }
                }
            };

            reader.onerror = function(err) {
                reject(err);
            };

            function readNextChunk() {
                const slice = file.slice(offset, offset + chunkSize);
                reader.readAsArrayBuffer(slice);
            }

            readNextChunk();
        });
    }

    // Upload button event listener
    document.getElementById('uploadBtn').addEventListener('click', async () => {
        const fileInput = document.getElementById('uploadFile');
        const uploadResult = document.getElementById('uploadResult');
        uploadResult.textContent = "";

        if (!fileInput.files || fileInput.files.length === 0) {
            uploadResult.textContent = "Please choose a file.";
            return;
        }

        const file = fileInput.files[0];
        appendLog("Starting hash computation...");
        try {
            // Compute SHA-256 hash of the file in hex format
            const hashHex = await computeSHA256(file);
            appendLog(`Computed SHA-256 hash: ${hashHex}`);

            // Prepare the POST request with file as body and header X-Content-SHA256
            const response = await fetch(BASE_URL, {
                method: "POST",
                headers: {
                    "X-Content-SHA256": hashHex,
                    "X-File-Name": file.name, // NEW: pass the file name
                    "Content-Type": file.type || "application/octet-stream"
                },
                body: file
            });

            if (!response.ok) {
                const errorData = await response.json();
                uploadResult.textContent = `Error: ${errorData.error}`;
                appendLog(`Upload failed: ${errorData.error}`);
                return;
            }

            const resultData = await response.json();
            uploadResult.textContent = `File uploaded successfully. content_sha256: ${resultData.content_sha256}`;
            appendLog(`Upload successful. Response: ${JSON.stringify(resultData)}`);
        } catch (err) {
            uploadResult.textContent = "Error while uploading the file.";
            appendLog(`Error during upload: ${err}`);
        }
    });

    // Load file button event listener for streaming
    document.getElementById('loadFileBtn').addEventListener('click', async () => {
        const fileHash = document.getElementById('fileHashInput').value.trim();
        const mediaContainer = document.getElementById('mediaContainer');
        mediaContainer.innerHTML = "";

        if (!fileHash) {
            mediaContainer.textContent = "Please enter a file_hash.";
            return;
        }

        // Construct file URL
        const fileUrl = `${BASE_URL}/${fileHash}`;
        appendLog(`Fetching file info for hash: ${fileHash}`);

        try {
            // Perform a HEAD request to determine Content-Type
            const headResponse = await fetch(fileUrl, { method: "HEAD" });
            if (!headResponse.ok) {
                mediaContainer.textContent = "File not found.";
                appendLog("File not found during HEAD request.");
                return;
            }

            const contentType = headResponse.headers.get("Content-Type") || "";
            appendLog(`Content-Type: ${contentType}`);

            let mediaElement;
            // Create appropriate element based on Content-Type
            if (contentType.startsWith("image/")) {
                mediaElement = document.createElement("img");
                mediaElement.style.maxWidth = "100%";
            } else if (contentType.startsWith("video/")) {
                mediaElement = document.createElement("video");
                mediaElement.controls = true;
                mediaElement.style.maxWidth = "100%";
            } else if (contentType.startsWith("audio/")) {
                mediaElement = document.createElement("audio");
                mediaElement.controls = true;
            } else {
                // For other types, create a download link
                mediaElement = document.createElement("a");
                mediaElement.textContent = "Download file";
            }

            // Set the src or href attribute to stream the file
            if (mediaElement.tagName === "A") {
                mediaElement.href = fileUrl;
                mediaElement.download = "";
            } else {
                mediaElement.src = fileUrl;
            }

            mediaContainer.appendChild(mediaElement);
            appendLog("Media element created and added to the page.");
        } catch (err) {
            mediaContainer.textContent = "Error while loading the file.";
            appendLog(`Error during file streaming: ${err}`);
        }
    });
</script>
</body>
</html>
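As a quick check of the Range handling implemented in progressive_storage.py, a hedged snippet that requests the first kilobyte of a stored file; the host is taken from the test page above, the hash is hypothetical.

```python
import requests

NODE_URL = "https://my-public-node-1.projscale.dev"  # host from the test page
file_hash = "<base58 sha256>"  # hypothetical

r = requests.get(
    f"{NODE_URL}/api/v1.5/storage/{file_hash}",
    headers={"Range": "bytes=0-1023"},
)
assert r.status_code == 206        # partial content
print(r.headers["Content-Range"])  # e.g. "bytes 0-1023/<file_size>"
print(len(r.content))              # 1024
```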