From dba359bb6a072e23bf50370e7ef9e7b8942bb4f4 Mon Sep 17 00:00:00 2001
From: user
Date: Tue, 25 Feb 2025 14:21:16 +0300
Subject: [PATCH] chunked streaming, my network models

---
 ENDPOINTS.md                                |  55 ++++
 README.md                                   |   1 +
 app/api/__init__.py                         |   4 +
 app/api/middleware.py                       |   1 +
 app/api/routes/_blockchain.py               |  15 +-
 app/api/routes/node_storage.py              |  10 +
 app/api/routes/progressive_storage.py       | 278 ++++++++++++++++++++
 app/core/background/ton_service.py          |   1 +
 app/core/models/__init__.py                 |   1 +
 app/core/models/messages.py                 |   8 +
 app/core/models/my_network.py               |  47 ++++
 docker-compose.yml                          |  22 +-
 locale/en/LC_MESSAGES/sanic_telegram_bot.mo | Bin 3104 -> 3285 bytes
 locale/en/LC_MESSAGES/sanic_telegram_bot.po |   3 +
 uploader_test.html                          | 199 ++++++++++++++
 15 files changed, 625 insertions(+), 20 deletions(-)
 create mode 100644 ENDPOINTS.md
 create mode 100644 app/api/routes/progressive_storage.py
 create mode 100644 app/core/models/my_network.py
 create mode 100644 uploader_test.html

diff --git a/ENDPOINTS.md b/ENDPOINTS.md
new file mode 100644
index 0000000..dd8463c
--- /dev/null
+++ b/ENDPOINTS.md
@@ -0,0 +1,55 @@
+
+1. GET /api/v1/node
+{
+  node_address: service_wallet,
+  master_address: platform collection,
+  indexer_height: int,
+  services: {
+    ..status&delay only
+  }
+}
+
+2. POST /api/system.sendStatus
+3. GET /api/system.version
+{
+  codebase_hash: string,
+  codebase_branch: string
+}
+
+4. GET /api/tonconnect-manifest.json
+used for TON Connect on the bot frontend (centralization)
+
+5. GET /api/platform-metadata.json
+used when creating the platform, if one has not been created before
+
+6. POST /api/v1/auth.twa
+stores information about the user (their wallet) and issues a token in return for operations on this node
+
+7. GET /api/v1/tonconnect.new [! DEPRECATED !]
+8. POST /api/v1/tonconnect.logout [! DEPRECATED !] but it may still be needed when the wallet is disconnected in the web app
+
+9. POST /api/v1/storage
+uploads content to the node and returns its hash; TODO: uploads must be chunked so the server does not fall over
+
+10. GET /api/v1/storage/
+fetches the whole file. TODO: look into real file streaming
+TODO: implement looking the file up on other nodes
+
+11. GET /api/v1/storage.decodeContentId/
+just a toy endpoint
+
+12. GET /api/v1/account [! DEPRECATED !]
+returns information about the user; not needed at all
+
+13. POST /api/v1/blockchain.sendNewContentMessage
+14. POST /api/v1/blockchain.sendPurchaseContent
+
+15. GET /api/v1/content.list
+16. GET /api/v1/content.view
+
+
+
+
+
+
+
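The first two probes are enough for a peer to identify a node and compare software versions. A minimal sketch against the response shapes documented above (the host is a placeholder, not a real deployment):

```python
# Probing a node's identity and software version via the documented endpoints.
import requests

host = "http://localhost:8000"  # placeholder

node = requests.get(f"{host}/api/v1/node").json()
print(node["node_address"], node["indexer_height"])

version = requests.get(f"{host}/api/system.version").json()
print(version["codebase_hash"], version["codebase_branch"])
```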
diff --git a/README.md b/README.md
index 269fe0e..dbfc5a2 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,7 @@
 ```shell
 cd sanic-telegram-bot
 # edit .env file
+# build media_converter git.projscale.dev/my-dev/converter-module
 docker-compose up --build
 ```

diff --git a/app/api/__init__.py b/app/api/__init__.py
index e282659..9b70f64 100644
--- a/app/api/__init__.py
+++ b/app/api/__init__.py
@@ -17,6 +17,7 @@ from app.api.routes.auth import s_api_v1_auth_twa
 from app.api.routes.statics import s_api_tonconnect_manifest, s_api_platform_metadata
 from app.api.routes.node_storage import s_api_v1_storage_post, s_api_v1_storage_get, \
     s_api_v1_storage_decode_cid
+from app.api.routes.progressive_storage import s_api_v1_5_storage_get, s_api_v1_5_storage_post
 from app.api.routes.account import s_api_v1_account_get
 from app.api.routes._blockchain import s_api_v1_blockchain_send_new_content_message, \
     s_api_v1_blockchain_send_purchase_content_message
@@ -39,6 +40,9 @@ app.add_route(s_api_v1_auth_twa, "/api/v1/auth.twa", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_tonconnect_new, "/api/v1/tonconnect.new", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_tonconnect_logout, "/api/v1/tonconnect.logout", methods=["POST", "OPTIONS"])
 
+app.add_route(s_api_v1_5_storage_post, "/api/v1.5/storage", methods=["POST", "OPTIONS"])
+app.add_route(s_api_v1_5_storage_get, "/api/v1.5/storage/<file_hash>", methods=["GET", "OPTIONS"])
+
 app.add_route(s_api_v1_storage_post, "/api/v1/storage", methods=["POST", "OPTIONS"])
 app.add_route(s_api_v1_storage_get, "/api/v1/storage/<file_hash>", methods=["GET", "OPTIONS"])
 app.add_route(s_api_v1_storage_decode_cid, "/api/v1/storage.decodeContentId/<content_id>", methods=["GET", "OPTIONS"])

diff --git a/app/api/middleware.py b/app/api/middleware.py
index 860add9..56d43ec 100644
--- a/app/api/middleware.py
+++ b/app/api/middleware.py
@@ -75,6 +75,7 @@ async def try_service_authorization(request):
     if not signature:
         return
 
+    # TODO: what is the point of this check if the value can simply be spoofed?
     message_hash_b58 = request.headers.get('X-Message-Hash')
     if not message_hash_b58:
         return
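A sketch of what the TODO above seems to call for: verifying the signature against the sending node's stored public key instead of trusting the header pair as-is. The Ed25519/PyNaCl choice, the hex key format, and the helper name are assumptions, not the project's actual crypto layer:

```python
# Sketch only: assumes peers sign the raw message hash with Ed25519, that both
# header values travel base58-encoded, and that the public key comes from our
# own KnownNode table (see the my_network models below) rather than a header.
from base58 import b58decode
from nacl.signing import VerifyKey
from nacl.exceptions import BadSignatureError

def verify_service_signature(message_hash_b58: str, signature_b58: str,
                             known_pubkey_hex: str) -> bool:
    """True only if the signature was produced over the hash by the known key."""
    try:
        VerifyKey(bytes.fromhex(known_pubkey_hex)).verify(
            b58decode(message_hash_b58),  # the signed payload
            b58decode(signature_b58),     # detached signature
        )
        return True
    except (BadSignatureError, ValueError):
        return False
```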
diff --git a/app/api/routes/_blockchain.py b/app/api/routes/_blockchain.py
index 1ec2e48..ecd2571 100644
--- a/app/api/routes/_blockchain.py
+++ b/app/api/routes/_blockchain.py
@@ -77,9 +77,12 @@ async def s_api_v1_blockchain_send_new_content_message(request):
 
     image_content_cid = None
     image_content = None
+
+    content_title = f"{', '.join(request.json['authors'])} - {request.json['title']}" if request.json['authors'] else request.json['title']
+
     metadata_content = await create_metadata_for_item(
         request.ctx.db_session,
-        title=f"{', '.join(request.json['authors'])} - {request.json['title']}" if request.json['authors'] else request.json['title'],
+        title=content_title,
         cover_url=f"{PROJECT_HOST}/api/v1/storage/{image_content_cid.serialize_v2()}" if image_content_cid else None,
         authors=request.json['authors'],
         hashtags=request.json['hashtags']
@@ -96,6 +99,16 @@ async def s_api_v1_blockchain_send_new_content_message(request):
         )
         i += 1
 
+    await request.ctx.user_uploader_wrapper.send_message(
+        request.ctx.user.translated('p_uploadContentTxRequested').format(
+            title=content_title,
+        ), message_type='hint', message_meta={
+            'encrypted_content_hash': encrypted_content_cid.content_hash,
+            'hint_type': 'uploadContentTxRequested'
+        }
+    )
+
+
     return response.json({
         'address': platform.address.to_string(1, 1, 1),
         'amount': str(int(0.15 * 10 ** 9)),

diff --git a/app/api/routes/node_storage.py b/app/api/routes/node_storage.py
index 6c88184..f4a4b84 100644
--- a/app/api/routes/node_storage.py
+++ b/app/api/routes/node_storage.py
@@ -20,6 +20,14 @@ from uuid import uuid4
 import subprocess
 
+# Any content is uploaded in a single request; the mime type is determined from the extension
+# file_mimetype audio/video
+# extension_encoding file encode container
+# The file is stored under sha256(file_content) !! — very heavy
+# a CID is generated that accounts for the content type and how to decode it
+# Uploads are accepted only from a user, or when our own backend asks to store something
+# Creates a decrypted (local/content_bin) StoredContent
+
 async def s_api_v1_storage_post(request):
     if not request.files:
         return response.json({"error": "No file provided"}, status=400)
@@ -93,6 +101,8 @@
         return response.json({"error": f"Error: {e}"}, status=500)
 
 
+# Retrieves content by file_hash, honoring the seconds_limit argument
+
 async def s_api_v1_storage_get(request, file_hash=None):
     seconds_limit = int(request.args.get("seconds_limit", 0))

diff --git a/app/api/routes/progressive_storage.py b/app/api/routes/progressive_storage.py
new file mode 100644
index 0000000..652aa32
--- /dev/null
+++ b/app/api/routes/progressive_storage.py
@@ -0,0 +1,278 @@
+import os
+import subprocess
+import asyncio
+from uuid import uuid4
+from datetime import datetime
+from mimetypes import guess_type
+
+import aiofiles
+from base58 import b58encode
+from sanic import response
+
+from app.core.logger import make_log
+from app.core.models.node_storage import StoredContent
+from app.core._config import UPLOADS_DIR
+from app.core._utils.resolve_content import resolve_content
+
+
+# POST /api/v1.5/storage
+async def s_api_v1_5_storage_post(request):
+    # Log the start of the file upload process
+    make_log("uploader_v1.5", "Received file upload request", level="INFO")
+
+    # Get the provided file hash from header (hex format)
+    provided_hash_hex = request.headers.get("X-Content-SHA256")
+    if not provided_hash_hex:
+        make_log("uploader_v1.5", "Missing X-Content-SHA256 header", level="ERROR")
+        return response.json({"error": "Missing X-Content-SHA256 header"}, status=400)
+    try:
+        provided_hash_bytes = bytes.fromhex(provided_hash_hex)
+        provided_hash_b58 = b58encode(provided_hash_bytes).decode()
+        make_log("uploader_v1.5", f"Provided hash (base58): {provided_hash_b58}", level="INFO")
+    except Exception as e:
+        make_log("uploader_v1.5", f"Invalid X-Content-SHA256 header format: {e}", level="ERROR")
+        return response.json({"error": "Invalid X-Content-SHA256 header format"}, status=400)
+
+    provided_filename = request.headers.get("X-File-Name")
+
+    # Check if the file already exists in the database
+    db_session = request.ctx.db_session
+    existing = db_session.query(StoredContent).filter_by(hash=provided_hash_b58).first()
+    if existing:
+        make_log("uploader_v1.5", f"File with hash {provided_hash_b58} already exists in DB", level="INFO")
+        serialized_v2 = existing.serialize_v2()  # returns a string
+        serialized_v1 = existing.serialize_v1()  # returns a string
+        return response.json({
+            "content_sha256": provided_hash_b58,
+            "content_id": serialized_v2,
+            "content_id_v1": serialized_v1,
+            "content_url": f"dmy://storage?cid={serialized_v2}",
+        })
+
+    # Save the uploaded file to a temporary location using streaming
+    temp_filename = f"v1.5_upload_{uuid4()}"
+    temp_path = os.path.join(UPLOADS_DIR, temp_filename)
+    make_log("uploader_v1.5", f"Saving file to temporary location: {temp_path}", level="INFO")
+    try:
+        async with aiofiles.open(temp_path, 'wb') as out_file:
+            async for chunk in request.stream:
+                await out_file.write(chunk)
+        make_log("uploader_v1.5", f"Finished saving file to temporary location: {temp_path}", level="INFO")
+    except Exception as e:
+        make_log("uploader_v1.5", f"Error saving uploaded file: {e}", level="ERROR")
+        return response.json({"error": "Failed to save uploaded file"}, status=500)
+
+    # Compute the file SHA256 in a subprocess to avoid loading the file into memory
+    make_log("uploader_v1.5", f"Computing file hash using subprocess for file: {temp_path}", level="INFO")
+    try:
+        proc = await asyncio.create_subprocess_exec(
+            'sha256sum', temp_path,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await proc.communicate()
+        if proc.returncode != 0:
+            error_msg = stderr.decode().strip()
+            make_log("uploader_v1.5", f"sha256sum error: {error_msg}", level="ERROR")
+            return response.json({"error": "Failed to compute file hash"}, status=500)
+        # Parse output: "<hash>  <filename>"
+        computed_hash_hex = stdout.decode().split()[0].strip()
+        computed_hash_bytes = bytes.fromhex(computed_hash_hex)
+        computed_hash_b58 = b58encode(computed_hash_bytes).decode()
+        make_log("uploader_v1.5", f"Computed hash (base58): {computed_hash_b58}", level="INFO")
+    except Exception as e:
+        make_log("uploader_v1.5", f"Error computing file hash: {e}", level="ERROR")
+        return response.json({"error": "Error computing file hash"}, status=500)
+
+    # Verify that the computed hash matches the provided hash
+    if computed_hash_b58 != provided_hash_b58:
+        make_log("uploader_v1.5", f"Hash mismatch: provided {provided_hash_b58} vs computed {computed_hash_b58}", level="ERROR")
+        try:
+            os.remove(temp_path)
+            make_log("uploader_v1.5", f"Temporary file removed due to hash mismatch: {temp_path}", level="INFO")
+        except Exception as e:
+            make_log("uploader_v1.5", f"Error removing temp file: {e}", level="ERROR")
+        return response.json({"error": "Hash mismatch"}, status=400)
+
+    # Determine the final file path
+    final_path = os.path.join(UPLOADS_DIR, f"v1.5_{computed_hash_b58}")
+    if os.path.exists(final_path):
+        make_log("uploader_v1.5", f"File already exists on disk: {final_path}", level="INFO")
+        try:
+            os.remove(temp_path)
+            make_log("uploader_v1.5", f"Temporary file removed: {temp_path}", level="INFO")
+        except Exception as e:
+            make_log("uploader_v1.5", f"Error removing temp file: {e}", level="ERROR")
+        existing = db_session.query(StoredContent).filter_by(hash=computed_hash_b58).first()
+        if existing:
+            serialized_v2 = existing.serialize_v2()
+            serialized_v1 = existing.serialize_v1()
+            return response.json({
+                "content_sha256": computed_hash_b58,
+                "content_id": serialized_v2,
+                "content_id_v1": serialized_v1,
+                "content_url": f"dmy://storage?cid={serialized_v2}",
+            })
+    else:
+        try:
+            os.rename(temp_path, final_path)
+            make_log("uploader_v1.5", f"Renamed temporary file to final location: {final_path}", level="INFO")
+        except Exception as e:
+            make_log("uploader_v1.5", f"Error renaming file: {e}", level="ERROR")
+            return response.json({"error": "Failed to finalize file storage"}, status=500)
+
+    # Create a new StoredContent record with user_id from request.ctx.user and commit it
+    try:
+        new_content = StoredContent(
+            type='local/content_bin',
+            hash=computed_hash_b58,
+            user_id=request.ctx.user.id,  # 'user_id' is added to StoredContent
+            filename=provided_filename,
+            key_id=None,
+            meta={},
+            created=datetime.utcnow()
+        )
+        db_session.add(new_content)
+        db_session.commit()
+        make_log("uploader_v1.5", f"New file stored and indexed for user {request.ctx.user.id} with hash {computed_hash_b58}", level="INFO")
+    except Exception as e:
+        make_log("uploader_v1.5", f"Database error: {e}", level="ERROR")
+        return response.json({"error": "Database error"}, status=500)
+
+    serialized_v2 = new_content.serialize_v2()
+    serialized_v1 = new_content.serialize_v1()
+    return response.json({
+        "content_sha256": computed_hash_b58,
+        "content_id": serialized_v2,
+        "content_id_v1": serialized_v1,
+        "content_url": f"dmy://storage?cid={serialized_v2}",
+    })
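For reference, this is the handshake the handler above expects from a client: the SHA-256 up front (so the node can dedupe before the body arrives), then the raw streamed bytes, not multipart form data. A sketch assuming any plain HTTP client; `requests` and the localhost URL are placeholders:

```python
# Minimal v1.5 upload sketch: hash first, then stream the raw body.
import hashlib
import requests  # assumption: any HTTP client works; requests keeps it short

def upload_v1_5(path: str, host: str = "http://localhost:8000") -> dict:
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            sha256.update(chunk)
    with open(path, "rb") as f:
        resp = requests.post(
            f"{host}/api/v1.5/storage",
            headers={
                "X-Content-SHA256": sha256.hexdigest(),  # hex, as the handler expects
                "X-File-Name": path.rsplit("/", 1)[-1],
            },
            data=f,  # file object => streamed request body
        )
    return resp.json()  # {"content_sha256": ..., "content_id": ..., "content_url": ...}
```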
+
+
+# GET /api/v1.5/storage/<file_hash>
+async def s_api_v1_5_storage_get(request, file_hash):
+    # Log the file retrieval request
+    make_log("uploader_v1.5", f"Received file retrieval request for hash: {file_hash}", level="INFO")
+
+    # Determine the file path based on the provided file_hash
+    final_path = os.path.join(UPLOADS_DIR, f"v1.5_{file_hash}")
+    if not os.path.exists(final_path):
+        make_log("uploader_v1.5", f"File not found: {final_path}", level="ERROR")
+        return response.json({"error": "File not found"}, status=404)
+
+    # Retrieve the StoredContent record from the database
+    db_session = request.ctx.db_session
+    stored = db_session.query(StoredContent).filter_by(hash=file_hash).first()
+    if stored and stored.filename:
+        filename_for_mime = stored.filename
+    else:
+        # If the record is not found or filename is not set, fall back to the file path
+        filename_for_mime = final_path
+
+    # Determine the MIME type using the filename from StoredContent
+    mime_type, _ = guess_type(filename_for_mime)
+    if not mime_type:
+        mime_type = "application/octet-stream"
+
+    file_size = os.path.getsize(final_path)
+    range_header = request.headers.get("Range")
+
+    if range_header:
+        make_log("uploader_v1.5", f"Processing Range header: {range_header}", level="INFO")
+        range_spec = range_header.strip().lower()
+        if not range_spec.startswith("bytes="):
+            make_log("uploader_v1.5", f"Invalid Range header: {range_header}", level="ERROR")
+            return response.json({"error": "Invalid Range header"}, status=400)
+        range_spec = range_spec[len("bytes="):]
+        # Split by comma to handle multiple ranges
+        range_parts = [part.strip() for part in range_spec.split(',')]
+        parsed_ranges = []
+        try:
+            for part in range_parts:
+                if '-' not in part:
+                    raise ValueError("Invalid range format")
+                start_str, end_str = part.split('-', 1)
+                if start_str == "":
+                    # Suffix byte range: last N bytes
+                    suffix_length = int(end_str)
+                    if suffix_length > file_size:
+                        start = 0
+                    else:
+                        start = file_size - suffix_length
+                    end = file_size - 1
+                else:
+                    start = int(start_str)
+                    if end_str == "":
+                        end = file_size - 1
+                    else:
+                        end = int(end_str)
+                if start > end or end >= file_size:
+                    raise ValueError("Requested Range Not Satisfiable")
+                parsed_ranges.append((start, end))
+        except Exception as e:
+            make_log("uploader_v1.5", f"Invalid Range header: {range_header} - {e}", level="ERROR")
+            return response.json({"error": "Invalid Range header"}, status=400)
+
+        # If only one range is requested, use a single-range response
+        if len(parsed_ranges) == 1:
+            start, end = parsed_ranges[0]
+            content_length = end - start + 1
+            headers = {
+                "Content-Range": f"bytes {start}-{end}/{file_size}",
+                "Accept-Ranges": "bytes",
+                "Content-Length": str(content_length),
+                "Content-Type": mime_type,
+            }
+
+            async def stream_file_range():
+                # Stream the single range's content
+                make_log("uploader_v1.5", f"Starting to stream file from byte {start} to {end}", level="INFO")
+                async with aiofiles.open(final_path, mode='rb') as f:
+                    await f.seek(start)
+                    remaining = content_length
+                    chunk_size = 1024 * 1024  # 1MB chunks
+                    while remaining > 0:
+                        read_size = min(chunk_size, remaining)
+                        data = await f.read(read_size)
+                        if not data:
+                            break
+                        remaining -= len(data)
+                        yield data
+                make_log("uploader_v1.5", f"Finished streaming file: {final_path}", level="INFO")
+
+            return response.stream(stream_file_range, status=206, headers=headers)
+        else:
+            # Multiple ranges requested: create a multipart/byteranges response
+            boundary = uuid4().hex  # Generate a random boundary string
+            headers = {
+                "Content-Type": f"multipart/byteranges; boundary={boundary}",
+                "Accept-Ranges": "bytes",
+            }
+
+            async def stream_multipart():
+                # For each range, yield the boundary, part headers, and the file content
+                for start, end in parsed_ranges:
+                    part_header = (
+                        f"--{boundary}\r\n"
+                        f"Content-Type: {mime_type}\r\n"
+                        f"Content-Range: bytes {start}-{end}/{file_size}\r\n"
+                        f"\r\n"
+                    )
+                    yield part_header.encode()
+                    part_length = end - start + 1
+                    async with aiofiles.open(final_path, mode='rb') as f:
+                        await f.seek(start)
+                        remaining = part_length
+                        chunk_size = 1024 * 1024  # 1MB chunks
+                        while remaining > 0:
+                            read_size = min(chunk_size, remaining)
+                            data = await f.read(read_size)
+                            if not data:
+                                break
+                            remaining -= len(data)
+                            yield data
+                    yield b"\r\n"
+                # Final boundary marker
+                yield f"--{boundary}--\r\n".encode()
+
+            return response.stream(stream_multipart, status=206, headers=headers)
+    else:
+        # No Range header: return the full file
+        make_log("uploader_v1.5", f"Returning full file for video/audio: {final_path}", level="INFO")
+        return await response.file(final_path, mime_type=mime_type)
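The GET handler above covers single ranges, suffix ranges, and multipart/byteranges. A quick probe of all three cases (host and hash are placeholders; note the handler rejects, rather than clamps, an end offset past EOF, so the file must be larger than the offsets requested):

```python
# Probing the v1.5 Range support.
import requests

host, content_hash = "http://localhost:8000", "<base58-hash>"  # placeholders
url = f"{host}/api/v1.5/storage/{content_hash}"

r = requests.get(url, headers={"Range": "bytes=0-1023"})        # first 1 KiB
print(r.status_code, r.headers.get("Content-Range"))            # 206, bytes 0-1023/<size>

r = requests.get(url, headers={"Range": "bytes=-1024"})         # suffix: last 1 KiB
print(r.status_code, r.headers.get("Content-Range"))

r = requests.get(url, headers={"Range": "bytes=0-99,200-299"})  # two ranges at once
print(r.status_code, r.headers.get("Content-Type"))             # multipart/byteranges; boundary=...
```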
diff --git a/app/core/background/ton_service.py b/app/core/background/ton_service.py
index 86599c6..bca8872 100644
--- a/app/core/background/ton_service.py
+++ b/app/core/background/ton_service.py
@@ -55,6 +55,7 @@ async def main_fn(memory):
         await asyncio.sleep(10)
         return await main_fn(memory)
 
+    # TODO: do not deploy if master_address is set and we have verified that the account exists. Right now each node ends up with a different platform
     platform_state = await toncenter.get_account(platform.address.to_string(1, 1, 1))
     if not platform_state.get('code'):
         make_log("TON", "Platform contract is not deployed, send deploy transaction..", level="info")

diff --git a/app/core/models/__init__.py b/app/core/models/__init__.py
index 6276ce2..97ce29f 100644
--- a/app/core/models/__init__.py
+++ b/app/core/models/__init__.py
@@ -10,3 +10,4 @@ from app.core.models.user_activity import UserActivity
 from app.core.models.content.user_content import UserContent, UserAction
 from app.core.models._config import ServiceConfigValue
 from app.core.models.asset import Asset
+from app.core.models.my_network import KnownNode, KnownNodeIncident, RemoteContentIndex

diff --git a/app/core/models/messages.py b/app/core/models/messages.py
index edfd5dd..31395b9 100644
--- a/app/core/models/messages.py
+++ b/app/core/models/messages.py
@@ -12,6 +12,12 @@ class KnownTelegramMessage(AlchemyBase):
     id = Column(Integer, autoincrement=True, primary_key=True)
     type = Column(String(64), nullable=True)
+    # common: all messages (deleted all the time)
+    # start_command: /start (only one is kept)
+    # notification (never deleted): licenseWasBought, contentWasIndexed
+    # content/audio, content/video (deleted only if the same content arrives in the DM again)
+    # hint
+
     bot_id = Column(Integer, nullable=False, default=1)  # 0 – uploader, 1 – client
     chat_id = Column(BigInteger, nullable=False)
     message_id = Column(BigInteger, nullable=False)
@@ -20,5 +26,7 @@ class KnownTelegramMessage(AlchemyBase):
     created = Column(DateTime, nullable=False, default=0)
     deleted = Column(Boolean, nullable=True, default=False)
     meta = Column(JSON, nullable=False, default={})
+    # anything at all about a message can be recorded here
+
     content_id = Column(Integer, ForeignKey('node_storage.id'), nullable=True)

diff --git a/app/core/models/my_network.py b/app/core/models/my_network.py
new file mode 100644
index 0000000..561eee1
--- /dev/null
+++ b/app/core/models/my_network.py
@@ -0,0 +1,47 @@
+from .base import AlchemyBase
+from sqlalchemy import Column, BigInteger, Integer, String, ForeignKey, DateTime, JSON, Boolean
+from datetime import datetime
+
+
+class KnownNode(AlchemyBase):
+    __tablename__ = 'known_nodes'
+
+    id = Column(Integer, autoincrement=True, primary_key=True)
+    ip = Column(String(64), nullable=False, unique=True)
+    port = Column(Integer, nullable=False)
+    public_key = Column(String(256), nullable=False)
+    codebase_hash = Column(String(512), nullable=True)  # Node software version
+    reputation = Column(Integer, nullable=False, default=0)
+    last_sync = Column(DateTime, nullable=False, default=datetime.now)
+    meta = Column(JSON, nullable=False, default={})
+    located_at = Column(DateTime, nullable=False, default=datetime.now)
+
+
+class KnownNodeIncident(AlchemyBase):
+    __tablename__ = 'known_nodes_incidents'
+
+    id = Column(Integer, autoincrement=True, primary_key=True)
+    node_id = Column(Integer, ForeignKey('known_nodes.id'), nullable=False)  # Reference to the node
+    incident_type = Column(String(64), nullable=False)  # Type of incident, e.g. 'sync_failure', 'unreachable'
+    description = Column(String(512), nullable=True)  # Detailed description of the incident
+    occurred_at = Column(DateTime, nullable=False, default=datetime.utcnow)  # Timestamp when the incident occurred
+    severity = Column(Integer, nullable=False, default=1)  # Severity level (1 = low to 5 = critical)
+    resolved = Column(Boolean, nullable=False, default=False)  # Whether the incident has been resolved
+    resolved_at = Column(DateTime, nullable=True)  # Timestamp when the incident was resolved
+    meta = Column(JSON, nullable=False, default={})  # Additional metadata if needed
+
+
+class RemoteContentIndex(AlchemyBase):
+    __tablename__ = 'remote_content_index'
+
+    id = Column(Integer, autoincrement=True, primary_key=True)
+    remote_node_id = Column(Integer, ForeignKey('known_nodes.id'), nullable=False)  # Reference to the remote node
+    content_type = Column(String(64), nullable=False)  # Type of content (e.g. music, video, document)
+    encrypted_hash = Column(String(128), nullable=False)  # Encrypted content hash provided initially
+    decrypted_hash = Column(String(128), nullable=True)  # Decrypted content hash, available once permission is granted
+    ton_address = Column(String(128), nullable=True)  # TON network address for the content
+    onchain_index = Column(Integer, nullable=True)  # Onchain index or reference on a blockchain
+    meta = Column(JSON, nullable=False, default={})  # Additional metadata; named 'meta' because 'metadata' is reserved by SQLAlchemy's declarative base
+    last_updated = Column(DateTime, nullable=False, default=datetime.utcnow)  # Timestamp of the last update
+    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)  # Record creation timestamp
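For context, a sketch of how the incident table above would presumably be fed when a peer goes quiet; `session` is an open SQLAlchemy session, and the reputation decrement is an assumption, not established policy:

```python
# Recording an incident against a known node; severity runs 1 (low) to 5 (critical).
from datetime import datetime
from app.core.models.my_network import KnownNode, KnownNodeIncident

def report_unreachable(session, ip: str) -> None:
    node = session.query(KnownNode).filter_by(ip=ip).first()
    if node is None:
        return  # unknown peer: nothing to attach the incident to
    session.add(KnownNodeIncident(
        node_id=node.id,
        incident_type='unreachable',
        description=f'No response from {ip}:{node.port}',
        occurred_at=datetime.utcnow(),
        severity=2,
    ))
    node.reputation -= 1  # assumption: reputation decays on incidents
    session.commit()
```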
diff --git a/docker-compose.yml b/docker-compose.yml
index 1551e05..0409315 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -32,7 +32,7 @@ services:
       maria_db:
         condition: service_healthy
 
-  indexer:
+  indexer:  # Sends a notification when a new NFT-listen appears. Sets the CID field on all content. Checks the collection item that follows the last indexed one, looks for new content, and tells the uploader their content has been found. Assigns onchain_index to encrypted_content
     build:
       context: .
       dockerfile: Dockerfile
@@ -48,23 +48,7 @@ services:
       maria_db:
         condition: service_healthy
 
-  uploader:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    command: python -m app uploader
-    env_file:
-      - .env
-    links:
-      - maria_db
-    volumes:
-      - /Storage/logs:/app/logs
-      - /Storage/storedContent:/app/data
-    depends_on:
-      maria_db:
-        condition: service_healthy
-
-  ton_daemon:
+  ton_daemon:  # Works with the TON network: service-wallet tasks and contract deployment
     build:
       context: .
       dockerfile: Dockerfile
@@ -80,7 +64,7 @@ services:
       maria_db:
         condition: service_healthy
 
-  license_index:
+  license_index:  # Checks user wallets for new NFTs; polls each NFT via a recognizable GET method that identifies it as a specific license, and stores its details
     build:
       context: .
       dockerfile: Dockerfile

diff --git a/locale/en/LC_MESSAGES/sanic_telegram_bot.mo b/locale/en/LC_MESSAGES/sanic_telegram_bot.mo
index d185475d9a0a694ce618e3906d184c6f1f211542..3e687906e5f36c2b9ed3f992571ab1b37b1fb47b 100644
GIT binary patch
delta 882
zcmYk)&ubGw6u|Lmj5S(+wN;xMe>^=}&>9eWwg{p{5CY;s%9>1YX|h{4lWHllC?0z8
z7wSR8+8#X!f#6k?BGf;iC!zm=ms~yQ_su5YBa_e0?#!E=H@jZko=kVf@^1|xv=iEf
zjG0n%{1IC?f1{)ck+3WKuf2jVW^D0v^Vz
zSi;*lK{eW_Z~ko9m`Qwxhp>yca1(Xm(w;0&8Bf!{ju&wOb^eFGpYRF&FF4NnW~Pws
zYz}qcJn8}qco|pl2Y$s4KBayZFYPntIlf1pFbb6CL$ALAzSGgtO!-o!HfJDBQ@
zUvhB}H&FW>)DnG13gAPJMw94H_53J`VFNxWfS;%5tra>z6WSm&mj)I0mDcKN@SR-q
zXTt^z-;+OA*7Ttz=$D&^7EMN$mZ$Z8=*zFHEe5NmJatz)29*|dggL5ELaq3q-^|5h
z9K#FT$0};V4m$XYs-iPswuVL2_oty}SfO7*EhL^`L(Fe34ZXOEnxKd!+`=YaV;y&?
z+Yv^G%x>@mRe|qNmn3`iH&FSip$}M~UqfxQjicB_PXj#~T2V4-#>V~NSc9>!Pm%nX
z;yxR;wQGf6J=F22w5-S92>1&bL5k^DQ5DkpvnNHtI98)_MpbN%OC|je{c!p>^do7-
TI!|jfqrFI@9qaVpZYHaLlan?9

diff --git a/locale/en/LC_MESSAGES/sanic_telegram_bot.po b/locale/en/LC_MESSAGES/sanic_telegram_bot.po
index 4d38741..6bc61c8 100644
--- a/locale/en/LC_MESSAGES/sanic_telegram_bot.po
+++ b/locale/en/LC_MESSAGES/sanic_telegram_bot.po
@@ -171,3 +171,6 @@ msgstr "Open in app"
 msgid "p_licenseWasBought"
 msgstr "💶 {username} just purchased a license for {content_title}. "
 "👏 Your content is gaining more appreciation! Keep creating and inspiring! 🚀"
+
+msgid "p_uploadContentTxRequested"
+msgstr "A transaction to upload {title} has been requested. Confirm it and wait for a notification with the transaction result. Converting the content may take about 20 minutes."
diff --git a/uploader_test.html b/uploader_test.html
new file mode 100644
index 0000000..b2ce0a9
--- /dev/null
+++ b/uploader_test.html
@@ -0,0 +1,199 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>File upload and streaming</title>
+</head>
+<body>
+    <h1>File upload and streaming</h1>
+
+    <h2>File upload</h2>
+    <input type="file" id="fileInput">
+    <button id="uploadButton">Upload</button>
+    <div id="uploadResult"></div>
+
+    <h2>File streaming</h2>
+    <input type="text" id="hashInput" placeholder="content hash (base58)">
+    <button id="streamButton">Stream</button>
+    <video id="player" controls></video>
+
+    <h2>Log</h2>
+    <pre id="log"></pre>
+
+    <script>
+        const log = msg => document.getElementById('log').textContent += msg + '\n';
+
+        // SHA-256 of the selected file, hex-encoded for the X-Content-SHA256 header
+        async function sha256Hex(file) {
+            const digest = await crypto.subtle.digest('SHA-256', await file.arrayBuffer());
+            return [...new Uint8Array(digest)].map(b => b.toString(16).padStart(2, '0')).join('');
+        }
+
+        document.getElementById('uploadButton').onclick = async () => {
+            const file = document.getElementById('fileInput').files[0];
+            if (!file) { log('No file selected'); return; }
+            log('Hashing ' + file.name + ' ...');
+            const hashHex = await sha256Hex(file);
+            log('SHA-256: ' + hashHex);
+            const resp = await fetch('/api/v1.5/storage', {
+                method: 'POST',
+                headers: { 'X-Content-SHA256': hashHex, 'X-File-Name': file.name },
+                body: file
+            });
+            const data = await resp.json();
+            log('Upload response: ' + JSON.stringify(data));
+            document.getElementById('uploadResult').textContent = data.content_sha256 || data.error;
+            if (data.content_sha256) document.getElementById('hashInput').value = data.content_sha256;
+        };
+
+        document.getElementById('streamButton').onclick = () => {
+            const hash = document.getElementById('hashInput').value.trim();
+            if (!hash) { log('No hash provided'); return; }
+            // The browser issues Range requests against this URL on its own
+            document.getElementById('player').src = '/api/v1.5/storage/' + hash;
+            log('Streaming /api/v1.5/storage/' + hash);
+        };
+    </script>
+</body>
+</html>
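Since v1.5 stores files under their SHA-256, any full download can be verified end to end against its identifier. A closing sketch (host and hash are placeholders for a real deployment):

```python
# Content-addressing check: what you download should hash back to the
# identifier you asked for.
import hashlib
import requests
from base58 import b58encode

host = "http://localhost:8000"          # placeholder
content_sha256_b58 = "<base58-hash>"    # value returned by the upload endpoint

body = requests.get(f"{host}/api/v1.5/storage/{content_sha256_b58}").content
assert b58encode(hashlib.sha256(body).digest()).decode() == content_sha256_b58
print(f"{len(body)} bytes verified against {content_sha256_b58}")
```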