From 2916e499737bedbb60698e0a0f1dbeb3c9136f0f Mon Sep 17 00:00:00 2001 From: root Date: Tue, 21 Oct 2025 23:19:12 +0000 Subject: [PATCH] sync improve --- ARCHITECTURE.md | 219 ++++++++++++ README.md | 9 + alembic.ini | 35 ++ ...3c4a5b6_add_artist_to_encrypted_content.py | 26 ++ ...d4e6f8a1b2_expand_telegram_id_precision.py | 38 +++ app/__main__.py | 3 + app/api/__init__.py | 2 + app/api/routes/content.py | 30 ++ app/api/routes/metrics.py | 39 +++ app/api/routes/network.py | 70 +++- app/core/__pycache__/__init__.cpython-310.pyc | Bin 0 -> 132 bytes .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 194 bytes .../__pycache__/signer.cpython-310.pyc | Bin 0 -> 2337 bytes app/core/_crypto/signer.py | 74 +++-- .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 218 bytes .../_utils/__pycache__/b58.cpython-310.pyc | Bin 0 -> 1592 bytes .../_utils/__pycache__/hash.cpython-310.pyc | Bin 0 -> 930 bytes .../tg_process_template.cpython-310.pyc | Bin 0 -> 1046 bytes app/core/_utils/b58.py | 51 +++ app/core/_utils/hash.py | 29 ++ app/core/models/memory.py | 20 +- .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 140 bytes app/core/network/asn.py | 37 +++ app/core/network/dht/__init__.py | 35 ++ .../dht/__pycache__/__init__.cpython-310.pyc | Bin 0 -> 982 bytes .../dht/__pycache__/config.cpython-310.pyc | Bin 0 -> 2228 bytes .../dht/__pycache__/crdt.cpython-310.pyc | Bin 0 -> 12022 bytes .../dht/__pycache__/crypto.cpython-310.pyc | Bin 0 -> 2821 bytes .../dht/__pycache__/keys.cpython-310.pyc | Bin 0 -> 3029 bytes .../__pycache__/membership.cpython-310.pyc | Bin 0 -> 9407 bytes .../dht/__pycache__/metrics.cpython-310.pyc | Bin 0 -> 4985 bytes .../__pycache__/prometheus.cpython-310.pyc | Bin 0 -> 3357 bytes .../dht/__pycache__/records.cpython-310.pyc | Bin 0 -> 3974 bytes .../__pycache__/replication.cpython-310.pyc | Bin 0 -> 11277 bytes .../dht/__pycache__/store.cpython-310.pyc | Bin 0 -> 2305 bytes app/core/network/dht/config.py | 54 +++ app/core/network/dht/crdt.py | 278 ++++++++++++++++ app/core/network/dht/crypto.py | 70 ++++ app/core/network/dht/keys.py | 72 ++++ app/core/network/dht/membership.py | 219 ++++++++++++ app/core/network/dht/metrics.py | 144 ++++++++ app/core/network/dht/prometheus.py | 53 +++ app/core/network/dht/records.py | 128 +++++++ app/core/network/dht/replication.py | 311 ++++++++++++++++++ app/core/network/dht/store.py | 57 ++++ app/core/network/handshake.py | 7 +- app/core/network/maintenance.py | 52 +++ app/core/network/nodes.py | 39 ++- docs/indexation.md | 110 ------- docs/web2-client.md | 118 ------- docs/web2-client_task280224.md | 9 - requirements.txt | 4 + 52 files changed, 2171 insertions(+), 271 deletions(-) create mode 100644 ARCHITECTURE.md create mode 100644 alembic.ini create mode 100644 alembic/versions/b1f2d3c4a5b6_add_artist_to_encrypted_content.py create mode 100644 alembic/versions/c2d4e6f8a1b2_expand_telegram_id_precision.py create mode 100644 app/api/routes/metrics.py create mode 100644 app/core/__pycache__/__init__.cpython-310.pyc create mode 100644 app/core/_crypto/__pycache__/__init__.cpython-310.pyc create mode 100644 app/core/_crypto/__pycache__/signer.cpython-310.pyc create mode 100644 app/core/_utils/__pycache__/__init__.cpython-310.pyc create mode 100644 app/core/_utils/__pycache__/b58.cpython-310.pyc create mode 100644 app/core/_utils/__pycache__/hash.cpython-310.pyc create mode 100644 app/core/_utils/__pycache__/tg_process_template.cpython-310.pyc create mode 100644 app/core/_utils/b58.py create mode 100644 app/core/_utils/hash.py create mode 
100644 app/core/network/__pycache__/__init__.cpython-310.pyc create mode 100644 app/core/network/asn.py create mode 100644 app/core/network/dht/__init__.py create mode 100644 app/core/network/dht/__pycache__/__init__.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/config.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/crdt.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/crypto.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/keys.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/membership.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/metrics.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/prometheus.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/records.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/replication.cpython-310.pyc create mode 100644 app/core/network/dht/__pycache__/store.cpython-310.pyc create mode 100644 app/core/network/dht/config.py create mode 100644 app/core/network/dht/crdt.py create mode 100644 app/core/network/dht/crypto.py create mode 100644 app/core/network/dht/keys.py create mode 100644 app/core/network/dht/membership.py create mode 100644 app/core/network/dht/metrics.py create mode 100644 app/core/network/dht/prometheus.py create mode 100644 app/core/network/dht/records.py create mode 100644 app/core/network/dht/replication.py create mode 100644 app/core/network/dht/store.py create mode 100644 app/core/network/maintenance.py delete mode 100644 docs/indexation.md delete mode 100644 docs/web2-client.md delete mode 100644 docs/web2-client_task280224.md diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 0000000..d7bd6d8 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,219 @@ +# System Architecture Overview + +This document is the single source of truth for the platform’s architecture, protocols, data flows, and operational details. It supersedes previous scattered docs. + +## Contents +- Components & Topology +- Decentralized Layer (Membership, Replication, Metrics) +- Upload/Conversion Pipeline +- Content View & Purchase Flow +- API Surface (selected endpoints) +- Data Keys & Schemas +- Configuration & Defaults +- Observability & Metrics +- Sequence Diagrams (Mermaid) + +--- + +## Components & Topology + +- Backend API: Sanic-based service (Telegram bots embedded) with PostgreSQL (SQLAlchemy + Alembic). +- Storage: Local FS for uploaded/derived data; IPFS used for discovery/pinning; tusd for resumable uploads. +- Converter workers: Dockerized ffmpeg pipeline (convert_v3, convert_process) driven by background tasks. +- Frontend: Vite + TypeScript client served via nginx container. +- Decentralized overlay (in-process DHT): Membership, replication lease management, windowed content metrics. + +```mermaid +flowchart LR + Client -- TWA/HTTP --> Frontend + Frontend -- REST --> API[Backend API] + API -- tus hooks --> tusd + API -- SQL --> Postgres + API -- IPC --> Workers[Converter Workers] + API -- IPFS --> IPFS + API -- DHT --> DHT[(In-Process DHT)] + DHT -- CRDT Merge --> DHT +``` + +--- + +## Decentralized Layer + +### Identity & Versions +- NodeID = blake3(Ed25519 public key), ContentID = blake3(encrypted_blob) +- schema_version = v1 embedded into DHT keys/records. + +### Membership +- Signed `/api/v1/network.handshake` with Ed25519; includes: + - Node info, capabilities, metrics, IPFS metadata. + - reachability_receipts: (issuer, target, ASN, timestamp, signature). 
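A note on the wire format: the handshake signature covers every field except `signature` itself, serialised as canonical JSON (sorted keys, compact separators) so both sides hash the same bytes. A minimal sketch of both directions, assuming PyNaCl and the bundled base58 helpers (the field values are placeholders):

```python
import json
import nacl.signing

from app.core._utils.b58 import b58encode, b58decode

def canonical_blob(payload: dict) -> bytes:
    # Everything except the signature, in a byte-stable encoding
    signed_fields = {k: v for k, v in payload.items() if k != "signature"}
    return json.dumps(signed_fields, sort_keys=True, separators=(",", ":")).encode()

# Sender: public_key must be in place before signing, since it is covered too
sk = nacl.signing.SigningKey(b"\x07" * 32)  # demo seed only
payload = {"version": "1.0.0", "node_type": "public", "timestamp": 1729552752}
payload["public_key"] = b58encode(bytes(sk.verify_key)).decode()
payload["signature"] = b58encode(sk.sign(canonical_blob(payload)).signature).decode()

# Receiver: mirrors the check in app/api/routes/network.py
vk = nacl.signing.VerifyKey(b58decode(payload["public_key"]))
vk.verify(canonical_blob(payload), b58decode(payload["signature"]))  # raises if tampered
```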
+- State: LWW-Set for members + receipts, HyperLogLog for population estimate. +- Island filtering: nodes with `reachability_ratio < q` are excluded (`k=5`, `q=0.6`, TTL=600s). +- N_estimate: `max(valid N_local reports)` across sufficiently reachable peers. + +### Replication & Leases +- Compute prefix `p = max(0, round(log2(N_estimate / R_target)))` with `R_target ≥ 3`. +- Responsible nodes: first `p` bits of NodeID equal first `p` bits of ContentID. +- Leader = min NodeID among responsible. +- Leader maintains `replica_leases` with TTL=600s and diversity: ≥3 IP first octets and ≥3 ASN if available. +- Rendezvous ranking: blake3(ContentID || NodeID) for candidate selection. +- Heartbeat interval 60s, miss threshold 3 → failover within ≤180s. + +### Metrics (Windowed CRDT) +- On view: PN-Counter for views; HyperLogLog for uniques (ViewID = blake3(ContentID || device_salt)); G-Counter for watch_time, bytes_out, completions. +- Keys are windowed by hour; commutative merges ensure deterministic convergence. + +```mermaid +stateDiagram-v2 + [*] --> Discover + Discover: Handshake + receipts + Discover --> Active: k ASN receipts & TTL ok + Active --> Leader: Content prefix p elects min NodeID + Leader --> Leased: Assign replica_leases (diversity) + Leased --> Monitoring: Heartbeats every 60s + Monitoring --> Reassign: Missed 3 intervals + Reassign --> Leased +``` + +--- + +## Upload & Conversion Pipeline + +1) Client uploads via `tusd` (resumable). Backend receives hooks (`/api/v1/upload.tus-hook`). +2) Encrypted content is registered; converter workers derive preview/low/high (for media) or original (for binaries). +3) Derivative metadata stored in DB and surfaced via `/api/v1/content.view`. + +```mermaid +sequenceDiagram + participant C as Client + participant T as tusd + participant B as Backend + participant W as Workers + participant DB as PostgreSQL + + C->>T: upload chunks + T->>B: hooks (pre/post-finish) + B->>DB: create content record + B->>W: enqueue conversion + W->>DB: store derivatives + C->>B: GET /content.view + B->>DB: resolve latest derivatives + B-->>C: display_options + status +``` + +--- + +## Content View & Purchase Flow + +- `/api/v1/content.view/` resolves content and derivatives: + - For binary content without previews: present original only when licensed. + - For audio/video: use preview/low for unauth; decrypted_low/high for licensed users. + - Frontend shows processing state when derivatives are pending. +- Purchase options (TON/Stars) remain in a single row (UI constraint). +- Cover art layout: fixed square slot; image fits without stretching; background follows page color, not black. + +```mermaid +flowchart LR + View[content.view] --> Resolve[Resolve encrypted/decrypted rows] + Resolve --> Derivations{Derivatives ready?} + Derivations -- No --> Status[processing/pending] + Derivations -- Yes --> Options + Options -- Binary + No License --> Original hidden + Options -- Media + No License --> Preview/Low + Options -- Licensed --> Decrypted Low/High or Original +``` + +--- + +## Selected APIs + +- `GET /api/system.version` – liveness/protocol version. +- `POST /api/v1/network.handshake` – signed membership exchange. +- `GET /api/v1/content.view/` – resolves display options, status, and downloadability. +- `GET /api/v1.5/storage/` – static file access. +- `POST /api/v1/storage` – legacy upload endpoint. + +--- + +## Data Keys & Schemas + +- MetaKey(content_id): tracks `replica_leases`, `leader`, `conflict_log`, `revision`. 
+- MembershipKey(node_id): LWW-Set of members & receipts, HyperLogLog population, N_reports. +- MetricKey(content_id, window_id): PN-/G-/HLL serialized state. + +All DHT records are signed and merged via deterministic CRDT strategies + LWW dominance (logical_counter, timestamp, node_id). + +--- + +## Configuration & Defaults + +- Network: `NODE_PRIVACY`, `PUBLIC_HOST`, `HANDSHAKE_INTERVAL_SEC`, TLS verify, IPFS peering. +- DHT: `DHT_MIN_RECEIPTS=5`, `DHT_MIN_REACHABILITY=0.6`, `DHT_MEMBERSHIP_TTL=600`, `DHT_REPLICATION_TARGET=3`, `DHT_LEASE_TTL=600`, `DHT_HEARTBEAT_INTERVAL=60`, `DHT_HEARTBEAT_MISS_THRESHOLD=3`, `DHT_MIN_ASN=3`, `DHT_MIN_IP_OCTETS=3`, `DHT_METRIC_WINDOW_SEC=3600`. +- Conversion resources: `CONVERT_*` limits (CPU/mem), `MAX_CONTENT_SIZE_MB`. + +--- + +## Observability & Metrics + +Prometheus (exported in-process): +- dht_replication_under / dht_replication_over / dht_leader_changes_total +- dht_merge_conflicts_total +- dht_view_count_total / dht_unique_view_estimate / dht_watch_time_seconds + +Logs track replication conflict_log entries and HTTP structured errors (with session_id/error_id). + +--- + +## Sequence Diagrams (Consolidated) + +### Membership & N_estimate +```mermaid +sequenceDiagram + participant A as Node A + participant B as Node B + A->>B: POST /network.handshake {nonce, ts, signature} + B->>B: verify ts, nonce, signature + B->>B: upsert member; store receipts + B-->>A: {node, known_public_nodes, n_estimate, signature} + A->>A: merge; recompute N_estimate = max(N_local, peers) +``` + +### Replication Leader Election +```mermaid +sequenceDiagram + participant L as Leader + participant Peers as Responsible Nodes + L->>L: compute p from N_estimate + L->>Peers: rendezvous scores for ContentID + L->>L: assign leases (diversity) + Peers-->>L: heartbeat every 60s + L->>L: reassign on 3 misses (≤180s) +``` + +### Metrics Publication +```mermaid +sequenceDiagram + participant C as Client + participant API as Backend + participant M as MetricsAggregator + participant D as DHT + + C->>API: GET content.view?watch_time&bytes_out + API->>M: record_view(delta) + M->>D: merge MetricKey(ContentID, Window) + M->>API: update gauges +``` + +--- + +## Run & Test + +```bash +# Spin services +docker compose -f /home/configs/docker-compose.yml --env-file /home/configs/.env up -d --build + +# Backend unit tests (DHT integration) +cd uploader-bot +python3 -m unittest discover -s tests/dht +``` + diff --git a/README.md b/README.md index dbfc5a2..4881a5f 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,14 @@ # Sanic Telegram Bot [template] +See the consolidated system design with protocol, flows, configuration, and diagrams in `ARCHITECTURE.md`. 
+ +### Running DHT integration tests + +```shell +cd uploader-bot +python3 -m unittest discover -s tests/dht +``` + --- ## Run ```shell diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..d134290 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,35 @@ +[alembic] +script_location = alembic +sqlalchemy.url = ${DATABASE_URL} + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s diff --git a/alembic/versions/b1f2d3c4a5b6_add_artist_to_encrypted_content.py b/alembic/versions/b1f2d3c4a5b6_add_artist_to_encrypted_content.py new file mode 100644 index 0000000..6712053 --- /dev/null +++ b/alembic/versions/b1f2d3c4a5b6_add_artist_to_encrypted_content.py @@ -0,0 +1,26 @@ +"""add artist column to encrypted content + +Revision ID: b1f2d3c4a5b6 +Revises: a7c1357e8d15 +Create Date: 2024-06-05 00:00:00.000000 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'b1f2d3c4a5b6' +down_revision: Union[str, None] = 'a7c1357e8d15' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column('encrypted_contents', sa.Column('artist', sa.String(length=512), nullable=True)) + + +def downgrade() -> None: + op.drop_column('encrypted_contents', 'artist') diff --git a/alembic/versions/c2d4e6f8a1b2_expand_telegram_id_precision.py b/alembic/versions/c2d4e6f8a1b2_expand_telegram_id_precision.py new file mode 100644 index 0000000..eabfd72 --- /dev/null +++ b/alembic/versions/c2d4e6f8a1b2_expand_telegram_id_precision.py @@ -0,0 +1,38 @@ +"""expand telegram_id precision on stars invoices + +Revision ID: c2d4e6f8a1b2 +Revises: b1f2d3c4a5b6 +Create Date: 2025-10-17 00:00:00.000000 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = 'c2d4e6f8a1b2' +down_revision: Union[str, None] = 'b1f2d3c4a5b6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.alter_column( + 'stars_invoices', + 'telegram_id', + existing_type=sa.Integer(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + + +def downgrade() -> None: + op.alter_column( + 'stars_invoices', + 'telegram_id', + existing_type=sa.BigInteger(), + type_=sa.Integer(), + existing_nullable=True, + ) diff --git a/app/__main__.py b/app/__main__.py index 970ceae..4c4ea1c 100644 --- a/app/__main__.py +++ b/app/__main__.py @@ -104,6 +104,7 @@ if __name__ == '__main__': from app.client_bot import dp as client_bot_dp from app.core._config import SANIC_PORT, PROJECT_HOST, DATABASE_URL from app.core.network.nodes import network_handshake_daemon, bootstrap_once_and_exit_if_failed + from app.core.network.maintenance import replication_daemon, heartbeat_daemon app.ctx.memory = main_memory for _target in [uploader_bot_dp, client_bot_dp]: @@ -121,6 +122,8 @@ if __name__ == '__main__': # Start network handshake daemon and bootstrap step app.add_task(network_handshake_daemon(app)) app.add_task(bootstrap_once_and_exit_if_failed()) + app.add_task(replication_daemon(app)) + app.add_task(heartbeat_daemon(app)) app.run(host='0.0.0.0', port=SANIC_PORT) else: diff --git a/app/api/__init__.py b/app/api/__init__.py index 4a47eb9..cdca550 100644 --- a/app/api/__init__.py +++ b/app/api/__init__.py @@ -57,6 +57,7 @@ from app.api.routes.tonconnect import s_api_v1_tonconnect_new, s_api_v1_tonconne from app.api.routes.keys import s_api_v1_keys_request from app.api.routes.sync import s_api_v1_sync_pin, s_api_v1_sync_status from app.api.routes.upload_status import s_api_v1_upload_status +from app.api.routes.metrics import s_api_metrics app.add_route(s_index, "/", methods=["GET", "OPTIONS"]) @@ -127,6 +128,7 @@ app.add_route(s_api_v1_keys_request, "/api/v1/keys.request", methods=["POST", "O app.add_route(s_api_v1_sync_pin, "/api/v1/sync.pin", methods=["POST", "OPTIONS"]) app.add_route(s_api_v1_sync_status, "/api/v1/sync.status", methods=["GET", "OPTIONS"]) app.add_route(s_api_v1_upload_status, "/api/v1/upload.status/", methods=["GET", "OPTIONS"]) +app.add_route(s_api_metrics, "/metrics", methods=["GET", "OPTIONS"]) @app.exception(BaseException) diff --git a/app/api/routes/content.py b/app/api/routes/content.py index 323e8c8..a7d3dd2 100644 --- a/app/api/routes/content.py +++ b/app/api/routes/content.py @@ -12,6 +12,7 @@ from app.core.models.content.user_content import UserContent from app.core._config import CLIENT_TELEGRAM_API_KEY, CLIENT_TELEGRAM_BOT_USERNAME, PROJECT_HOST from app.core.models.content_v3 import EncryptedContent as ECv3, ContentDerivative as CDv3, UploadSession from app.core.content.content_id import ContentId +from app.core.network.dht import MetricsAggregator import json import uuid @@ -418,6 +419,35 @@ async def s_api_v1_content_view(request, content_address: str): if not opts.get('content_ext') and '/' in content_mime: opts['content_ext'] = content_mime.split('/')[-1] + metrics_mgr: MetricsAggregator | None = getattr(request.app.ctx.memory, "metrics", None) + if metrics_mgr: + viewer_salt_raw = request.headers.get("X-View-Salt") + if viewer_salt_raw: + try: + viewer_salt = bytes.fromhex(viewer_salt_raw) + except ValueError: + viewer_salt = viewer_salt_raw.encode() + elif request.ctx.user: + viewer_salt = f"user:{request.ctx.user.id}".encode() + else: + viewer_salt = 
(request.remote_addr or request.ip or "anonymous").encode() + try: + watch_time_param = int(request.args.get("watch_time", 0)) + except (TypeError, ValueError): + watch_time_param = 0 + try: + bytes_out_param = int(request.args.get("bytes_out", 0)) + except (TypeError, ValueError): + bytes_out_param = 0 + completed_param = request.args.get("completed", "0") in ("1", "true", "True") + metrics_mgr.record_view( + content_id=content['encrypted_content'].hash, + viewer_salt=viewer_salt, + watch_time=watch_time_param, + bytes_out=bytes_out_param, + completed=completed_param, + ) + return response.json({ **opts, 'encrypted': content['encrypted_content'].json_format(), diff --git a/app/api/routes/metrics.py b/app/api/routes/metrics.py new file mode 100644 index 0000000..d82f396 --- /dev/null +++ b/app/api/routes/metrics.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from sanic import response + + +async def s_api_metrics(request): + try: + from prometheus_client import generate_latest, CONTENT_TYPE_LATEST # type: ignore + data = generate_latest() + return response.raw(data, content_type=CONTENT_TYPE_LATEST) + except Exception: + # Fallback: export minimal in-process counters from DHT module, if available + try: + from app.core.network.dht import prometheus as dprom + + def dump(metric_obj, metric_name): + lines = [] + values = getattr(metric_obj, "_values", {}) + for labels, value in values.items(): + label_str = ",".join(f'{k}="{v}"' for k, v in labels) + if label_str: + lines.append(f"{metric_name}{{{label_str}}} {value}") + else: + lines.append(f"{metric_name} {value}") + return lines + + parts = [] + parts += dump(dprom.replication_under, "dht_replication_under_total") + parts += dump(dprom.replication_over, "dht_replication_over_total") + parts += dump(dprom.leader_changes, "dht_leader_changes_total") + parts += dump(dprom.merge_conflicts, "dht_merge_conflicts_total") + parts += dump(dprom.view_count_total, "dht_view_count_total") + parts += dump(dprom.unique_estimate, "dht_unique_view_estimate") + parts += dump(dprom.watch_time_seconds, "dht_watch_time_seconds") + body = "\n".join(parts) + ("\n" if parts else "") + return response.text(body, content_type="text/plain; version=0.0.4") + except Exception: + return response.text("") + diff --git a/app/api/routes/network.py b/app/api/routes/network.py index e338f7f..9f47b53 100644 --- a/app/api/routes/network.py +++ b/app/api/routes/network.py @@ -4,7 +4,7 @@ import json from datetime import datetime from typing import Dict, Any -from base58 import b58decode +from app.core._utils.b58 import b58decode from sanic import response from urllib.parse import urlparse @@ -19,6 +19,8 @@ from app.core.network.config import HANDSHAKE_TS_TOLERANCE_SEC from app.core.ipfs_client import swarm_connect from app.core._config import PROJECT_HOST from app.core.events.service import record_event +from app.core.network.asn import resolver as asn_resolver +from app.core.network.dht import compute_node_id, dht_config, ReachabilityReceipt def _port_from_public_host(public_host: str) -> int: @@ -91,7 +93,7 @@ async def s_api_v1_network_handshake(request): return response.json({"error": "RATE_LIMIT"}, status=429) data = request.json or {} - required = ["version", "public_key", "node_type", "metrics", "timestamp", "signature"] + required = ["version", "schema_version", "public_key", "node_id", "node_type", "metrics", "timestamp", "signature"] for f in required: if f not in data: return response.json({"error": f"Missing field {f}"}, status=400) @@ -137,22 +139,62 
@@ async def s_api_v1_network_handshake(request):
                 "peer": peer_version,
             }, status=409)
 
-    # Verify signature
+    # Verify signature (Ed25519) over the entire payload except the signature itself
+    signed_fields = {k: v for (k, v) in data.items() if k != "signature"}
+    blob = json.dumps(signed_fields, sort_keys=True, separators=(",", ":")).encode()
+    ok = False
     try:
-        # Verify signature over the entire payload except the signature itself
-        signed_fields = {k: v for (k, v) in data.items() if k != "signature"}
-        blob = json.dumps(signed_fields, sort_keys=True, separators=(",", ":")).encode()
-        import nacl.signing, nacl.encoding
-        vk = nacl.signing.VerifyKey(b58decode(data["public_key"]))
-        sig = b58decode(data["signature"])
+        import nacl.signing, nacl.encoding  # type: ignore
+        vk = nacl.signing.VerifyKey(b58decode(data.get("public_key", "")))
+        sig = b58decode(data.get("signature", ""))
         vk.verify(blob, sig)
         ok = True
     except Exception:
         ok = False
     if not ok:
         make_log("Handshake", f"Signature verification failed from {data.get('public_host')}", level='warning')
         return response.json({"error": "BAD_SIGNATURE"}, status=400)
 
+    # Reject incompatible schemas and forged NodeIDs before ingesting membership data
+    if data.get("schema_version") != dht_config.schema_version:
+        return response.json({"error": "UNSUPPORTED_SCHEMA_VERSION"}, status=400)
+    expected_node_id = compute_node_id(b58decode(data["public_key"]))
+    if data.get("node_id") != expected_node_id:
+        return response.json({"error": "NODE_ID_MISMATCH"}, status=400)
+
+    # Update membership / reachability information
+    try:
+        membership_mgr = getattr(request.app.ctx.memory, "membership", None)
+        if membership_mgr:
+            remote_ip = (request.headers.get('X-Forwarded-For') or request.remote_addr or request.ip or '').split(',')[0].strip() or None
+            remote_asn = data.get("asn")
+            if remote_asn is None:
+                remote_asn = asn_resolver.resolve(remote_ip)
+            else:
+                if remote_ip:
+                    asn_resolver.learn(remote_ip, int(remote_asn))
+            membership_mgr.update_member(
+                node_id=data["node_id"],
+                public_key=data["public_key"],
+                ip=remote_ip,
+                asn=int(remote_asn) if remote_asn is not None else None,
+                metadata={
+                    "capabilities": data.get("capabilities", {}),
+                    "metrics": data.get("metrics", {}),
+                    "public_host": data.get("public_host"),
+                },
+            )
+            for receipt in data.get("reachability_receipts") or []:
+                if not receipt.get("target_id") or not receipt.get("issuer_id"):
+                    continue
+                try:
+                    membership_mgr.record_receipt(
+                        ReachabilityReceipt(
+                            target_id=str(receipt.get("target_id")),
+                            issuer_id=str(receipt.get("issuer_id")),
+                            asn=int(receipt["asn"]) if receipt.get("asn") is not None else None,
+                            timestamp=float(receipt.get("timestamp", data.get("timestamp"))),
+                            signature=str(receipt.get("signature", "")),
+                        )
+                    )
+                except Exception:
+                    continue
+    except Exception as exc:
+        make_log("Handshake", f"Membership ingest failed: {exc}", level='warning')
+
     # Upsert node and respond with our info + known public nodes
     # Do not persist private peers (ephemeral)
     if data.get("node_type") != "private" and data.get("public_host"):
@@ -215,10 +257,13 @@
 
     node = await compute_node_info(request.ctx.db_session)
     known = await list_known_public_nodes(request.ctx.db_session)
+    membership_mgr = getattr(request.app.ctx.memory, "membership", None)
+    n_estimate = membership_mgr.n_estimate() if membership_mgr else 0
     resp = sign_response({
         "compatibility": comp,
         "node": node,
         "known_public_nodes": known,
+        "n_estimate": n_estimate,
    })
     make_log("Handshake", f"OK with {data.get('public_host')} compat={comp}")
     status = 200
"UNSUPPORTED_SCHEMA_VERSION"}, status=400) + expected_node_id = compute_node_id(b58decode(data["public_key"])) + if data.get("node_id") != expected_node_id: + return response.json({"error": "NODE_ID_MISMATCH"}, status=400) diff --git a/app/core/__pycache__/__init__.cpython-310.pyc b/app/core/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2b02183728f09fd62d11d2d28a66db3b3360e5b GIT binary patch literal 132 zcmd1j<>g`kfho%~K-6OpfeaXd90wpSW&si@48aVVjD9N_ia^2$;+ML9Mt*LperZ8Y zequ^$k#16ciGE^1fqrs+QL27?d}dx|NqoFsLFFwDo80`A(wtN~kO9R^K!Sw<0FxUV A6951J literal 0 HcmV?d00001 diff --git a/app/core/_crypto/__pycache__/__init__.cpython-310.pyc b/app/core/_crypto/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71529a41518dcff3e8a5edc6d9b4b179c101a769 GIT binary patch literal 194 zcmd1j<>g`kfho%~w1L!PFb9cX0ROwUU#^3!CxC7xJNpqHFql&Tk>TvS<5lCM_`mMa1owUVKT1xSI3U;6qP`MIh3 zr3E?pi7BZ?x=HyZ`asS4AkF$v&HC~2nR%Hd@$q^EmA5!-a`RJ4b5iXXfo2qg%oAV$ E0O&t23jhEB literal 0 HcmV?d00001 diff --git a/app/core/_crypto/__pycache__/signer.cpython-310.pyc b/app/core/_crypto/__pycache__/signer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55f7076d508e8ce1a0d73e4fcf92d983fcfdab9d GIT binary patch literal 2337 zcmZ`)%WoS+7@wJac)fPseL#vJQh6;98&QgSKqzfhKmz$<)T+L$Hl9ghXZ>JyP3y?H zke>J}l4Jg*IdI|BGZ&Og`F*pFFqbhW7XJe|vXIjQvA{`4(WX z1~Z-L`!gXundVvotA6=-t_O|@3czKSij;j6^!q4MxS(BrRS{AU(h*T zTJD9dkTX@%_kUv2m!(%smX1WLB1Mxy638+nWk{+Rg_q}}%4_JuB4Ntg_aCT!dmxqV zN=4nqZ$C!dENrLAzPcC7&aTSxHf@-1J_cK$e;>K!Y3^DCamjqee`_En(WkRyxzdRgPTp9z0n(G`Np=|xcAM* z_BdBrR5DJg`X+puq$Bl2>jAXrC+*afGU(Ll@0yU5L+2MYHkGL`-p3&PL+Q@WnEI!J zbWe9QQh}W^y@ub}W_@qaQ}xj>9VAle&Fw*6Plm&Kd!SW4ZtL+dAJpkUrE9~n3F5fZ z@8og3g3TG_Pn8FJ5#QMoseo1ID@-VpJc+FZ#@8wz>Ha0Rx!65(+lLsyDeRb&iw?y^ zbj6{QI}_)KAG;^yB;JTTy@Yiz16P?c@dU>zO%X7Dksgt&S25JLNw#QJmvK*JS+cA2 z9hxD-3wDEg>tM7vqnxR7pI7-=Wois3Ji~$#xs?@6bWO5#&SXAcPJ4SWNiD zIYA9QLrpcA%W|jdcD?a>Q7g|;S55YUMGmF1ujy#L3}*ZyTDnSuP`zxcVcr?^Gqdt2 zk))C}*Ji-HnN~xyM9SI)h10Lv`4JBN0F*6)1zaF#K3@ijH)jh|dyD%Ur)7$CKk2DB zHenq126B{AzZ%ECkCJpcSj;w%RG(ml{*)Lgr^$mhP`XaNi&HNG^Cc!| zgRKJ_7)O)rt+0@~TOjR$MO7O+3o7sSFkOEgDq61#lsWJc{>M`TA5dwb)F~>uLW}^* zs$#1|6ZXtmFpEMB@);)iNN;_MR#I&j0iXz-H_9y{8vud;sE68}@Y@WPEOH3KzXfHqe-Y@S3GA*%m6=8aVH0+Yv;-Og1t(%3 zm{DlLZJoUh0D*crRP9{Js~S)(J^)z_LIAeDi%~;=3_iE^A{8jYl!Ws&>P8D@4NpUj zo`zaH$~$RR!~1N(*J7Aiphu}@a}aM)fnrrFBWR*`YN=Rp0c_>?=ZDR>@$m7lru^hw zY;Ih=XD`Y23jdR8MJ`;E!zTE?fT|y;wla#)G7} str: - signed_message = self.signing_key.sign(data_bytes) - signature = signed_message.signature - return base58.b58encode(signature).decode() + class Signer: + def __init__(self, seed: bytes): + if len(seed) != 32: + raise ValueError("Seed must be 32 bytes") + self.signing_key = nacl.signing.SigningKey(seed) + self.verify_key = self.signing_key.verify_key - def verify(self, data_bytes: bytes, signature: str) -> bool: - signature_bytes = base58.b58decode(signature) - try: - self.verify_key.verify(data_bytes, signature_bytes) - return True - except nacl.exceptions.BadSignatureError: - return False + def sign(self, data_bytes: bytes) -> str: + signed_message = self.signing_key.sign(data_bytes) + signature = signed_message.signature + return b58encode(signature).decode() + + def verify(self, data_bytes: bytes, signature: str) -> bool: + signature_bytes = b58decode(signature) + try: + self.verify_key.verify(data_bytes, signature_bytes) + return True + except nacl.exceptions.BadSignatureError: + return False + +else: 
diff --git a/app/core/_utils/__pycache__/__init__.cpython-310.pyc b/app/core/_utils/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ab8c37bf8497cdc1b531f3e05327ae83f61eb797
Binary files /dev/null and b/app/core/_utils/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/core/_utils/__pycache__/b58.cpython-310.pyc b/app/core/_utils/__pycache__/b58.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/_utils/__pycache__/b58.cpython-310.pyc differ
diff --git a/app/core/_utils/__pycache__/hash.cpython-310.pyc b/app/core/_utils/__pycache__/hash.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/_utils/__pycache__/hash.cpython-310.pyc differ
diff --git a/app/core/_utils/__pycache__/tg_process_template.cpython-310.pyc b/app/core/_utils/__pycache__/tg_process_template.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/_utils/__pycache__/tg_process_template.cpython-310.pyc differ
diff --git a/app/core/_utils/b58.py b/app/core/_utils/b58.py
new file mode 100644
--- /dev/null
+++ b/app/core/_utils/b58.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
+ALPHABET_INDEX = {char: index for index, char in enumerate(ALPHABET)}
+
+
+def _to_bytes(value: bytes | bytearray | str) -> bytes:
+    if isinstance(value, (bytes, bytearray)):
+        return bytes(value)
+    if isinstance(value, str):
+        return value.encode()
+    raise TypeError("value must be bytes or str")
+
+
+def b58encode(data: bytes | bytearray | str) -> bytes:
+    data = _to_bytes(data)
+    if not data:
+        return b""
+    n = int.from_bytes(data, "big")
+    out = []
+    while n > 0:
+        n, rem = divmod(n, 58)
+        out.append(ALPHABET[rem])
+    enc = "".join(reversed(out))
+    leading = 0
+    for b in data:
+        if b == 0:
+            leading += 1
+        else:
+            break
+    return ("1" * leading + enc).encode()
+
+
+def b58decode(data: bytes | bytearray | str) -> bytes:
+    data_b = _to_bytes(data)
+    if not data_b:
+        return b""
+    num = 0
+    for ch in data_b.decode():
+        num = num * 58 + ALPHABET_INDEX[ch]
+    full = num.to_bytes((num.bit_length() + 7) // 8, "big")
+    leading = 0
+    for ch in data_b:
+        if ch == ord('1'):
+            leading += 1
+        else:
+            break
+    return b"\x00" * leading + full
diff --git a/app/core/_utils/hash.py b/app/core/_utils/hash.py
new file mode 100644
index 0000000..b3a95a3
--- /dev/null
+++ b/app/core/_utils/hash.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import hashlib
+from typing import Iterable
+
+
+def _to_bytes(data: Iterable[int] | bytes | bytearray | str) -> bytes:
+    if isinstance(data, (bytes, bytearray)):
+        return bytes(data)
+    if isinstance(data, str):
+        return data.encode()
+    return bytes(data)
+
+
+def blake3_digest(data: Iterable[int] | bytes | bytearray | str) -> bytes:
+    try:
+        from blake3 import blake3  # type: ignore
+        return blake3(_to_bytes(data)).digest()
+    except Exception:
+        return hashlib.blake2s(_to_bytes(data)).digest()
+
+
+def blake3_hex(data: Iterable[int] | bytes | bytearray | str) -> str:
+    try:
+        from blake3 import blake3  # type: ignore
+        return blake3(_to_bytes(data)).hexdigest()
+    except Exception:
+        return hashlib.blake2s(_to_bytes(data)).hexdigest()
diff --git a/app/core/models/memory.py b/app/core/models/memory.py
index 949748e..55d44c7 100644
--- a/app/core/models/memory.py
+++ b/app/core/models/memory.py
@@ -4,9 +4,19 @@ from datetime import datetime
 from datetime import timedelta
 
 from aiogram import Bot
+from app.core._utils.b58 import b58encode
 from app.core._config import TELEGRAM_API_KEY, CLIENT_TELEGRAM_API_KEY
+from app.core._crypto.signer import Signer
+from app.core._secrets import hot_pubkey, hot_seed
 from app.core.logger import make_log
+from app.core.network.dht import (
+    DHTStore,
+    MembershipManager,
+    ReplicationManager,
+    MetricsAggregator,
+    compute_node_id,
+)
 
 
 class Memory:
@@ -46,6 +56,15 @@
         self._handshake_rl = {"minute": 0, "counts": {}}
         self._handshake_nonces = {}
 
+        # Decentralised storage components
+        self.node_id = compute_node_id(hot_pubkey)
+        self.signer = Signer(hot_seed)
+        self.dht_store = DHTStore(self.node_id, self.signer)
+        self.membership = MembershipManager(self.node_id, self.signer, self.dht_store)
+        self.replication = ReplicationManager(self.node_id, self.signer, self.dht_store)
+        self.metrics = MetricsAggregator(self.node_id, self.signer, self.dht_store)
+        self.membership.register_local(public_key=b58encode(hot_pubkey).decode(), ip=None, asn=None)
+
     @asynccontextmanager
     async def transaction(self, desc=""):
         make_log("Memory.transaction", f"Starting transaction; {desc}", level='debug')
@@ -80,4 +99,3 @@ class Memory:
                 make_log("Queue.add_task", f"Error when adding task to memory: {e}", level='error')
 
         self._execute_queue.append([_fn, args, kwargs])
-
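The Memory wiring above is also the cheapest way to stand the layer up in isolation (for example in the tests/dht suite): everything is in-process and needs only a 32-byte seed. A sketch under the same constructor signatures, assuming PyNaCl so the Ed25519 `Signer` branch is taken:

```python
import nacl.signing

from app.core._crypto.signer import Signer
from app.core._utils.b58 import b58encode
from app.core.network.dht import (
    DHTStore,
    MembershipManager,
    MetricsAggregator,
    compute_node_id,
)

seed = b"\x42" * 32                          # test-only seed
pubkey = bytes(nacl.signing.SigningKey(seed).verify_key)

node_id = compute_node_id(pubkey)            # NodeID = blake3(public key)
signer = Signer(seed)
store = DHTStore(node_id, signer)

membership = MembershipManager(node_id, signer, store)
membership.register_local(public_key=b58encode(pubkey).decode(), ip=None, asn=None)

metrics = MetricsAggregator(node_id, signer, store)
metrics.record_view(                          # same call content.view makes
    content_id="deadbeef",                    # ContentID as a hex string
    viewer_salt=b"user:1",
    watch_time=30,
    bytes_out=1 << 20,
    completed=False,
)
```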
diff --git a/app/core/network/asn.py b/app/core/network/asn.py
new file mode 100644
index 0000000..704ce7f
--- /dev/null
+++ b/app/core/network/asn.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+import ipaddress
+from dataclasses import dataclass, field
+from typing import Dict, Optional
+
+from app.core.logger import make_log
+
+
+@dataclass
+class ASNResolver:
+    cache: Dict[str, int] = field(default_factory=dict)
+
+    def normalise(self, ip: str | None) -> Optional[str]:
+        if not ip:
+            return None
+        try:
+            return str(ipaddress.ip_address(ip))
+        except Exception:
+            return None
+
+    def resolve(self, ip: str | None) -> Optional[int]:
+        norm = self.normalise(ip)
+        if not norm:
+            return None
+        return self.cache.get(norm)
+
+    def learn(self, ip: str, asn: int) -> None:
+        norm = self.normalise(ip)
+        if not norm:
+            make_log("ASNResolver", f"Invalid IP provided for learn: {ip}", level="warning")
+            return
+        self.cache[norm] = asn
+
+
+resolver = ASNResolver()
diff --git a/app/core/network/dht/__init__.py b/app/core/network/dht/__init__.py
new file mode 100644
index 0000000..0d8adb6
--- /dev/null
+++ b/app/core/network/dht/__init__.py
@@ -0,0 +1,35 @@
+"""
+Decentralised storage, replication, and metrics layer.
+"""
+
+from .config import dht_config, DHTConfig
+from .crypto import compute_node_id, compute_content_id, compute_view_id, bits_from_hex, rendezvous_score
+from .keys import MetaKey, MetricKey, MembershipKey
+from .membership import MembershipManager, MembershipState, ReachabilityReceipt
+from .replication import ReplicationManager, ReplicationState, ReplicaLease
+from .metrics import MetricsAggregator, ContentMetricsState, MetricDelta
+from .store import DHTStore
+
+__all__ = [
+    "dht_config",
+    "DHTConfig",
+    "compute_node_id",
+    "compute_content_id",
+    "compute_view_id",
+    "bits_from_hex",
+    "rendezvous_score",
+    "MetaKey",
+    "MetricKey",
+    "MembershipKey",
+    "MembershipManager",
+    "MembershipState",
+    "ReachabilityReceipt",
+    "ReplicationManager",
+    "ReplicationState",
+    "ReplicaLease",
+    "MetricsAggregator",
+    "ContentMetricsState",
+    "MetricDelta",
+    "DHTStore",
+]
diff --git a/app/core/network/dht/__pycache__/__init__.cpython-310.pyc b/app/core/network/dht/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b688479d5a1e07b12b73cc497a8c1e86046b5c1a
Binary files /dev/null and b/app/core/network/dht/__pycache__/__init__.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/config.cpython-310.pyc b/app/core/network/dht/__pycache__/config.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/config.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/crdt.cpython-310.pyc b/app/core/network/dht/__pycache__/crdt.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/crdt.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/crypto.cpython-310.pyc b/app/core/network/dht/__pycache__/crypto.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/crypto.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/keys.cpython-310.pyc b/app/core/network/dht/__pycache__/keys.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/keys.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/membership.cpython-310.pyc b/app/core/network/dht/__pycache__/membership.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/membership.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/metrics.cpython-310.pyc b/app/core/network/dht/__pycache__/metrics.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/metrics.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/prometheus.cpython-310.pyc b/app/core/network/dht/__pycache__/prometheus.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/prometheus.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/records.cpython-310.pyc b/app/core/network/dht/__pycache__/records.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/records.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/replication.cpython-310.pyc b/app/core/network/dht/__pycache__/replication.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/app/core/network/dht/__pycache__/replication.cpython-310.pyc differ
diff --git a/app/core/network/dht/__pycache__/store.cpython-310.pyc b/app/core/network/dht/__pycache__/store.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..57a229b104d07dc6eeddccc38a92f4bb0b982345
Binary files /dev/null and b/app/core/network/dht/__pycache__/store.cpython-310.pyc differ
(binary payload omitted)

literal 0
HcmV?d00001

diff --git a/app/core/network/dht/config.py b/app/core/network/dht/config.py
new file mode 100644
index 0000000..ac199a2
--- /dev/null
+++ b/app/core/network/dht/config.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import os
+from dataclasses import dataclass
+from functools import lru_cache
+
+
+SCHEMA_VERSION = "v1"
+
+
+def _env_int(name: str, default: int) -> int:
+    try:
+        return int(os.getenv(name, default))
+    except Exception:
+        return default
+
+
+def _env_float(name: str, default: float) -> float:
+    try:
+        return float(os.getenv(name, default))
+    except Exception:
+        return default
+
+
+@dataclass(frozen=True)
+class DHTConfig:
+    """Runtime configuration for the decentralized storage layer."""
+
+    schema_version: str = SCHEMA_VERSION
+    min_receipts: int = _env_int("DHT_MIN_RECEIPTS", 5)
+    min_reachability_ratio: float = _env_float("DHT_MIN_REACHABILITY", 0.6)
+    membership_ttl: int = _env_int("DHT_MEMBERSHIP_TTL", 600)
+    replication_target: int = max(3, _env_int("DHT_REPLICATION_TARGET", 3))
+    lease_ttl: int = _env_int("DHT_LEASE_TTL", 600)
+    heartbeat_interval: int = _env_int("DHT_HEARTBEAT_INTERVAL", 60)
+    heartbeat_miss_threshold: int = _env_int("DHT_HEARTBEAT_MISS_THRESHOLD", 3)
+    rendezvous_base: str = os.getenv("DHT_RENDEZVOUS_HASH", "blake3")
+    pow_difficulty: int = _env_int("DHT_POW_DIFFICULTY", 4)
+    min_asn_diversity: int = _env_int("DHT_MIN_ASN", 3)
+    min_ip_octet_diversity: int = _env_int("DHT_MIN_IP_OCTETS", 3)
+    window_size: int = _env_int("DHT_METRIC_WINDOW_SEC", 3600)
+    default_q: float = _env_float("DHT_MIN_Q", 0.6)
+    seed_refresh_interval: int = _env_int("DHT_SEED_REFRESH_INTERVAL", 30)
+
+
+@lru_cache
+def load_config() -> DHTConfig:
+    """Load configuration with process-wide memoisation."""
+
+    return DHTConfig()
+
+
+dht_config = load_config()
+
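+# Illustrative usage: each field above reads its override from the environment
+# when this module is first imported, e.g.
+#   DHT_REPLICATION_TARGET=5 -> load_config().replication_target == 5
+#   DHT_REPLICATION_TARGET=1 -> clamped to 3 by the max(3, ...) floor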
diff --git a/app/core/network/dht/crdt.py b/app/core/network/dht/crdt.py
new file mode 100644
index 0000000..de484ac
--- /dev/null
+++ b/app/core/network/dht/crdt.py
@@ -0,0 +1,278 @@
+from __future__ import annotations
+
+import math
+import time
+from dataclasses import dataclass, field
+from typing import Dict, Any, Tuple
+
+from app.core._utils.hash import blake3_hex
+
+
+class CRDTMergeError(RuntimeError):
+    pass
+
+
+class CRDT:
+    def merge(self, other: "CRDT") -> "CRDT":
+        raise NotImplementedError
+
+    def to_dict(self) -> Dict[str, Any]:
+        raise NotImplementedError
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "CRDT":
+        raise NotImplementedError
+
+
+@dataclass
+class LWWElement:
+    value: Any
+    logical_counter: int
+    timestamp: float
+    node_id: str
+
+    def dominates(self, other: "LWWElement") -> bool:
+        if self.logical_counter > other.logical_counter:
+            return True
+        if self.logical_counter < other.logical_counter:
+            return False
+        if self.timestamp > other.timestamp:
+            return True
+        if self.timestamp < other.timestamp:
+            return False
+        # Break all ties by NodeID ordering to guarantee determinism
+        return self.node_id > other.node_id
+
+
+class LWWRegister(CRDT):
+    def __init__(self, element: LWWElement | None = None):
+        self.element = element
+
+    def assign(self, value: Any, logical_counter: int, node_id: str, timestamp: float | None = None) -> None:
+        new_el = LWWElement(value=value, logical_counter=logical_counter, timestamp=timestamp or time.time(), node_id=node_id)
+        if self.element is None or new_el.dominates(self.element):
+            self.element = new_el
+
+    def merge(self, other: "LWWRegister") -> "LWWRegister":
+        if other.element and (self.element is None or other.element.dominates(self.element)):
+            self.element = other.element
+        return self
+
+    def value(self) -> Any:
+        return self.element.value if self.element else None
+
+    def to_dict(self) -> Dict[str, Any]:
+        if not self.element:
+            return {}
+        return {
+            "value": self.element.value,
+            "logical_counter": self.element.logical_counter,
+            "timestamp": self.element.timestamp,
+            "node_id": self.element.node_id,
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "LWWRegister":
+        if not data:
+            return cls()
+        element = LWWElement(
+            value=data.get("value"),
+            logical_counter=int(data["logical_counter"]),
+            timestamp=float(data["timestamp"]),
+            node_id=str(data["node_id"]),
+        )
+        return cls(element=element)
+
+
+class LWWSet(CRDT):
+    def __init__(self, adds: Dict[str, LWWElement] | None = None, removes: Dict[str, LWWElement] | None = None):
+        self.adds: Dict[str, LWWElement] = adds or {}
+        self.removes: Dict[str, LWWElement] = removes or {}
+
+    def add(self, element_id: str, value: Any, logical_counter: int, node_id: str, timestamp: float | None = None) -> None:
+        elem = LWWElement(value=value, logical_counter=logical_counter, timestamp=timestamp or time.time(), node_id=node_id)
+        existing = self.adds.get(element_id)
+        if not existing or elem.dominates(existing):
+            self.adds[element_id] = elem
+
+    def remove(self, element_id: str, logical_counter: int, node_id: str, timestamp: float | None = None) -> None:
+        elem = LWWElement(value=None, logical_counter=logical_counter, timestamp=timestamp or time.time(), node_id=node_id)
+        existing = self.removes.get(element_id)
+        if not existing or elem.dominates(existing):
+            self.removes[element_id] = elem
+
+    def lookup(self, element_id: str) -> Any | None:
+        add = self.adds.get(element_id)
+        remove = self.removes.get(element_id)
+        if add and (not remove or add.dominates(remove)):
+            return add.value
+        return None
+
+    def elements(self) -> Dict[str, Any]:
+        return {eid: elem.value for eid, elem in self.adds.items() if self.lookup(eid) is not None}
+
+    def merge(self, other: "LWWSet") -> "LWWSet":
+        for eid, elem in other.adds.items():
+            current = self.adds.get(eid)
+            if not current or elem.dominates(current):
+                self.adds[eid] = elem
+        for eid, elem in other.removes.items():
+            current = self.removes.get(eid)
+            if not current or elem.dominates(current):
+                self.removes[eid] = elem
+        return self
+
+    def to_dict(self) -> Dict[str, Any]:
+        def serialize_map(source: Dict[str, LWWElement]) -> Dict[str, Dict[str, Any]]:
+            return {
+                eid: {
+                    "value": elem.value,
+                    "logical_counter": elem.logical_counter,
+                    "timestamp": elem.timestamp,
+                    "node_id": elem.node_id,
+                }
+                for eid, elem in source.items()
+            }
+
+        return {"adds": serialize_map(self.adds), "removes": serialize_map(self.removes)}
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "LWWSet":
+        adds = {
+            eid: LWWElement(
+                value=elem.get("value"),
+                logical_counter=int(elem["logical_counter"]),
+                
timestamp=float(elem["timestamp"]), + node_id=str(elem["node_id"]), + ) + for eid, elem in (data.get("removes") or {}).items() + } + return cls(adds=adds, removes=removes) + + +class PNCounter(CRDT): + def __init__(self, increments: Dict[str, int] | None = None, decrements: Dict[str, int] | None = None): + self.increments = increments or {} + self.decrements = decrements or {} + + def increment(self, node_id: str, value: int = 1) -> None: + if value < 0: + raise ValueError("value must be non-negative for increment") + self.increments[node_id] = self.increments.get(node_id, 0) + value + + def decrement(self, node_id: str, value: int = 1) -> None: + if value < 0: + raise ValueError("value must be non-negative for decrement") + self.decrements[node_id] = self.decrements.get(node_id, 0) + value + + def value(self) -> int: + return sum(self.increments.values()) - sum(self.decrements.values()) + + def merge(self, other: "PNCounter") -> "PNCounter": + for nid, val in other.increments.items(): + self.increments[nid] = max(self.increments.get(nid, 0), val) + for nid, val in other.decrements.items(): + self.decrements[nid] = max(self.decrements.get(nid, 0), val) + return self + + def to_dict(self) -> Dict[str, Any]: + return {"inc": dict(self.increments), "dec": dict(self.decrements)} + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "PNCounter": + return cls(increments=dict(data.get("inc") or {}), decrements=dict(data.get("dec") or {})) + + +class GCounter(CRDT): + def __init__(self, counters: Dict[str, int] | None = None): + self.counters = counters or {} + + def increment(self, node_id: str, value: int = 1) -> None: + if value < 0: + raise ValueError("value must be non-negative") + self.counters[node_id] = self.counters.get(node_id, 0) + value + + def value(self) -> int: + return sum(self.counters.values()) + + def merge(self, other: "GCounter") -> "GCounter": + for nid, val in other.counters.items(): + self.counters[nid] = max(self.counters.get(nid, 0), val) + return self + + def to_dict(self) -> Dict[str, Any]: + return dict(self.counters) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "GCounter": + return cls(counters=dict(data or {})) + + +def _leading_zeros(value: int, width: int) -> int: + if value == 0: + return width + return width - value.bit_length() + + +@dataclass +class HyperLogLog(CRDT): + precision: int = 12 + registers: Tuple[int, ...] 
= field(default_factory=tuple)
+
+    def __post_init__(self) -> None:
+        if not self.registers:
+            self.registers = tuple([0] * (1 << self.precision))
+        else:
+            self.registers = tuple(self.registers)
+
+    @property
+    def m(self) -> int:
+        return len(self.registers)
+
+    def add(self, value: Any) -> None:
+        if value is None:
+            return
+        hashed = int(blake3_hex(str(value).encode()), 16)
+        index = hashed & (self.m - 1)
+        w = hashed >> self.precision
+        rank = _leading_zeros(w, 256 - self.precision) + 1
+        current = self.registers[index]
+        if rank > current:
+            regs = list(self.registers)
+            regs[index] = rank
+            self.registers = tuple(regs)
+
+    def estimate(self) -> float:
+        alpha = 0.7213 / (1 + 1.079 / self.m)
+        indicator = sum(2.0 ** (-r) for r in self.registers)
+        raw = alpha * (self.m ** 2) / indicator
+        if raw <= 2.5 * self.m:
+            zeros = self.registers.count(0)
+            if zeros:
+                return self.m * math.log(self.m / zeros)
+        return raw
+
+    def merge(self, other: "HyperLogLog") -> "HyperLogLog":
+        if self.m != other.m:
+            raise CRDTMergeError("Cannot merge HyperLogLog instances with different precision")
+        merged = [max(a, b) for a, b in zip(self.registers, other.registers)]
+        self.registers = tuple(merged)
+        return self
+
+    def to_dict(self) -> Dict[str, Any]:
+        return {"precision": self.precision, "registers": list(self.registers)}
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "HyperLogLog":
+        if not data:
+            return cls()
+        return cls(precision=int(data.get("precision", 12)), registers=tuple(int(x) for x in data.get("registers", [])))
diff --git a/app/core/network/dht/crypto.py b/app/core/network/dht/crypto.py
new file mode 100644
index 0000000..503db99
--- /dev/null
+++ b/app/core/network/dht/crypto.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Iterable, Tuple
+
+from app.core._utils.hash import blake3_hex
+
+
+BLAKE3_DIGEST_SIZE = 32
+
+
+def _ensure_bytes(data: Iterable[int] | bytes | bytearray | str) -> bytes:
+    if isinstance(data, (bytes, bytearray)):
+        return bytes(data)
+    if isinstance(data, str):
+        return data.encode()
+    return bytes(data)
+
+
+def digest_hex(data: Iterable[int] | bytes | bytearray | str) -> str:
+    return blake3_hex(_ensure_bytes(data))
+
+
+def compute_node_id(public_key: bytes) -> str:
+    """NodeID = blake3(pubkey)."""
+
+    if not isinstance(public_key, (bytes, bytearray)):
+        raise TypeError("public_key must be bytes")
+    return digest_hex(public_key)
+
+
+def compute_content_id(encrypted_blob: bytes) -> str:
+    """ContentID = blake3(encrypted_blob)."""
+
+    return digest_hex(encrypted_blob)
+
+
+def compute_view_id(content_id: str, viewer_salt: bytes) -> str:
+    """ViewID = blake3(ContentID||viewer_salt)."""
+
+    if not viewer_salt:
+        raise ValueError("viewer_salt must not be empty")
+    return digest_hex(content_id.encode() + viewer_salt)
+
+
+def bits_from_hex(hex_digest: str, prefix_bits: int) -> Tuple[int, int]:
+    """Extract first prefix_bits from a hex digest.
+    
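Example: bits_from_hex("ff00", 4) == (15, 16).
+    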
Returns (prefix, total_bits).""" + + if prefix_bits < 0: + raise ValueError("prefix_bits must be >= 0") + bitstring = bin(int(hex_digest, 16))[2:].zfill(len(hex_digest) * 4) + if prefix_bits == 0: + return 0, len(bitstring) + return int(bitstring[:prefix_bits], 2), len(bitstring) + + +def rendezvous_score(content_id: str, node_id: str) -> int: + """Return rendezvous score via blake3(ContentID||NodeID).""" + return int(blake3_hex(f"{content_id}:{node_id}".encode()), 16) + + +@dataclass(frozen=True) +class ContentFingerprint: + content_id: str + node_id_prefix: int + prefix_bits: int + + def matches(self, node_id: str) -> bool: + prefix, total = bits_from_hex(node_id, self.prefix_bits) + return prefix == self.node_id_prefix and total >= self.prefix_bits diff --git a/app/core/network/dht/keys.py b/app/core/network/dht/keys.py new file mode 100644 index 0000000..f8bcfbd --- /dev/null +++ b/app/core/network/dht/keys.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +import json +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Dict, Any + +from .config import dht_config +from .crypto import digest_hex + + +def _json_dumps(data: Dict[str, Any]) -> bytes: + return json.dumps(data, sort_keys=True, separators=(",", ":")).encode() + + +@dataclass(frozen=True) +class MetaKey: + content_id: str + schema_version: str = dht_config.schema_version + + def fingerprint(self) -> str: + return digest_hex(self.serialize()) + + def serialize(self) -> bytes: + return _json_dumps({"schema_version": self.schema_version, "content_id": self.content_id, "type": "meta"}) + + def __str__(self) -> str: + return f"meta:{self.schema_version}:{self.content_id}" + + +@dataclass(frozen=True) +class MembershipKey: + node_id: str + schema_version: str = dht_config.schema_version + + def fingerprint(self) -> str: + return digest_hex(self.serialize()) + + def serialize(self) -> bytes: + return _json_dumps({"schema_version": self.schema_version, "node_id": self.node_id, "type": "membership"}) + + def __str__(self) -> str: + return f"membership:{self.schema_version}:{self.node_id}" + + +@dataclass(frozen=True) +class MetricKey: + content_id: str + window_id: str + schema_version: str = dht_config.schema_version + + @classmethod + def window_for(cls, timestamp: float, window_size: int | None = None) -> str: + win = int(timestamp // (window_size or dht_config.window_size)) + return datetime.fromtimestamp(win * (window_size or dht_config.window_size), tz=timezone.utc).strftime("%Y%m%d%H") + + def fingerprint(self) -> str: + return digest_hex(self.serialize()) + + def serialize(self) -> bytes: + return _json_dumps( + { + "schema_version": self.schema_version, + "content_id": self.content_id, + "window_id": self.window_id, + "type": "metric", + } + ) + + def __str__(self) -> str: + return f"metric:{self.schema_version}:{self.content_id}:{self.window_id}" + diff --git a/app/core/network/dht/membership.py b/app/core/network/dht/membership.py new file mode 100644 index 0000000..12412d0 --- /dev/null +++ b/app/core/network/dht/membership.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +import ipaddress +import time +from dataclasses import dataclass +from typing import Dict, Any, Iterable, List, Optional, Tuple + +from app.core._crypto.signer import Signer +from .config import dht_config +from .crdt import LWWSet, HyperLogLog +from .keys import MembershipKey +from .store import DHTStore + + +@dataclass +class ReachabilityReceipt: + target_id: str + issuer_id: str + asn: 
Optional[int] + timestamp: float + signature: str + + def as_dict(self) -> Dict[str, Any]: + return { + "target_id": self.target_id, + "issuer_id": self.issuer_id, + "asn": self.asn, + "timestamp": self.timestamp, + "signature": self.signature, + } + + +def _ip_first_octet(host: str | None) -> Optional[int]: + if not host: + return None + try: + ip = ipaddress.ip_address(host) + return int(str(ip).split(".")[0]) + except Exception: + return None + + +class MembershipState: + def __init__(self, node_id: str, signer: Signer): + self.node_id = node_id + self.signer = signer + self.members = LWWSet() + self.receipts = LWWSet() + self.hll = HyperLogLog() + self.n_reports: Dict[str, float] = {} + self.logical_counter = 0 + + def _bump_counter(self) -> int: + self.logical_counter += 1 + return self.logical_counter + + def register_member( + self, + node_id: str, + public_key: str, + ip: str | None, + asn: Optional[int], + metadata: Dict[str, Any] | None = None, + timestamp: Optional[float] = None, + ) -> None: + payload = { + "node_id": node_id, + "public_key": public_key, + "ip": ip, + "asn": asn, + "ip_first_octet": _ip_first_octet(ip), + "meta": metadata or {}, + "last_update": timestamp or time.time(), + } + self.members.add(node_id, payload, logical_counter=self._bump_counter(), node_id=self.node_id, timestamp=timestamp) + self.hll.add(node_id) + + def forget_member(self, node_id: str) -> None: + self.members.remove(node_id, logical_counter=self._bump_counter(), node_id=self.node_id) + + def record_receipt(self, receipt: ReachabilityReceipt) -> None: + element_id = f"{receipt.target_id}:{receipt.issuer_id}" + self.receipts.add( + element_id, + receipt.as_dict(), + logical_counter=self._bump_counter(), + node_id=self.node_id, + timestamp=receipt.timestamp, + ) + + def report_local_population(self) -> None: + self.n_reports[self.node_id] = float(self.hll.estimate()) + + def merge(self, other: "MembershipState") -> "MembershipState": + self.members.merge(other.members) + self.receipts.merge(other.receipts) + self.hll.merge(other.hll) + for node_id, value in other.n_reports.items(): + self.n_reports[node_id] = max(self.n_reports.get(node_id, 0.0), value) + self.logical_counter = max(self.logical_counter, other.logical_counter) + return self + + def _unique_asn_for(self, node_id: str) -> Tuple[int, Iterable[int]]: + receipts = [ + entry + for rid, entry in self.receipts.elements().items() + if entry.get("target_id") == node_id + ] + unique_asn = {entry.get("asn") for entry in receipts if entry.get("asn") is not None} + return len(unique_asn), unique_asn + + def reachability_ratio(self, node_id: str) -> float: + unique_count, _ = self._unique_asn_for(node_id) + if dht_config.min_receipts <= 0: + return 1.0 + return min(1.0, unique_count / dht_config.min_receipts) + + def active_members(self, include_islands: bool = False) -> List[Dict[str, Any]]: + now = time.time() + result = [] + for node_id, data in self.members.elements().items(): + last_update = data.get("last_update") or 0 + if now - last_update > dht_config.membership_ttl: + continue + reachability = self.reachability_ratio(node_id) + if not include_islands and reachability < dht_config.default_q: + continue + enriched = dict(data) + enriched["reachability_ratio"] = reachability + result.append(enriched) + return result + + def n_estimate(self) -> float: + self.report_local_population() + active_ids = {m["node_id"] for m in self.active_members(include_islands=True)} + filtered_reports = [ + value for node_id, value in self.n_reports.items() 
if node_id in active_ids and self.reachability_ratio(node_id) >= dht_config.default_q + ] + local_estimate = float(self.hll.estimate()) + if filtered_reports: + return max(max(filtered_reports), local_estimate) + return local_estimate + + def to_dict(self) -> Dict[str, Any]: + return { + "members": self.members.to_dict(), + "receipts": self.receipts.to_dict(), + "hll": self.hll.to_dict(), + "reports": dict(self.n_reports), + "logical_counter": self.logical_counter, + } + + @classmethod + def from_dict(cls, node_id: str, signer: Signer, data: Dict[str, Any]) -> "MembershipState": + inst = cls(node_id=node_id, signer=signer) + if data: + inst.members = LWWSet.from_dict(data.get("members") or {}) + inst.receipts = LWWSet.from_dict(data.get("receipts") or {}) + inst.hll = HyperLogLog.from_dict(data.get("hll") or {}) + inst.n_reports = {str(k): float(v) for k, v in (data.get("reports") or {}).items()} + inst.logical_counter = int(data.get("logical_counter") or 0) + return inst + + +class MembershipManager: + def __init__(self, node_id: str, signer: Signer, store: DHTStore): + self.node_id = node_id + self.signer = signer + self.store = store + self.state = MembershipState(node_id=node_id, signer=signer) + + def _merge_remote(self, data: Dict[str, Any]) -> None: + remote_state = MembershipState.from_dict(self.node_id, self.signer, data) + self.state.merge(remote_state) + + def ingest_snapshot(self, payload: Dict[str, Any]) -> None: + self._merge_remote(payload) + + def register_local(self, public_key: str, ip: str | None, asn: Optional[int], metadata: Dict[str, Any] | None = None) -> None: + self.state.register_member(self.node_id, public_key=public_key, ip=ip, asn=asn, metadata=metadata) + self._persist() + + def update_member(self, node_id: str, **kwargs) -> None: + meta = kwargs.get("metadata") or {} + self.state.register_member( + node_id, + public_key=kwargs.get("public_key", meta.get("public_key")), + ip=kwargs.get("ip"), + asn=kwargs.get("asn"), + metadata=meta, + ) + self._persist() + + def remove_member(self, node_id: str) -> None: + self.state.forget_member(node_id) + self._persist() + + def record_receipt(self, receipt: ReachabilityReceipt) -> None: + self.state.record_receipt(receipt) + self._persist() + + def _persist(self) -> None: + key = MembershipKey(node_id=self.node_id) + self.store.put( + key=str(key), + fingerprint=key.fingerprint(), + value=self.state.to_dict(), + logical_counter=self.state.logical_counter, + merge_strategy=lambda a, b: MembershipState.from_dict(self.node_id, self.signer, a) + .merge(MembershipState.from_dict(self.node_id, self.signer, b)) + .to_dict(), + ) + + def n_estimate(self) -> float: + return self.state.n_estimate() + + def active_members(self) -> List[Dict[str, Any]]: + return self.state.active_members() + diff --git a/app/core/network/dht/metrics.py b/app/core/network/dht/metrics.py new file mode 100644 index 0000000..4dbeae3 --- /dev/null +++ b/app/core/network/dht/metrics.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +import time +from dataclasses import dataclass +from typing import Dict, Any, Optional + +from app.core._crypto.signer import Signer +from .config import dht_config +from .crdt import PNCounter, GCounter, HyperLogLog +from .crypto import compute_view_id +from .keys import MetricKey +from .store import DHTStore +from .prometheus import update_view_metrics + + +@dataclass +class MetricDelta: + content_id: str + view_id: str + watch_time: int + bytes_out: int + completed: bool + timestamp: float + + def as_dict(self) -> 
Dict[str, Any]: + return { + "content_id": self.content_id, + "view_id": self.view_id, + "watch_time": self.watch_time, + "bytes_out": self.bytes_out, + "completed": self.completed, + "timestamp": self.timestamp, + } + + +class ContentMetricsState: + def __init__(self, node_id: str): + self.node_id = node_id + self.views = PNCounter() + self.unique = HyperLogLog() + self.watch_time = GCounter() + self.bytes_out = GCounter() + self.completions = GCounter() + self.logical_counter = 0 + + def apply(self, delta: MetricDelta) -> None: + self.logical_counter += 1 + self.views.increment(self.node_id, 1) + self.unique.add(delta.view_id) + if delta.watch_time: + self.watch_time.increment(self.node_id, delta.watch_time) + if delta.bytes_out: + self.bytes_out.increment(self.node_id, delta.bytes_out) + if delta.completed: + self.completions.increment(self.node_id, 1) + + def merge(self, other: "ContentMetricsState") -> "ContentMetricsState": + self.views.merge(other.views) + self.unique.merge(other.unique) + self.watch_time.merge(other.watch_time) + self.bytes_out.merge(other.bytes_out) + self.completions.merge(other.completions) + self.logical_counter = max(self.logical_counter, other.logical_counter) + return self + + def to_dict(self) -> Dict[str, Any]: + return { + "views": self.views.to_dict(), + "unique": self.unique.to_dict(), + "watch_time": self.watch_time.to_dict(), + "bytes_out": self.bytes_out.to_dict(), + "completions": self.completions.to_dict(), + "logical_counter": self.logical_counter, + } + + @classmethod + def from_dict(cls, node_id: str, data: Dict[str, Any]) -> "ContentMetricsState": + inst = cls(node_id=node_id) + if data: + inst.views = PNCounter.from_dict(data.get("views") or {}) + inst.unique = HyperLogLog.from_dict(data.get("unique") or {}) + inst.watch_time = GCounter.from_dict(data.get("watch_time") or {}) + inst.bytes_out = GCounter.from_dict(data.get("bytes_out") or {}) + inst.completions = GCounter.from_dict(data.get("completions") or {}) + inst.logical_counter = int(data.get("logical_counter") or 0) + return inst + + +class MetricsAggregator: + def __init__(self, node_id: str, signer: Signer, store: DHTStore): + self.node_id = node_id + self.signer = signer + self.store = store + + def _load(self, content_id: str, window_id: str) -> ContentMetricsState: + key = MetricKey(content_id=content_id, window_id=window_id) + record = self.store.get(key.fingerprint()) + if record: + return ContentMetricsState.from_dict(self.node_id, record.value) + return ContentMetricsState(node_id=self.node_id) + + def _persist(self, content_id: str, window_id: str, state: ContentMetricsState) -> None: + key = MetricKey(content_id=content_id, window_id=window_id) + self.store.put( + key=str(key), + fingerprint=key.fingerprint(), + value=state.to_dict(), + logical_counter=state.logical_counter, + merge_strategy=lambda a, b: ContentMetricsState.from_dict(self.node_id, a) + .merge(ContentMetricsState.from_dict(self.node_id, b)) + .to_dict(), + ) + update_view_metrics( + content_id=content_id, + window_id=window_id, + views=state.views.value(), + unique=state.unique.estimate(), + watch_time=state.watch_time.value(), + ) + + def record_view( + self, + content_id: str, + viewer_salt: bytes, + watch_time: int, + bytes_out: int, + completed: bool, + timestamp: Optional[float] = None, + ) -> MetricDelta: + ts = time.time() if timestamp is None else timestamp + window_id = MetricKey.window_for(ts) + view_id = compute_view_id(content_id, viewer_salt) + state = self._load(content_id, window_id) + delta = 
MetricDelta( + content_id=content_id, + view_id=view_id, + watch_time=watch_time, + bytes_out=bytes_out, + completed=completed, + timestamp=ts, + ) + state.apply(delta) + self._persist(content_id, window_id, state) + return delta diff --git a/app/core/network/dht/prometheus.py b/app/core/network/dht/prometheus.py new file mode 100644 index 0000000..d12f34f --- /dev/null +++ b/app/core/network/dht/prometheus.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +try: + from prometheus_client import Counter, Gauge # type: ignore +except Exception: + class _Metric: + def __init__(self, *_, **__): + self._values = {} + def labels(self, **kwargs): + key = tuple(sorted(kwargs.items())) + class H: + def __init__(self, parent, k): + self._p = parent; self._k = k + def inc(self, v: float = 1.0): + self._p._values[self._k] = self._p._values.get(self._k, 0.0) + v + def set(self, v: float): + self._p._values[self._k] = v + return H(self, key) + class Counter(_Metric): + pass + class Gauge(_Metric): + pass + + +replication_under = Counter("dht_replication_under", "Times replication fell below target", ["content_id"]) +replication_over = Counter("dht_replication_over", "Times replication exceeded target", ["content_id"]) +leader_changes = Counter("dht_leader_changes_total", "Count of leader changes per content", ["content_id"]) +merge_conflicts = Counter("dht_merge_conflicts_total", "Number of DHT merge conflicts", ["key"]) +view_count_total = Gauge("dht_view_count_total", "Total content views per window", ["content_id", "window"]) +unique_estimate = Gauge("dht_unique_view_estimate", "Estimated unique viewers per window", ["content_id", "window"]) +watch_time_seconds = Gauge("dht_watch_time_seconds", "Aggregate watch time per window", ["content_id", "window"]) + + +def record_replication_under(content_id: str, have: int) -> None: + replication_under.labels(content_id=content_id).inc() + + +def record_replication_over(content_id: str, have: int) -> None: + replication_over.labels(content_id=content_id).inc() + + +def record_leader_change(content_id: str) -> None: + leader_changes.labels(content_id=content_id).inc() + + +def record_merge_conflict(key: str) -> None: + merge_conflicts.labels(key=key).inc() + + +def update_view_metrics(content_id: str, window_id: str, views: int, unique: float, watch_time: int) -> None: + view_count_total.labels(content_id=content_id, window=window_id).set(views) + unique_estimate.labels(content_id=content_id, window=window_id).set(unique) + watch_time_seconds.labels(content_id=content_id, window=window_id).set(watch_time) diff --git a/app/core/network/dht/records.py b/app/core/network/dht/records.py new file mode 100644 index 0000000..d5d5b81 --- /dev/null +++ b/app/core/network/dht/records.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +import json +import time +from dataclasses import dataclass, field +from typing import Callable, Dict, Any, Tuple + +from app.core._utils.b58 import b58decode, b58encode + +try: + import nacl.signing + import nacl.encoding + _HAS_NACL = True +except Exception: # pragma: no cover - fallback path + _HAS_NACL = False + +from app.core._utils.hash import blake3_hex + +from app.core._crypto.signer import Signer +from .config import dht_config + + +def _serialize_for_signature(payload: Dict[str, Any]) -> bytes: + return json.dumps(payload, sort_keys=True, separators=(",", ":")).encode() + + +def _dominance_tuple(logical_counter: int, timestamp: float, node_id: str) -> Tuple[int, float, str]: + return logical_counter, timestamp, 
node_id
+
+
+def latest_wins_merge(a: Dict[str, Any], b: Dict[str, Any]) -> Dict[str, Any]:
+    return a  # merge_strategy receives (dominant, subordinate): latest wins
+
+
+@dataclass
+class DHTRecord:
+    fingerprint: str
+    key: str
+    value: Dict[str, Any]
+    logical_counter: int
+    timestamp: float
+    node_id: str
+    schema_version: str = field(default=dht_config.schema_version)
+    signature: str | None = None
+
+    def dominance(self) -> Tuple[int, float, str]:
+        return _dominance_tuple(self.logical_counter, self.timestamp, self.node_id)
+
+    def dominates(self, other: "DHTRecord") -> bool:
+        return self.dominance() > other.dominance()
+
+    def to_payload(self) -> Dict[str, Any]:
+        return {
+            "fingerprint": self.fingerprint,
+            "key": self.key,
+            "schema_version": self.schema_version,
+            "logical_counter": self.logical_counter,
+            "timestamp": self.timestamp,
+            "node_id": self.node_id,
+            "value": self.value,
+        }
+
+    def sign(self, signer: Signer) -> "DHTRecord":
+        blob = _serialize_for_signature(self.to_payload())
+        self.signature = signer.sign(blob)
+        return self
+
+    def verify(self, public_key_b58: str) -> bool:
+        if not self.signature:
+            return False
+        payload = _serialize_for_signature(self.to_payload())
+        if _HAS_NACL:
+            try:
+                vk = nacl.signing.VerifyKey(b58decode(public_key_b58))
+                vk.verify(payload, b58decode(self.signature))
+                return True
+            except Exception:
+                return False
+        expected = b58encode(bytes.fromhex(blake3_hex(b58decode(public_key_b58) + payload))).decode()
+        return expected == self.signature
+
+    def merge(self, other: "DHTRecord", merge_strategy: Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]) -> "DHTRecord":
+        if self.fingerprint != other.fingerprint:
+            raise ValueError("Cannot merge records with different keys")
+
+        if self.dominates(other):
+            dominant, subordinate = self, other
+        elif other.dominates(self):
+            dominant, subordinate = other, self
+        else:
+            # Perfect tie: fall back to lexicographic NodeID order so the outcome stays deterministic
+            if self.node_id >= other.node_id:
+                dominant, subordinate = self, other
+            else:
+                dominant, subordinate = other, self
+
+        merged_value = merge_strategy(dominant.value, subordinate.value)
+        merged = DHTRecord(
+            fingerprint=self.fingerprint,
+            key=self.key,
+            value=merged_value,
+            logical_counter=dominant.logical_counter,
+            timestamp=max(self.timestamp, other.timestamp),
+            node_id=dominant.node_id,
+            schema_version=self.schema_version,
+        )
+        return merged
+
+    @classmethod
+    def create(
+        cls,
+        key: str,
+        fingerprint: str,
+        value: Dict[str, Any],
+        node_id: str,
+        logical_counter: int,
+        signature: str | None = None,
+        timestamp: float | None = None,
+    ) -> "DHTRecord":
+        return cls(
+            fingerprint=fingerprint,
+            key=key,
+            value=value,
+            logical_counter=logical_counter,
+            timestamp=timestamp or time.time(),
+            node_id=node_id,
+            signature=signature,
+        )
diff --git a/app/core/network/dht/replication.py b/app/core/network/dht/replication.py
new file mode 100644
index 0000000..cd24457
--- /dev/null
+++ b/app/core/network/dht/replication.py
@@ -0,0 +1,311 @@
+from __future__ import annotations
+
+import math
+import time
+from dataclasses import dataclass, field
+from typing import Dict, Any, List, Optional, Tuple
+
+from app.core._crypto.signer import Signer
+from .config import dht_config
+from .crypto import bits_from_hex, rendezvous_score
+from .keys import MetaKey
+from .membership import MembershipState
+from .prometheus import record_replication_under, record_replication_over, record_leader_change
+from .store import DHTStore
+
+
+def _now() -> float:
+    return time.time()
+
+
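+# Worked example (illustrative): with n_estimate ~= 1000 active nodes and
+# replication_target == 3, ensure_replication() computes
+#   p = round(log2(1000 / 3)) == 8,
+# scoping responsibility to nodes whose NodeID shares the first 8 bits of the
+# ContentID digest -- on average 1000 / 2**8 ~= 4 candidates, ranked by
+# rendezvous_score() when leases are assigned.
+
+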
+@dataclass +class ReplicaLease: + node_id: str + lease_id: str + issued_at: float + expires_at: float + asn: Optional[int] + ip_first_octet: Optional[int] + heartbeat_at: float + score: int + + def renew(self, now: float) -> None: + self.heartbeat_at = now + self.expires_at = now + dht_config.lease_ttl + + def is_expired(self, now: float) -> bool: + if now >= self.expires_at: + return True + if now - self.heartbeat_at > dht_config.heartbeat_interval * dht_config.heartbeat_miss_threshold: + return True + return False + + def to_dict(self) -> Dict[str, Any]: + return { + "node_id": self.node_id, + "lease_id": self.lease_id, + "issued_at": self.issued_at, + "expires_at": self.expires_at, + "asn": self.asn, + "ip_first_octet": self.ip_first_octet, + "heartbeat_at": self.heartbeat_at, + "score": self.score, + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ReplicaLease": + return cls( + node_id=str(data["node_id"]), + lease_id=str(data["lease_id"]), + issued_at=float(data["issued_at"]), + expires_at=float(data["expires_at"]), + asn=data.get("asn"), + ip_first_octet=data.get("ip_first_octet"), + heartbeat_at=float(data.get("heartbeat_at", data.get("issued_at"))), + score=int(data.get("score", 0)), + ) + + +@dataclass +class ReplicationState: + content_id: str + leases: Dict[str, ReplicaLease] = field(default_factory=dict) + leader: Optional[str] = None + revision: int = 0 + conflict_log: List[Dict[str, Any]] = field(default_factory=list) + + def prune(self, now: float) -> None: + for lease_id, lease in list(self.leases.items()): + if lease.is_expired(now): + self.conflict_log.append( + {"type": "LEASE_EXPIRED", "node_id": lease.node_id, "ts": now} + ) + del self.leases[lease_id] + + def assign(self, lease: ReplicaLease) -> None: + self.leases[lease.lease_id] = lease + self.revision += 1 + + def remove_node(self, node_id: str, reason: str, timestamp: float) -> None: + for lease_id, lease in list(self.leases.items()): + if lease.node_id == node_id: + del self.leases[lease_id] + self.conflict_log.append({"type": reason, "node_id": node_id, "ts": timestamp}) + self.revision += 1 + + def heartbeat(self, node_id: str, now: float) -> bool: + found = False + for lease in self.leases.values(): + if lease.node_id == node_id: + lease.renew(now) + found = True + return found + + def unique_asn(self) -> int: + return len({lease.asn for lease in self.leases.values() if lease.asn is not None}) + + def unique_octets(self) -> int: + return len({lease.ip_first_octet for lease in self.leases.values() if lease.ip_first_octet is not None}) + + def diversity_satisfied(self) -> bool: + if len(self.leases) < dht_config.replication_target: + return False + if self.unique_asn() < dht_config.min_asn_diversity: + return False + if self.unique_octets() < dht_config.min_ip_octet_diversity: + return False + return True + + def to_dict(self) -> Dict[str, Any]: + return { + "content_id": self.content_id, + "leader": self.leader, + "revision": self.revision, + "replica_leases": {lease_id: lease.to_dict() for lease_id, lease in self.leases.items()}, + "conflict_log": list(self.conflict_log)[-100:], # keep tail + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ReplicationState": + state = cls(content_id=str(data.get("content_id", ""))) + state.leader = data.get("leader") + state.revision = int(data.get("revision", 0)) + leases_raw = data.get("replica_leases") or {} + for lease_id, payload in leases_raw.items(): + state.leases[lease_id] = ReplicaLease.from_dict(payload) + state.conflict_log = 
list(data.get("conflict_log") or [])
+        return state
+
+    def merge_with(self, other: "ReplicationState") -> "ReplicationState":
+        combined = _merge_states(self, other)
+        return combined
+
+
+class ReplicationManager:
+    def __init__(self, node_id: str, signer: Signer, store: DHTStore):
+        self.node_id = node_id
+        self.signer = signer
+        self.store = store
+
+    def _load_state(self, content_id: str) -> ReplicationState:
+        key = MetaKey(content_id=content_id)
+        record = self.store.get(key.fingerprint())
+        if record:
+            return ReplicationState.from_dict(record.value)
+        return ReplicationState(content_id=content_id)
+
+    def _persist_state(self, state: ReplicationState) -> None:
+        key = MetaKey(content_id=state.content_id)
+        self.store.put(
+            key=str(key),
+            fingerprint=key.fingerprint(),
+            value=state.to_dict(),
+            logical_counter=int(time.time()),
+            merge_strategy=lambda a, b: ReplicationState.from_dict(a)
+            .merge_with(ReplicationState.from_dict(b))
+            .to_dict(),
+        )
+
+    def ensure_replication(self, content_id: str, membership: MembershipState, now: Optional[float] = None) -> ReplicationState:
+        now = now or _now()
+        state = self._load_state(content_id)
+
+        n_estimate = max(1.0, membership.n_estimate())
+        p_value = max(0, round(math.log2(max(n_estimate / dht_config.replication_target, 1.0))))
+        prefix, _ = bits_from_hex(content_id, p_value)
+
+        active = membership.active_members(include_islands=True)
+        responsible = []
+        for member in active:
+            node_prefix, _total = bits_from_hex(member["node_id"], p_value)
+            if node_prefix == prefix:
+                responsible.append(member)
+        if not responsible:
+            responsible = active  # fall back to all active nodes
+        responsible.sort(key=lambda item: item["node_id"])
+        leader_id = responsible[0]["node_id"] if responsible else None
+        previous_leader = state.leader
+        state.leader = leader_id
+        if previous_leader and leader_id and previous_leader != leader_id:
+            record_leader_change(content_id)
+
+        if leader_id != self.node_id:
+            return state  # Only the leader mutates state
+
+        state.prune(now)
+
+        # evaluate diversity
+        leases_by_node = {lease.node_id: lease for lease in state.leases.values()}
+        if not state.diversity_satisfied():
+            def rank(members):
+                return sorted(
+                    (
+                        (
+                            rendezvous_score(content_id, m["node_id"]),
+                            m["node_id"],
+                            m.get("asn"),
+                            m.get("ip_first_octet"),
+                        )
+                        for m in members
+                    ),
+                    key=lambda item: item[0],
+                    reverse=True,  # rendezvous hashing: the highest score is preferred
+                )
+
+            def assign_with_diversity(candidates):
+                added = 0
+                # Phase 1: prefer candidates that increase ASN/IP octet diversity
+                for score, node_id, asn, ip_octet in candidates:
+                    if node_id in leases_by_node:
+                        continue
+                    before_asn = state.unique_asn()
+                    before_oct = state.unique_octets()
+                    if ((asn is not None and before_asn < dht_config.min_asn_diversity) or
+                            (ip_octet is not None and before_oct < dht_config.min_ip_octet_diversity)):
+                        lease = ReplicaLease(
+                            node_id=node_id,
+                            lease_id=f"{content_id}:{node_id}",
+                            issued_at=now,
+                            expires_at=now + dht_config.lease_ttl,
+                            asn=asn,
+                            ip_first_octet=ip_octet,
+                            heartbeat_at=now,
+                            score=score,
+                        )
+                        state.assign(lease)
+                        leases_by_node[node_id] = lease
+                        added += 1
+                        if state.diversity_satisfied():
+                            return added
+                # Phase 2: fill by score until target
+                for score, node_id, asn, ip_octet in candidates:
+                    if node_id in leases_by_node:
+                        continue
+                    lease = ReplicaLease(
+                        node_id=node_id,
+                        lease_id=f"{content_id}:{node_id}",
+                        issued_at=now,
+                        expires_at=now + dht_config.lease_ttl,
+                        asn=asn,
+                        ip_first_octet=ip_octet,
+                        heartbeat_at=now,
+                        score=score,
+                    )
+                    state.assign(lease)
+                    
leases_by_node[node_id] = lease + added += 1 + if state.diversity_satisfied(): + return added + return added + + # First, prefer responsible set + assign_with_diversity(rank(responsible)) + + # If under target, add more from the rest of active nodes + if not state.diversity_satisfied(): + rest = [m for m in active if m["node_id"] not in {n for _, n, *_ in rank(responsible)}] + assign_with_diversity(rank(rest)) + + # Ensure we do not exceed replication target with duplicates + if len(state.leases) > dht_config.replication_target: + # Drop lowest scoring leases until target satisfied while preserving diversity criteria + sorted_leases = sorted(state.leases.values(), key=lambda lease: lease.score, reverse=True) + while len(sorted_leases) > dht_config.replication_target: + victim = sorted_leases.pop() # lowest score + state.remove_node(victim.node_id, reason="OVER_REPLICATED", timestamp=now) + record_replication_over(content_id, len(sorted_leases)) + + if len(state.leases) < dht_config.replication_target: + state.conflict_log.append( + {"type": "UNDER_REPLICATED", "ts": now, "have": len(state.leases)} + ) + record_replication_under(content_id, len(state.leases)) + + self._persist_state(state) + return state + + def heartbeat(self, content_id: str, node_id: str, now: Optional[float] = None) -> bool: + now = now or _now() + state = self._load_state(content_id) + if state.heartbeat(node_id, now): + self._persist_state(state) + return True + return False + + +def _merge_states(left: ReplicationState, right: ReplicationState) -> ReplicationState: + # Combine leases preferring latest expiry + lease_map: Dict[str, ReplicaLease] = {} + for state in (left, right): + for lease_id, lease in state.leases.items(): + current = lease_map.get(lease_id) + if current is None or lease.expires_at > current.expires_at: + lease_map[lease_id] = lease + merged = ReplicationState(content_id=left.content_id or right.content_id) + merged.leader = min(filter(None, [left.leader, right.leader]), default=None) + merged.conflict_log = (left.conflict_log + right.conflict_log)[-100:] + merged.leases = lease_map + merged.revision = max(left.revision, right.revision) + 1 + return merged + + +# Inject helper onto ReplicationState for merge strategy diff --git a/app/core/network/dht/store.py b/app/core/network/dht/store.py new file mode 100644 index 0000000..c9c3f54 --- /dev/null +++ b/app/core/network/dht/store.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from typing import Callable, Dict, Any, Optional + +from app.core._crypto.signer import Signer +from .prometheus import record_merge_conflict +from .records import DHTRecord, latest_wins_merge + + +class DHTStore: + """In-memory DHT replica with deterministic merge semantics.""" + + def __init__(self, node_id: str, signer: Signer): + self.node_id = node_id + self.signer = signer + self._records: Dict[str, DHTRecord] = {} + + def get(self, fingerprint: str) -> Optional[DHTRecord]: + return self._records.get(fingerprint) + + def put( + self, + key: str, + fingerprint: str, + value: Dict[str, Any], + logical_counter: int, + merge_strategy: Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] = latest_wins_merge, + ) -> DHTRecord: + record = DHTRecord.create( + key=key, + fingerprint=fingerprint, + value=value, + node_id=self.node_id, + logical_counter=logical_counter, + ).sign(self.signer) + return self.merge_record(record, merge_strategy) + + def merge_record( + self, + incoming: DHTRecord, + merge_strategy: Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, 
Any]] = latest_wins_merge, + ) -> DHTRecord: + current = self._records.get(incoming.fingerprint) + if current is None: + self._records[incoming.fingerprint] = incoming + return incoming + if current.value != incoming.value: + record_merge_conflict(incoming.key) + merged = current.merge(incoming, merge_strategy) + # Debug instrumentation for tests + # print('merge', incoming.key, merged.value) + merged.sign(self.signer) + self._records[incoming.fingerprint] = merged + return merged + + def snapshot(self) -> Dict[str, Dict[str, Any]]: + return {fp: record.to_payload() | {"signature": record.signature} for fp, record in self._records.items()} diff --git a/app/core/network/handshake.py b/app/core/network/handshake.py index b33187e..20991bf 100644 --- a/app/core/network/handshake.py +++ b/app/core/network/handshake.py @@ -8,7 +8,7 @@ import shutil import secrets from typing import Dict, Any -from base58 import b58encode +from app.core._utils.b58 import b58encode from sqlalchemy import select from app.core._secrets import hot_pubkey, hot_seed @@ -17,6 +17,7 @@ from app.core.logger import make_log from app.core.models.my_network import KnownNode from app.core.models.node_storage import StoredContent from app.core.storage import db_session +from app.core.network.dht import compute_node_id, dht_config from .constants import CURRENT_PROTOCOL_VERSION from .nodes import list_known_public_nodes from .config import ( @@ -93,7 +94,9 @@ async def build_handshake_payload(session) -> Dict[str, Any]: ipfs_payload = await _local_ipfs_payload() payload = { "version": CURRENT_PROTOCOL_VERSION, + "schema_version": dht_config.schema_version, "public_key": b58encode(hot_pubkey).decode(), + "node_id": compute_node_id(hot_pubkey), # public_host is optional for private nodes **({"public_host": PUBLIC_HOST} if PUBLIC_HOST else {}), "node_type": NODE_PRIVACY if NODE_PRIVACY != NODE_TYPE_PUBLIC else NODE_TYPE_PUBLIC, @@ -122,6 +125,8 @@ async def compute_node_info(session) -> Dict[str, Any]: node_info = { "id": b58encode(hot_pubkey).decode(), "public_key": b58encode(hot_pubkey).decode(), + "schema_version": dht_config.schema_version, + "node_id": compute_node_id(hot_pubkey), **({"public_host": PUBLIC_HOST} if PUBLIC_HOST else {}), "version": CURRENT_PROTOCOL_VERSION, "node_type": NODE_PRIVACY, diff --git a/app/core/network/maintenance.py b/app/core/network/maintenance.py new file mode 100644 index 0000000..2ef49cf --- /dev/null +++ b/app/core/network/maintenance.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import asyncio + +from sqlalchemy import select + +from app.core.logger import make_log +from app.core.models.node_storage import StoredContent +from app.core.network.dht import dht_config +from app.core.storage import db_session + + +async def replication_daemon(app): + await asyncio.sleep(5) + memory = getattr(app.ctx, "memory", None) + if not memory: + make_log("Replication", "No memory context; replication daemon exiting", level="warning") + return + make_log("Replication", "daemon started") + while True: + try: + membership_state = memory.membership.state + async with db_session(auto_commit=False) as session: + rows = await session.execute(select(StoredContent.hash)) + content_hashes = [row[0] for row in rows.all()] + for content_hash in content_hashes: + try: + state = memory.replication.ensure_replication(content_hash, membership_state) + memory.replication.heartbeat(content_hash, memory.node_id) + make_log("Replication", f"Replicated {content_hash} leader={state.leader}", level="debug") + except 
Exception as exc: + make_log("Replication", f"ensure failed for {content_hash}: {exc}", level="warning") + except Exception as exc: + make_log("Replication", f"daemon iteration failed: {exc}", level="error") + await asyncio.sleep(dht_config.heartbeat_interval) + + +async def heartbeat_daemon(app): + await asyncio.sleep(dht_config.heartbeat_interval // 2) + memory = getattr(app.ctx, "memory", None) + if not memory: + return + while True: + try: + async with db_session(auto_commit=False) as session: + rows = await session.execute(select(StoredContent.hash)) + content_hashes = [row[0] for row in rows.all()] + for content_hash in content_hashes: + memory.replication.heartbeat(content_hash, memory.node_id) + except Exception as exc: + make_log("Replication", f"heartbeat failed: {exc}", level="warning") + await asyncio.sleep(dht_config.heartbeat_interval) diff --git a/app/core/network/nodes.py b/app/core/network/nodes.py index 0a71dc0..3142206 100644 --- a/app/core/network/nodes.py +++ b/app/core/network/nodes.py @@ -14,6 +14,7 @@ from app.core.models.my_network import KnownNode from app.core.storage import db_session from app.core._secrets import hot_pubkey from app.core.ipfs_client import swarm_connect +from app.core.network.dht import ReachabilityReceipt from .config import ( HANDSHAKE_INTERVAL_SEC, UNSUPPORTED_RECHECK_INTERVAL_SEC, @@ -203,7 +204,7 @@ async def pick_next_node(session) -> Optional[KnownNode]: return None -async def perform_handshake_round(): +async def perform_handshake_round(memory=None): async with db_session(auto_commit=True) as session: # Private nodes still do outbound handshakes; inbound typically unreachable without public endpoint node = await pick_next_node(session) @@ -238,6 +239,40 @@ async def perform_handshake_round(): node.meta = {**(node.meta or {}), "last_response": resp, "fail_count": 0, "ipfs": node_ipfs_meta} await session.commit() make_log("Handshake", f"Handshake OK with {base_url}") + if memory and resp: + try: + membership_mgr = getattr(memory, "membership", None) + if membership_mgr: + remote_node = (resp or {}).get("node") or {} + remote_node_id = remote_node.get("node_id") + if remote_node_id: + remote_asn = remote_node.get("asn") + membership_mgr.update_member( + node_id=remote_node_id, + public_key=remote_node.get("public_key") or "", + ip=node.ip, + asn=int(remote_asn) if remote_asn is not None else None, + metadata={ + "public_host": remote_node.get("public_host"), + "capabilities": remote_node.get("capabilities") or {}, + }, + ) + for receipt in (resp or {}).get("reachability_receipts") or []: + target = receipt.get("target_id") + issuer = receipt.get("issuer_id") + if not target or not issuer: + continue + membership_mgr.record_receipt( + ReachabilityReceipt( + target_id=str(target), + issuer_id=str(issuer), + asn=int(receipt["asn"]) if receipt.get("asn") is not None else None, + timestamp=float(receipt.get("timestamp", 0)), + signature=str(receipt.get("signature", "")), + ) + ) + except Exception as exc: + make_log("Handshake", f"Membership merge failed: {exc}", level="warning") except Exception as e: make_log("Handshake", f"Handshake failed with {base_url}: {e}", level='warning') # Record incident-lite in meta @@ -259,7 +294,7 @@ async def network_handshake_daemon(app): make_log("Handshake", f"Static IPFS peering failed: {exc}", level='warning') while True: try: - await perform_handshake_round() + await perform_handshake_round(getattr(app, "ctx", None) and getattr(app.ctx, "memory", None)) except Exception as e: make_log("Handshake", f"Round 
error: {e}", level='error') await asyncio.sleep(HANDSHAKE_INTERVAL_SEC) diff --git a/docs/indexation.md b/docs/indexation.md deleted file mode 100644 index e527529..0000000 --- a/docs/indexation.md +++ /dev/null @@ -1,110 +0,0 @@ -## Indexation - -### Stored content types - -- `local/content_bin` – binary content stored only locally (or indexer no found it on chain) -- `onchain/content` - content stored onchain -- `onchain/content_unknown` - content stored onchain, but we don't have a private key to decrypt it - -Content item may have multiple types, for example, `local/content_bin` and `onchain/content`. - -But `content cover`, `content metadata` and `decrypted content` always stored locally. - -### Content Ownership Proof NFT Values Cell Deserialization - -```text -values:^[ - content_hash:uint256 - metadata:^[ - offchain?:int1 = always 1 - https://my-public-node-1.projscale.dev/*:bytes - ] - content:^[ - content_cid:^Cell = b58encoded CID - cover_cid:^Cell = b58encoded CID - metadata_cid:^Cell = b58encoded CID - ] -] -``` - -### Available content statuses - -- `UPLOAD_TO_BTFS` – content is stored locally, upload all content parts to BTFS. This status means that payment is received yet. - - -### Upload content flow - -1. User uploads content to server (/api/v1/storage) -2. User uploads content cover to server (/api/v1/storage) -3. User send /api/v1/blockchain.sendNewContentMessage to server and accept the transaction in wallet -4. Indexer receives the transaction and indexes the content. And send telegram notification to user. -# Network Index & Sync (v3) - -This document describes the simplified, production‑ready stack for content discovery and sync: - -- Upload via tus → stream encrypt (ENCF v1, AES‑256‑GCM, 1 MiB chunks) → `ipfs add --cid-version=1 --raw-leaves --chunker=size-1048576 --pin`. -- Public index exposes only encrypted sources (CID) and safe metadata; no plaintext ids. -- Nodes full‑sync by pinning encrypted CIDs; keys are auto‑granted to trusted peers for preview/full access. - -## ENCF v1 (Encrypted Content Format) - -Unencrypted header and framed body; same bytes on all nodes ⇒ stable CID. - -Header (all big endian): - -``` -MAGIC(4): 'ENCF' -VER(1): 0x01 -SCHEME(1): 0x03 = AES_GCM (0x01 AES_GCM_SIV legacy, 0x02 AES_SIV legacy) -CHUNK(4): plaintext chunk bytes (1048576) -SALT_LEN(1) -SALT(N) -RESERVED(5): zeros -``` - -Body: repeated frames `[p_len:4][cipher][tag(16)]` where `p_len <= CHUNK` for last frame. - -AES‑GCM (scheme `0x03`) encrypts each frame with deterministic `nonce = HMAC_SHA256(salt, u64(frame_idx))[:12]`. Legacy scheme `0x01` keeps AES‑GCM‑SIV with the same nonce derivation. - -For new uploads (v2025-09), the pipeline defaults to AES‑256‑GCM. Legacy AES‑GCM‑SIV/AES‑SIV content is still readable — the decoder auto-detects the scheme byte. - -### Local encryption/decryption helpers - -``` -python -m app.core.crypto.cli encrypt --input demo.wav --output demo.encf \ - --key AAAAEyHSVws5O8JGrg3kUSVtk5dQSc5x5e7jh0S2WGE= --salt-bytes 16 - -python -m app.core.crypto.cli decrypt --input demo.encf --output demo.wav \ - --wrapped-key -``` - -Because we use standard AES‑GCM, you can also re-hydrate frames manually with tools like `openssl aes-256-gcm`. The header exposes `chunk_bytes` and salt; derive the per-frame nonce via `HMAC_SHA256(salt, idx)` where `idx` is the frame number (0-based) and feed the 12-byte prefix as IV. - -## API - -- `GET /api/v1/content.index` → `{ items:[...], schema, ETag }` with signed items. 
-## API
-
-- `GET /api/v1/content.index` → `{ items: [...], schema, ETag }` with signed items.
-- `GET /api/v1/content.delta?since=ISO8601` → `{ items: [...], next_since, schema }` with an ETag.
-- `POST /api/v1/sync.pin` (NodeSig required) → queue/pin a CID.
-- `POST /api/v1/keys.request` (NodeSig required) → a sealed DEK for trusted peers.
-- `GET /api/v1/content.derivatives?cid=` → derivatives that are ready locally (low/high/preview).
-
-## NodeSig
-
-Canonical string:
-
-```
-METHOD\nPATH\nSHA256(body)\nTS\nNONCE\nNODE_ID
-```
-
-Headers: `X-Node-Id`, `X-Node-Ts`, `X-Node-Nonce`, `X-Node-Sig`.
-The window is ±120 s and the nonce cache lasts ~10 min; a replay yields 401.
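A sketch of how a client might produce these headers. Only the canonical string and the header names come from the section above; the Ed25519 key handling and the hex encodings of the body hash and signature are assumptions:

```python
# Hedged NodeSig client sketch: signs the canonical string
# METHOD\nPATH\nSHA256(body)\nTS\nNONCE\nNODE_ID with an Ed25519 key.
import hashlib
import time
import uuid

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

def nodesig_headers(priv: Ed25519PrivateKey, node_id: str,
                    method: str, path: str, body: bytes) -> dict:
    ts = str(int(time.time()))                 # must fall inside the ±120 s window
    nonce = uuid.uuid4().hex                   # single-use; server caches ~10 min
    canonical = "\n".join([
        method, path, hashlib.sha256(body).hexdigest(), ts, nonce, node_id,
    ])
    sig = priv.sign(canonical.encode()).hex()  # hex encoding is an assumption
    return {"X-Node-Id": node_id, "X-Node-Ts": ts,
            "X-Node-Nonce": nonce, "X-Node-Sig": sig}
```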
-## Sync daemon
-
-- Jitter of 0–30 s per peer; uses ETag/`since`.
-- A disk watermark (`SYNC_DISK_LOW_WATERMARK_PCT`) stops pin bursts.
-- Pins run concurrently (`SYNC_MAX_CONCURRENT_PINS`) with a pre-`findprovs` `swarm/connect`.
-
-## Keys policy
-
-`KEY_AUTO_GRANT_TRUSTED_ONLY=1`: only nodes with `KnownNode.meta.role == 'trusted'` get the DEK automatically. The preview lease TTL is set via `KEY_GRANT_PREVIEW_TTL_SEC`.
diff --git a/docs/web2-client.md b/docs/web2-client.md
deleted file mode 100644
index a29d579..0000000
--- a/docs/web2-client.md
+++ /dev/null
@@ -1,118 +0,0 @@
-## Web2 Client (through HTTP API)
-
-### API Public Endpoints
-
-```text
-https://music-gateway.letsw.app
-  – /api/v1
-```
-
-### Telegram WebApp Authorization
-
-[Implementation](../app/api/routes/auth.py)
-
-#### Request (POST, /api/v1/auth.twa, JSON)
-
-```javascript
-{
-  twa_data: window.Telegram.WebApp.initData
-}
-```
-
-#### Response (JSON)
-
-```javascript
-{
-  user: { ...User },
-  connected_wallet: null | {
-    version: string,
-    address: string,
-    ton_balance: string // nanoTON bignum
-  },
-  auth_v1_token: string
-}
-```
-
-**Use** `auth_v1_token` as the `Authorization` header for all authorized requests.
-
-### Upload file
-
-[Implementation](../app/api/routes/node_storage.py)
-
-#### Request (POST, /api/v1/storage, FormData)
-
-```javascript
-{
-  file: File
-}
-```
-
-#### Response (JSON)
-
-```javascript
-{
-  content_sha256: string,
-  content_id_v1: string,
-  content_url: string
-}
-```
-
-### Download file
-
-[Implementation](../app/api/routes/node_storage.py)
-
-#### Request (GET, /api/v1/storage/:content_id)
-
-#### Response (File)
-
-### Create new content
-
-[Implementation](../app/api/routes/blockchain.py)
-
-#### Request (POST, /api/v1/blockchain.sendNewContentMessage, JSON)
-
-```javascript
-{
-  title: string,
-  authors: list,
-  content: string, // recommended dmy://
-  image: string, // recommended dmy://
-  description: string,
-  price: string, // nanoTON bignum
-  resaleLicensePrice: string, // nanoTON bignum (default = 0)
-  allowResale: boolean,
-  royaltyParams: [{
-    address: string,
-    value: number // 10000 = 100%
-  }]
-}
-```
-
-#### Response (JSON)
-
-```javascript
-{
-  message: "Transaction requested"
-}
-```
-
-### Purchase content
-
-[Implementation](../app/api/routes/blockchain.py)
-
-#### Request (POST, /api/v1/blockchain.sendPurchaseContentMessage, JSON)
-
-```javascript
-{
-  content_address: string,
-  price: string // nanoTON bignum
-}
-```
-
-#### Response (JSON)
-
-```javascript
-{
-  message: "Transaction requested"
-}
-```
\ No newline at end of file
diff --git a/docs/web2-client_task280224.md b/docs/web2-client_task280224.md
deleted file mode 100644
index 4a8f8ca..0000000
--- a/docs/web2-client_task280224.md
+++ /dev/null
@@ -1,9 +0,0 @@
-## Web2 Client Task #280224
-
-1. While reworking the design, build the page properly, unlike its current state: it should be composed of components rather than written as one monolith.
-2. When the "Upload content" button is pressed, open a dialog saying "Go to your wallet, you have requested a transaction"; if the server returned a walletLink in addition to the usual message, also show a button that opens the wallet.
-3. To request a transaction, send a `docs/web2-client/UploadFile` request with the file and receive a content_url in response; after uploading the image and the content itself, attach the resulting URLs to the `docs/web2-client/CreateNewContent` request in the image and content fields respectively (see the sketch after the requirements hunk below).
-4. Nice to have: render the uploaded cover as a card with "Delete" and "Change" buttons (pressing "Change" reopens the upload dialog).
-5. Make sure content passes through the full upload chain (image upload, content upload, transaction request via the backend) and that the webapp then closes via window.Telegram.WebApp.close().
-6. Implement the design the way Misha wants it.
-7. Handle the case where no wallet is connected, i.e. the `docs/web2-client/auth.twa` response contains connected_wallet: null.
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 1ddd468..ac5ccff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,3 +19,7 @@ ffmpeg-python==0.2.0
 python-magic==0.4.27
 cryptography==42.0.5
 alembic==1.13.1
+blake3==0.4.1
+prometheus-client==0.20.0
+pytest==8.2.1
+pytest-asyncio==0.23.7
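The two deleted client docs above remain the clearest description of the upload chain (auth.twa, then storage uploads, then blockchain.sendNewContentMessage). As a closing illustration, a hedged Python sketch of that chain; the gateway URL and field names come from the deleted docs, while all payload values are placeholders. The real TWA client performs the same calls in JavaScript with fetch():

```python
# Hedged end-to-end sketch of the documented upload chain. `token` is the
# auth_v1_token returned by /api/v1/auth.twa, used as the Authorization header.
import requests

BASE = "https://music-gateway.letsw.app/api/v1"  # gateway from the deleted doc

def upload(token: str, path: str) -> str:
    # POST /api/v1/storage (FormData with a single `file` field)
    with open(path, "rb") as fh:
        resp = requests.post(f"{BASE}/storage", files={"file": fh},
                             headers={"Authorization": token})
    resp.raise_for_status()
    return resp.json()["content_url"]

def publish(token: str, cover_path: str, content_path: str) -> None:
    payload = {
        "title": "Demo track",                   # placeholder values
        "authors": ["Demo artist"],
        "content": upload(token, content_path),  # step 1: upload the content
        "image": upload(token, cover_path),      # step 2: upload the cover
        "description": "",
        "price": "1000000000",                   # nanoTON bignum as a string
        "allowResale": False,
        "royaltyParams": [],
    }
    # step 3: request the on-chain transaction through the backend
    resp = requests.post(f"{BASE}/blockchain.sendNewContentMessage",
                         json=payload, headers={"Authorization": token})
    resp.raise_for_status()
    assert resp.json()["message"] == "Transaction requested"
```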