#!/usr/bin/env python3
# montana/Русский/Бот/timechain_node.py
"""
timechain_node.py Montana TimeChain Node Daemon (MAINNET)
Узел таймчейна. Окна времени привязаны к UTC:
- τ₁ (60s): Каждую минуту UTC (XX:XX:00) presence proof + pending TX
- τ₂ (600s): Каждые 10 минут UTC (XX:X0:00) финализация, coinbase эмиссия
- τ₃ (14d): Каждые 14 дней чекпоинт
- τ₄ (4y): Каждые 4 года халвинг
Все окна привязаны к абсолютному UTC времени, не к моменту старта узла.
ML-DSA-65 подписи на каждом окне.
Usage:
python3 timechain_node.py
"""
import json
import logging
import os
import signal
import threading
import time
from datetime import datetime, timezone
from pathlib import Path

from timechain import (
TimeChain, TAU1_SECONDS, TAU2_SECONDS, TAU1_PER_TAU2,
GENESIS_TIMESTAMP_NS, TIME_BANK_TOTAL_SECONDS
)
from transaction import create_coinbase_tx
from node_crypto import generate_keypair, public_key_to_address, sign_message
from presence_proof import PresenceChain
# ═══════════════════════════════════════════════════════════════════════════════
# CONFIGURATION
# ═══════════════════════════════════════════════════════════════════════════════
# Filesystem layout: everything lives under ./data next to this script.
DATA_DIR = Path(__file__).parent / "data"
KEYS_FILE = DATA_DIR / "timechain_keys.json"
DB_PATH = str(DATA_DIR / "timechain.db")
PRESENCE_DB_PATH = str(DATA_DIR / "presence.db")

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(message)s",
    datefmt="%H:%M:%S",
)
log = logging.getLogger("timechain_node")

# Cooperative-shutdown flag: polled by the main loop and wait_until().
_running = True


def signal_handler(sig, frame):
    """Request a graceful shutdown; the main loop exits on its next poll."""
    global _running
    log.info("Shutting down...")
    _running = False


# Register the same handler for both interactive (Ctrl-C) and service stops.
for _sig in (signal.SIGINT, signal.SIGTERM):
    signal.signal(_sig, signal_handler)
# ═══════════════════════════════════════════════════════════════════════════════
# UTC TIME WINDOWS
# ═══════════════════════════════════════════════════════════════════════════════
def current_tau1_window(now=None, genesis_sec=None, period=None) -> int:
    """Index of the current tau-1 window: whole UTC minutes since genesis.

    All parameters are optional and default to the live chain values, so
    existing no-argument callers are unaffected.

    Args:
        now: POSIX timestamp to evaluate at; defaults to time.time().
        genesis_sec: genesis timestamp in whole seconds; defaults to
            GENESIS_TIMESTAMP_NS // 1_000_000_000.
        period: window length in seconds; defaults to TAU1_SECONDS.

    Returns:
        Zero-based window index (integer floor division).
    """
    if now is None:
        now = time.time()
    if genesis_sec is None:
        genesis_sec = GENESIS_TIMESTAMP_NS // 1_000_000_000
    if period is None:
        period = TAU1_SECONDS
    return (int(now) - genesis_sec) // period
def current_tau2_window(now=None, genesis_sec=None, period=None) -> int:
    """Index of the current tau-2 window: UTC 10-minute slots since genesis.

    All parameters are optional and default to the live chain values, so
    existing no-argument callers are unaffected.

    Args:
        now: POSIX timestamp to evaluate at; defaults to time.time().
        genesis_sec: genesis timestamp in whole seconds; defaults to
            GENESIS_TIMESTAMP_NS // 1_000_000_000.
        period: window length in seconds; defaults to TAU2_SECONDS.

    Returns:
        Zero-based window index (integer floor division).
    """
    if now is None:
        now = time.time()
    if genesis_sec is None:
        genesis_sec = GENESIS_TIMESTAMP_NS // 1_000_000_000
    if period is None:
        period = TAU2_SECONDS
    return (int(now) - genesis_sec) // period
def next_tau1_boundary(now=None, period=None) -> float:
    """Timestamp of the next tau-1 boundary (the next exact UTC minute).

    Strictly in the future: a *now* lying exactly on a boundary yields the
    following one, matching the original `// period + 1` arithmetic.

    Args:
        now: POSIX timestamp to evaluate at; defaults to time.time().
        period: boundary spacing in seconds; defaults to TAU1_SECONDS.

    Returns:
        The boundary as a float POSIX timestamp.
    """
    if now is None:
        now = time.time()
    if period is None:
        period = TAU1_SECONDS
    return float((int(now) // period + 1) * period)
def next_tau2_boundary(now=None, period=None) -> float:
    """Timestamp of the next tau-2 boundary (the next exact 10-minute UTC mark).

    Strictly in the future: a *now* lying exactly on a boundary yields the
    following one, matching the original `// period + 1` arithmetic.

    Args:
        now: POSIX timestamp to evaluate at; defaults to time.time().
        period: boundary spacing in seconds; defaults to TAU2_SECONDS.

    Returns:
        The boundary as a float POSIX timestamp.
    """
    if now is None:
        now = time.time()
    if period is None:
        period = TAU2_SECONDS
    return float((int(now) // period + 1) * period)
def wait_until(target: float):
    """Sleep until *target* (POSIX timestamp), polling so shutdown stays responsive.

    Wakes at least every 0.5 s to re-check the module-level ``_running``
    flag, allowing a signal-driven shutdown to interrupt the wait.

    Args:
        target: absolute POSIX timestamp to sleep until.

    Fix: the remaining time is clamped to >= 0. The original recomputed
    ``target - time.time()`` after the loop test, so the deadline could
    pass in between and time.sleep() would raise ValueError on the
    negative duration.
    """
    while _running and time.time() < target:
        time.sleep(min(0.5, max(0.0, target - time.time())))
# ═══════════════════════════════════════════════════════════════════════════════
# NODE KEYS
# ═══════════════════════════════════════════════════════════════════════════════
def load_or_create_keys():
    """Load the node's ML-DSA-65 keypair from KEYS_FILE, generating it on first run.

    Returns:
        Tuple of (node_id, private_key, public_key) as stored in the key file.

    Raises:
        json.JSONDecodeError / KeyError: if an existing key file is corrupt
        (deliberately not swallowed — a node must not silently regenerate
        its identity over a damaged key file).

    Fix: the key file is now created with 0600 permissions atomically at
    open time. The original wrote the private key first and chmod'd
    afterwards, leaving a window where the secret sat on disk with
    default (typically world-readable) permissions.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    if KEYS_FILE.exists():
        with open(KEYS_FILE) as f:
            keys = json.load(f)
        log.info(f"Loaded keys: {keys['node_id']}")
        return keys["node_id"], keys["private_key"], keys["public_key"]
    log.info("Generating new ML-DSA-65 keypair...")
    private_key, public_key = generate_keypair()
    node_id = public_key_to_address(public_key)
    keys = {
        "node_id": node_id,
        "private_key": private_key,
        "public_key": public_key,
        "generated_at": datetime.now(timezone.utc).isoformat(),
    }
    # Owner-only permissions are applied by the OS at file creation,
    # before a single byte of the private key is written.
    fd = os.open(KEYS_FILE, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "w") as f:
        json.dump(keys, f, indent=2)
    log.info(f"Generated node: {node_id}")
    return node_id, private_key, public_key
# ═══════════════════════════════════════════════════════════════════════════════
# PARTICIPANTS (for emission)
# ═══════════════════════════════════════════════════════════════════════════════
# In-process presence accounting: {address: {"last_seen": <epoch seconds>,
# "total_seconds": <accumulated int>}}.
# NOTE(review): entry schema inferred from get_and_reset_emissions(); the
# code that populates this dict is not visible here — confirm against it.
_active_participants = {}
# Guards _active_participants; get_and_reset_emissions() mutates it under
# this lock, presumably against reporter threads (writers not visible here).
_participants_lock = threading.Lock()
# Shared hand-off file written by montana_api.py (a separate process) and
# drained by _read_presence_queue() at each tau2 finalization.
PRESENCE_QUEUE_FILE = DATA_DIR / "presence_queue.json"
def _read_presence_queue(path=None) -> dict:
    """Atomically claim and return the shared presence queue written by montana_api.py.

    Args:
        path: queue file to drain; defaults to PRESENCE_QUEUE_FILE.

    Returns:
        The queued mapping (expected {address: seconds}), or {} when the
        queue is absent, empty, malformed, or unreadable.

    Fix: the original read the file and then overwrote it with {} — any
    entry the API process wrote between those two steps was silently
    lost. The file is now claimed first with os.replace() (atomic on
    both POSIX and Windows), so a concurrent writer either lands wholly
    before the claim (and is read) or wholly after (and is picked up
    next drain). NOTE(review): this deletes the queue file instead of
    leaving an empty {} — assumes the API writer tolerates a missing
    file; confirm against montana_api.py.
    """
    if path is None:
        path = PRESENCE_QUEUE_FILE
    claimed = Path(path).with_suffix(".claimed")
    try:
        os.replace(path, claimed)  # atomic take-ownership of the queue
    except FileNotFoundError:
        return {}  # nothing queued since the last drain
    except OSError as e:
        log.warning(f"Presence queue read error: {e}")
        return {}
    try:
        with open(claimed) as f:
            data = json.load(f)
        return data if isinstance(data, dict) else {}
    except Exception as e:
        log.warning(f"Presence queue read error: {e}")
        return {}
    finally:
        try:
            claimed.unlink()
        except OSError:
            pass
def get_and_reset_emissions():
    """Collect accumulated presence seconds from queue + local, reset counters"""
    with _participants_lock:
        now = time.time()
        cutoff_age = TAU2_SECONDS + 60  # one tau2 window plus 60s grace
        # Entries queued by the API process; each payout is capped at one
        # full tau2 window of seconds.
        payouts = {
            addr: min(int(secs), TAU2_SECONDS)
            for addr, secs in _read_presence_queue().items()
            if isinstance(secs, (int, float)) and secs > 0
        }
        # Fold in this process's in-memory participants, collecting any
        # address not seen within the cutoff for removal.
        expired = []
        for addr, entry in _active_participants.items():
            if now - entry["last_seen"] >= cutoff_age:
                expired.append(addr)
                continue
            if entry["total_seconds"] > 0:
                combined = payouts.get(addr, 0) + entry["total_seconds"]
                payouts[addr] = min(combined, TAU2_SECONDS)
        # Zero every counter for the next accumulation window, then drop
        # the stale entries entirely.
        for entry in _active_participants.values():
            entry["total_seconds"] = 0
        for addr in expired:
            del _active_participants[addr]
        return payouts
# ═══════════════════════════════════════════════════════════════════════════════
# MAIN NODE LOOP
# ═══════════════════════════════════════════════════════════════════════════════
def run_node():
    """Main daemon loop.

    Creates one tau1 window per exact UTC minute and finalizes a tau2
    window (coinbase emission) on every 10th UTC minute, with windows
    anchored to absolute UTC time rather than node start time.
    """
    global _running
    node_id, private_key, public_key = load_or_create_keys()

    tc = TimeChain(
        node_id=node_id,
        private_key=private_key,
        public_key=public_key,
        db_path=DB_PATH,
    )
    tc.register_node(node_id, public_key)
    presence_chain = PresenceChain(
        node_id=node_id,
        private_key=private_key,
        pubkey=public_key,
        db_path=PRESENCE_DB_PATH,
    )

    log.info("=" * 60)
    log.info(" Montana TimeChain Node — MAINNET")
    log.info(f" Node: {node_id}")
    log.info(f" tau1: {tc.tau1_count} windows")
    log.info(f" tau2: {tc.tau2_count} windows")
    log.info(f" Supply: {tc.utxo_set.total_unspent()} Ɉ")
    log.info(f" Proofs: {presence_chain.proof_count}")
    log.info("=" * 60)

    # ═══════════════════════════════════════════════════════════════
    # IMMUTABILITY: full chain verification on startup — refuse to run
    # on a chain or presence log that fails integrity checks.
    # ═══════════════════════════════════════════════════════════════
    if tc.tau1_count > 0:
        log.info("Verifying chain integrity...")
        ok, msg = tc.verify_full_chain()
        if not ok:
            log.error(f"CHAIN VERIFICATION FAILED: {msg}")
            raise RuntimeError(f"Corrupted TimeChain — cannot start: {msg}")
        log.info(f"Chain verification: {msg}")
        ok, msg = presence_chain.verify_chain()
        if not ok:
            log.error(f"PRESENCE CHAIN VERIFICATION FAILED: {msg}")
            raise RuntimeError(f"Corrupted presence chain — cannot start: {msg}")
        log.info(f"Presence chain: {msg}")
    else:
        log.info("Empty chain — skipping verification")

    tau1_in_current_tau2 = 0
    tau2_number = tc.tau2_count

    # Align with the wall clock: sleep until the next exact UTC minute.
    target = next_tau1_boundary()
    now_utc = datetime.now(timezone.utc).strftime("%H:%M:%S")
    wait_sec = target - time.time()
    log.info(f"Now {now_utc} UTC. Waiting {wait_sec:.1f}s for next tau1 window boundary...")
    wait_until(target)

    while _running:
        now = time.time()
        now_utc = datetime.now(timezone.utc).strftime("%H:%M:%S")
        tau1_window_num = current_tau1_window()
        tau2_window_num = current_tau2_window()
        try:
            # ─── tau1 window: create presence proof + record window ───
            proof = presence_chain.create_proof(t2_index=tau2_window_num)
            window = tc.create_tau1_window(
                transactions=[],
                presence_proof_hashes=[proof.proof_hash],
            )
            tau1_in_current_tau2 += 1
            log.info(
                f"τ₁ #{window.window_number} | "
                f"{now_utc} UTC | "
                f"window={tau1_window_num} | "
                f"hash={window.window_hash()[:12]}... | "
                f"tau1_in_tau2={tau1_in_current_tau2}/{TAU1_PER_TAU2}"
            )

            # ─── tau2 boundary: finalize emission on minute 9 of each
            # 10-minute UTC slot, once a full set of tau1 windows exists ───
            utc_minute = int(now) // 60
            at_tau2_boundary = (utc_minute % 10 == 9) and (tau1_in_current_tau2 >= TAU1_PER_TAU2)
            if at_tau2_boundary:
                log.info(f"τ₂ #{tau2_number} — финализация в {now_utc} UTC...")
                emissions = get_and_reset_emissions()
                # The node itself always earns a full window of presence.
                if node_id not in emissions:
                    emissions[node_id] = TAU2_SECONDS
                # Halving: era 0 → coefficient 1.0, era 1 → 0.5, etc.
                tau2_per_halving = 2016 * 104
                halving_era = tau2_number // tau2_per_halving
                halving_coefficient = 1.0 / (2 ** halving_era)
                # Convert the emissions dict into coinbase transactions.
                coinbase_txs = []
                for addr, seconds in emissions.items():
                    amount = int(seconds * halving_coefficient)
                    if amount > 0:
                        coinbase_txs.append(
                            create_coinbase_tx(addr, amount, tau2_number)
                        )
                tau2_window = tc.finalize_tau2(
                    coinbase_txs=coinbase_txs,
                    halving_coefficient=halving_coefficient,
                )
                if tau2_window:
                    log.info(
                        f" τ₂ #{tau2_window.window_number} FINALIZED | "
                        f"emissions={tau2_window.total_emissions} Ɉ | "
                        f"participants={len(emissions)} | "
                        f"coef={halving_coefficient}"
                    )
                    tau2_number += 1
                else:
                    log.warning(" τ₂ finalization returned None")
                tau1_in_current_tau2 = 0
        except Exception as e:
            # Keep the daemon alive across transient failures; the next
            # boundary wait below still runs.
            log.error(f"Error in main loop: {e}", exc_info=True)

        # ─── Wait for the next tau1 boundary (exact UTC minute) ──────
        wait_until(next_tau1_boundary())

    log.info("Node stopped.")
    stats = tc.get_stats()
    log.info(f"Final: tau1={stats['tau1_count']}, tau2={stats['tau2_count']}, supply={stats['total_supply']}")
if __name__ == "__main__":
    # Script entry point: start the daemon loop.
    run_node()