From 78d97281f0b2c3b3d92406f2a970239c4c913338 Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 17:41:22 +0300 Subject: [PATCH 01/27] fix(relay): canonicalise envelope ID and timestamp on mailbox.Store MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The mailbox previously trusted the client-supplied envelope ID and SentAt, which enabled two attacks: - replay via re-broadcast: a malicious relay could resubmit the same ciphertext under multiple IDs, causing the recipient to receive the same plaintext repeatedly; - timestamp spoofing: senders could back-date or future-date messages to bypass the 7-day TTL or fake chronology. Store() now recomputes env.ID as hex(sha256(nonce||ct)[:16]) and overwrites env.SentAt with time.Now().Unix(). Both values are mutated on the envelope pointer so downstream gossipsub publishes agree on the normalised form. Also documents /relay/send as non-E2E — the endpoint seals with the relay's own key, which breaks end-to-end authenticity. Clients wanting real E2E should POST /relay/broadcast with a pre-sealed envelope. Co-Authored-By: Claude Opus 4.7 (1M context) --- node/api_relay.go | 13 ++++++++++--- relay/mailbox.go | 22 ++++++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/node/api_relay.go b/node/api_relay.go index ac949d6..e7ac306 100644 --- a/node/api_relay.go +++ b/node/api_relay.go @@ -167,9 +167,16 @@ func relayInboxCount(rc RelayConfig) http.HandlerFunc { // "msg_b64": "", // } // -// The relay node seals the message using its own X25519 keypair and broadcasts -// it on the relay gossipsub topic. No on-chain fee is attached — delivery is -// free for light clients using this endpoint. +// WARNING — NOT END-TO-END ENCRYPTED. +// The relay node seals the message using its OWN X25519 keypair, not the +// sender's. That means: +// - The relay can read the plaintext (msg_b64 arrives in the clear). 
+// - The recipient cannot authenticate the sender — they only see "a +// message from the relay". +// For real E2E messaging, clients should seal the envelope themselves and +// use POST /relay/broadcast instead. This endpoint is retained only for +// backwards compatibility with legacy integrations and for bootstrap +// scenarios where the sender doesn't have a long-lived X25519 key yet. func relaySend(rc RelayConfig) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { diff --git a/relay/mailbox.go b/relay/mailbox.go index 755cd4e..88ba5f5 100644 --- a/relay/mailbox.go +++ b/relay/mailbox.go @@ -100,13 +100,31 @@ func (m *Mailbox) Close() error { return m.db.Close() } // // Anti-spam checks (in order): // 1. Ciphertext > MailboxMaxEnvelopeSize → returns ErrEnvelopeTooLarge. -// 2. Duplicate envelope ID → silently overwritten (idempotent). -// 3. Recipient already has MailboxPerRecipientCap entries → oldest evicted first. +// 2. env.ID is recomputed to the canonical value hex(sha256(nonce||ct)[:16]) +// — prevents a malicious relay from storing the same ciphertext under +// multiple IDs (real content-level replay protection). +// 3. env.SentAt is overwritten with server time — senders can't back-date +// or future-date messages to bypass ordering or TTL expiry. +// 4. Duplicate envelope ID → silently no-op (idempotent). +// 5. Recipient already has MailboxPerRecipientCap entries → oldest evicted first. +// +// NOTE: Store MUTATES env.ID and env.SentAt to the canonical / server values. +// Callers that re-broadcast (gossipsub publish) after Store will see the +// normalised envelope, which is desirable — peer nodes then agree on the +// same ID and timestamp. func (m *Mailbox) Store(env *Envelope) error { if len(env.Ciphertext) > MailboxMaxEnvelopeSize { return ErrEnvelopeTooLarge } + // v1.0.1 — canonicalise id & timestamp. Any client-supplied values are + // replaced with server-computed truth. 
This is the simplest way to + // prevent: + // - replay-via-rebroadcast (same ciphertext under different IDs), + // - timestamp spoofing (bypass TTL / fake chronology). + env.ID = envelopeID(env.Nonce, env.Ciphertext) + env.SentAt = time.Now().Unix() + key := mailboxKey(env.RecipientPub, env.SentAt, env.ID) val, err := json.Marshal(env) if err != nil { From 32eec62ba43ee630e3bad28db4c5a1a31a001d8b Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 17:51:14 +0300 Subject: [PATCH 02/27] fix(chain): RELAY_PROOF dedup by envelopeID + sticky BlockContact MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit RELAY_PROOF previously had no per-envelope dedup — every relay that saw the gossipsub re-broadcast could extract the sender's FeeSig from the envelope and submit its own RELAY_PROOF claim with its own RelayPubKey. The tx-ID uniqueness check didn't help because tx.ID = sha256(relayPubKey||envelopeID)[:16], which is unique per (relay, envelope) pair. A malicious mesh of N relays could drain N× the fee from the sender's balance for a single message. Fix: record prefixRelayProof: on first successful apply and reject subsequent claims for the same envelope. CONTACT_REQUEST previously overwrote any prior record (including a blocked one) back to pending, letting spammers unblock themselves by paying another MinContactFee. Now the handler reads the existing record first and rejects the tx with "recipient has blocked sender" when prev.Status == ContactBlocked. Block becomes sticky. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- blockchain/chain.go | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/blockchain/chain.go b/blockchain/chain.go index ace73af..1ce8361 100644 --- a/blockchain/chain.go +++ b/blockchain/chain.go @@ -47,6 +47,7 @@ const ( prefixPayChan = "paychan:" // paychan: → PayChanState JSON prefixRelay = "relay:" // relay: → RegisterRelayPayload JSON prefixRelayHB = "relayhb:" // relayhb: → unix seconds (int64) of last HB + prefixRelayProof = "relayproof:" // relayproof: → claimant node_pubkey (1 claim per envelope) prefixContactIn = "contact_in:" // contact_in:: → contactRecord JSON prefixValidator = "validator:" // validator: → "" (presence = active) prefixContract = "contract:" // contract: → ContractRecord JSON @@ -795,9 +796,21 @@ func (c *Chain) applyTx(txn *badger.Txn, tx *Transaction) (uint64, error) { if err := json.Unmarshal(tx.Payload, &p); err != nil { return 0, fmt.Errorf("%w: RELAY_PROOF bad payload: %v", ErrTxFailed, err) } + if p.EnvelopeID == "" { + return 0, fmt.Errorf("%w: RELAY_PROOF: envelope_id is required", ErrTxFailed) + } if p.SenderPubKey == "" || p.FeeUT == 0 || len(p.FeeSig) == 0 { return 0, fmt.Errorf("%w: relay proof missing fee authorization fields", ErrTxFailed) } + // Per-envelope dedup — only one relay may claim the fee for a given + // envelope. Without this check, every relay that saw the gossipsub + // re-broadcast could extract the sender's FeeSig and submit its own + // RELAY_PROOF, draining the sender's balance by N× for one message. 
+ proofKey := []byte(prefixRelayProof + p.EnvelopeID) + if _, err := txn.Get(proofKey); err == nil { + return 0, fmt.Errorf("%w: RELAY_PROOF: envelope %s already claimed", + ErrTxFailed, p.EnvelopeID) + } authBytes := FeeAuthBytes(p.EnvelopeID, p.FeeUT) ok, err := verifyEd25519(p.SenderPubKey, authBytes, p.FeeSig) if err != nil || !ok { @@ -818,6 +831,10 @@ func (c *Chain) applyTx(txn *badger.Txn, tx *Transaction) (uint64, error) { }); err != nil { return 0, err } + // Mark envelope as claimed — prevents replay by other relays. + if err := txn.Set(proofKey, []byte(p.RelayPubKey)); err != nil { + return 0, fmt.Errorf("mark relay proof: %w", err) + } case EventBindWallet: var p BindWalletPayload @@ -956,6 +973,19 @@ func (c *Chain) applyTx(txn *badger.Txn, tx *Transaction) (uint64, error) { return 0, fmt.Errorf("%w: CONTACT_REQUEST: amount %d < MinContactFee %d", ErrTxFailed, tx.Amount, MinContactFee) } + // Sticky block — if recipient previously blocked this sender, refuse + // the new request instead of silently overwriting the blocked status + // back to pending. Prevents unblock-via-respam. 
+ key := prefixContactIn + tx.To + ":" + tx.From + if item, err := txn.Get([]byte(key)); err == nil { + var prev contactRecord + if verr := item.Value(func(val []byte) error { + return json.Unmarshal(val, &prev) + }); verr == nil && prev.Status == string(ContactBlocked) { + return 0, fmt.Errorf("%w: CONTACT_REQUEST: recipient has blocked sender", + ErrTxFailed) + } + } if err := c.debitBalance(txn, tx.From, tx.Amount+tx.Fee); err != nil { return 0, fmt.Errorf("CONTACT_REQUEST debit: %w", err) } @@ -970,7 +1000,6 @@ func (c *Chain) applyTx(txn *badger.Txn, tx *Transaction) (uint64, error) { CreatedAt: tx.Timestamp.Unix(), } val, _ := json.Marshal(rec) - key := prefixContactIn + tx.To + ":" + tx.From if err := txn.Set([]byte(key), val); err != nil { return 0, fmt.Errorf("store contact record: %w", err) } From 8082dd0bf71eb0bb91c5f87f984710a7ba7ee1ff Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 17:54:08 +0300 Subject: [PATCH 03/27] fix(node): rate-limit relay HTTP endpoints MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Relay routes were not wrapped in any guards — /relay/broadcast accepted unlimited writes from any IP, and /relay/inbox could be scraped at line rate. Combined with the per-recipient FIFO eviction (MailboxPerRecipientCap=500), an unauthenticated attacker could wipe a victim's real messages by spamming 500 garbage envelopes. This commit wraps writes in withSubmitTxGuards (10/s per IP + 256 KiB body cap) and reads in withReadLimit (20/s per IP) — the same limits already used for /api/tx and /api/address. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- node/api_relay.go | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/node/api_relay.go b/node/api_relay.go index e7ac306..eb3ce53 100644 --- a/node/api_relay.go +++ b/node/api_relay.go @@ -37,12 +37,19 @@ type RelayConfig struct { // DELETE /relay/inbox/{envID} ?pub= // GET /relay/contacts ?pub= func registerRelayRoutes(mux *http.ServeMux, rc RelayConfig) { - mux.HandleFunc("/relay/send", relaySend(rc)) - mux.HandleFunc("/relay/broadcast", relayBroadcast(rc)) - mux.HandleFunc("/relay/inbox/count", relayInboxCount(rc)) - mux.HandleFunc("/relay/inbox/", relayInboxDelete(rc)) - mux.HandleFunc("/relay/inbox", relayInboxList(rc)) - mux.HandleFunc("/relay/contacts", relayContacts(rc)) + // Writes go through withSubmitTxGuards: per-IP rate limit (10/s, burst 20) + // + 256 KiB body cap. Without these, a single attacker could spam + // 500 envelopes per victim in a few seconds and evict every real message + // via the mailbox FIFO cap. + mux.HandleFunc("/relay/send", withSubmitTxGuards(relaySend(rc))) + mux.HandleFunc("/relay/broadcast", withSubmitTxGuards(relayBroadcast(rc))) + + // Reads go through withReadLimit: per-IP rate limit (20/s, burst 40). + // Protects against inbox-scraping floods from a single origin. 
+ mux.HandleFunc("/relay/inbox/count", withReadLimit(relayInboxCount(rc))) + mux.HandleFunc("/relay/inbox/", withReadLimit(relayInboxDelete(rc))) + mux.HandleFunc("/relay/inbox", withReadLimit(relayInboxList(rc))) + mux.HandleFunc("/relay/contacts", withReadLimit(relayContacts(rc))) } // relayInboxList handles GET /relay/inbox?pub=[&since=][&limit=N] From 15d0ed306b94cd98137104b69ff91393c141f267 Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 17:55:11 +0300 Subject: [PATCH 04/27] fix(ws): hard-deny inbox:* / typing:* when authX is empty MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The WS topic-auth check had a soft-fail fallback: if the authenticated identity had no registered X25519 public key (authX == ""), the topic-ownership check was skipped and the client could subscribe to any inbox:* or typing:* topic. Exploit: register an Ed25519 identity without an X25519 key, subscribe to the victim's inbox topic, receive their envelope notifications. Now both topics hard-require a registered X25519. Clients must call REGISTER_KEY (publishing X25519) before subscribing. The scope is narrow — only identities that haven't completed REGISTER_KEY yet could have exploited this — but a hard fail is still correct. Co-Authored-By: Claude Opus 4.7 (1M context) --- node/ws.go | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/node/ws.go b/node/ws.go index d8a680f..22dc548 100644 --- a/node/ws.go +++ b/node/ws.go @@ -521,13 +521,17 @@ func (h *WSHub) authorizeSubscribe(c *wsClient, topic string) error { if authed == "" { return fmt.Errorf("inbox:* requires auth") } - // If we have an x25519 mapping, enforce it; otherwise accept - // (best-effort — identity may not be registered yet). 
- if authX != "" { - want := strings.TrimPrefix(topic, "inbox:") - if want != authX { - return fmt.Errorf("inbox:* only for your own x25519") - } + // Hard-require a registered X25519 identity — otherwise an + // Ed25519-only identity could subscribe to ANY inbox topic by + // design (authX == "" skipped the equality check). Fixed: we + // now refuse the subscription until the client publishes an + // X25519 key via REGISTER_KEY. + if authX == "" { + return fmt.Errorf("inbox:* requires a registered X25519 identity (send REGISTER_KEY first)") + } + want := strings.TrimPrefix(topic, "inbox:") + if want != authX { + return fmt.Errorf("inbox:* only for your own x25519") } return nil } @@ -536,11 +540,12 @@ func (h *WSHub) authorizeSubscribe(c *wsClient, topic string) error { if authed == "" { return fmt.Errorf("typing:* requires auth") } - if authX != "" { - want := strings.TrimPrefix(topic, "typing:") - if want != authX { - return fmt.Errorf("typing:* only for your own x25519") - } + if authX == "" { + return fmt.Errorf("typing:* requires a registered X25519 identity") + } + want := strings.TrimPrefix(topic, "typing:") + if want != authX { + return fmt.Errorf("typing:* only for your own x25519") } return nil } From f2cb5586caf6a5a947eee8889baf5d88aa42af9c Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 17:57:24 +0300 Subject: [PATCH 05/27] fix(relay): require signed Ed25519 auth on DELETE /relay/inbox/{id} MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously the endpoint accepted an unauthenticated DELETE with just ?pub=X — anyone who knew (or enumerated) a pub could wipe that pub's entire inbox, a trivial griefing vector. Now the handler requires a JSON body with {ed25519_pub, sig, ts} where sig signs "inbox-delete:::" under the Ed25519 privkey. 
The server then looks up the identity on-chain and verifies that the registered X25519 public key matches the ?pub= query — closing the gap between "I can sign" and "my identity owns this mailbox." Timestamp window: ±300s to prevent replay of captured DELETEs. Wires RelayConfig.ResolveX25519 via chain.Identity() in cmd/node/main.go. When ResolveX25519 is nil the endpoint returns 503 (feature unavailable) rather than silently allowing anonymous deletes. Co-Authored-By: Claude Opus 4.7 (1M context) --- cmd/node/main.go | 7 ++++ node/api_relay.go | 81 ++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 87 insertions(+), 1 deletion(-) diff --git a/cmd/node/main.go b/cmd/node/main.go index 83fe3d2..f6d9afc 100644 --- a/cmd/node/main.go +++ b/cmd/node/main.go @@ -920,6 +920,13 @@ func main() { ContactRequests: func(pubKey string) ([]blockchain.ContactInfo, error) { return chain.ContactRequests(pubKey) }, + ResolveX25519: func(ed25519PubHex string) string { + info, err := chain.Identity(ed25519PubHex) + if err != nil || info == nil { + return "" + } + return info.X25519PubKey + }, } go func() { diff --git a/node/api_relay.go b/node/api_relay.go index eb3ce53..687d66f 100644 --- a/node/api_relay.go +++ b/node/api_relay.go @@ -2,6 +2,7 @@ package node import ( "encoding/base64" + "encoding/hex" "encoding/json" "fmt" "net/http" @@ -9,6 +10,7 @@ import ( "time" "go-blockchain/blockchain" + "go-blockchain/identity" "go-blockchain/relay" ) @@ -26,6 +28,12 @@ type RelayConfig struct { // ContactRequests returns incoming contact records for the given Ed25519 pubkey. ContactRequests func(pubKey string) ([]blockchain.ContactInfo, error) + + // ResolveX25519 returns the X25519 hex published by the Ed25519 identity, + // or "" if the identity has not registered or does not exist. Used by + // authenticated mutating endpoints (e.g. DELETE /relay/inbox) to link a + // signing key back to its mailbox pubkey. nil disables those endpoints. 
+ ResolveX25519 func(ed25519PubHex string) string } // registerRelayRoutes wires relay mailbox endpoints onto mux. @@ -116,8 +124,24 @@ func relayInboxList(rc RelayConfig) http.HandlerFunc { } } -// relayInboxDelete handles DELETE /relay/inbox/{envelopeID}?pub= +// relayInboxDelete handles DELETE /relay/inbox/{envelopeID}?pub= +// +// Auth model: +// Query: ?pub= +// Body: {"ed25519_pub":"", "sig":"", "ts":} +// sig = Ed25519(privEd25519, +// "inbox-delete:" + envID + ":" + x25519Pub + ":" + ts) +// ts must be within ±5 minutes of server clock (anti-replay). +// +// Server then: +// 1. Verifies sig over the canonical bytes above. +// 2. Looks up identity(ed25519_pub).X25519Pub — must equal the ?pub= query. +// +// This links the signing key to the mailbox key without exposing the user's +// X25519 private material. func relayInboxDelete(rc RelayConfig) http.HandlerFunc { + const inboxDeleteSkewSecs = 300 // ±5 minutes + return func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodDelete { // Also serve GET /relay/inbox/{id} for convenience (fetch single envelope) @@ -140,6 +164,61 @@ func relayInboxDelete(rc RelayConfig) http.HandlerFunc { return } + // Auth. Unauthenticated DELETE historically let anyone wipe any + // mailbox by just knowing the pub — fixed in v1.0.2 via signed + // Ed25519 identity linked to the x25519 via identity registry. 
+ if rc.ResolveX25519 == nil { + jsonErr(w, fmt.Errorf("mailbox delete not available on this node"), 503) + return + } + var body struct { + Ed25519Pub string `json:"ed25519_pub"` + Sig string `json:"sig"` + Ts int64 `json:"ts"` + } + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + jsonErr(w, fmt.Errorf("invalid JSON body: %w", err), 400) + return + } + if body.Ed25519Pub == "" || body.Sig == "" || body.Ts == 0 { + jsonErr(w, fmt.Errorf("ed25519_pub, sig, ts are required"), 400) + return + } + now := time.Now().Unix() + if body.Ts < now-inboxDeleteSkewSecs || body.Ts > now+inboxDeleteSkewSecs { + jsonErr(w, fmt.Errorf("timestamp out of range (±%ds)", inboxDeleteSkewSecs), 400) + return + } + sigBytes, err := base64.StdEncoding.DecodeString(body.Sig) + if err != nil { + // Also try URL-safe for defensive UX. + sigBytes, err = base64.RawURLEncoding.DecodeString(body.Sig) + if err != nil { + jsonErr(w, fmt.Errorf("sig: invalid base64"), 400) + return + } + } + if _, err := hex.DecodeString(body.Ed25519Pub); err != nil { + jsonErr(w, fmt.Errorf("ed25519_pub: invalid hex"), 400) + return + } + msg := []byte(fmt.Sprintf("inbox-delete:%s:%s:%d", envID, pub, body.Ts)) + ok, err := identity.Verify(body.Ed25519Pub, msg, sigBytes) + if err != nil || !ok { + jsonErr(w, fmt.Errorf("signature invalid"), 403) + return + } + // Link ed25519 → x25519 via identity registry. 
+ registeredX := rc.ResolveX25519(body.Ed25519Pub) + if registeredX == "" { + jsonErr(w, fmt.Errorf("identity has no registered X25519 key"), 403) + return + } + if !strings.EqualFold(registeredX, pub) { + jsonErr(w, fmt.Errorf("pub does not match identity's registered X25519"), 403) + return + } + if err := rc.Mailbox.Delete(pub, envID); err != nil { jsonErr(w, err, 500) return From 88848efa635a4f6918fd92ffbcd8fe2a9467ff5c Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 18:36:00 +0300 Subject: [PATCH 06/27] feat(chain): remove channels, add social feed (Phase A of v2.0.0) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replaces the channel/membership model with a VK/Twitter-style feed: public posts, follow graph, likes. Views are deliberately off-chain (counted by the hosting relay, Phase B). Removed - EventCreateChannel, EventAddMember - CreateChannelPayload, AddMemberPayload, ChannelMember - prefixChannel, prefixChanMember - chain.Channel(), chain.ChannelMembers() - node/api_channels.go - GetChannel, GetChannelMembers on ExplorerQuery Added - Events: CREATE_POST, DELETE_POST, FOLLOW, UNFOLLOW, LIKE_POST, UNLIKE_POST - Payloads: CreatePostPayload, DeletePostPayload, FollowPayload, UnfollowPayload, LikePostPayload, UnlikePostPayload - Stored shape: PostRecord (author, size, hash, hosting relay, timestamp, reply/quote refs, soft-delete flag, fee paid) - State prefixes: post:, postbyauthor:, follow:, followin:, like:, likecount: - Queries: Post(), PostsByAuthor(), Following(), Followers(), LikeCount(), HasLiked() - Cached like counter via bumpLikeCount helper Pricing - BasePostFee = 1000 µT (aligned with MinFee block-validation floor) - PostByteFee = 1 µT/byte of compressed content - Total fee credited in full to HostingRelay pub (storage compensation) - MaxPostSize = 256 KiB Integrity - CREATE_POST validates content_hash length (32 B) and size range - DELETE_POST restricted to post.Author - Duplicate FOLLOW / 
LIKE rejected - reply_to and quote_of mutually exclusive Tests - TestFeedCreatePost: post stored, indexed, host credited - TestFeedInsufficientFee: underpaid post is skipped - TestFeedFollowUnfollow: follow graph round-trips via forward + inbound indices - TestFeedLikeUnlike: like toggles with dedup, counter stays accurate - TestFeedDeletePostByOther: non-author deletion rejected This is Phase A (chain-layer). Phase B adds the relay feed-mailbox (post bodies + gossipsub) and HTTP endpoints. Phase C adds the client Feed tab. Co-Authored-By: Claude Opus 4.7 (1M context) --- blockchain/chain.go | 386 +++++++++++++++++++++++++++++++++++---- blockchain/chain_test.go | 245 +++++++++++++++++++++++++ blockchain/types.go | 121 +++++++++--- cmd/node/main.go | 2 - node/api_channels.go | 102 ----------- node/api_routes.go | 7 - 6 files changed, 695 insertions(+), 168 deletions(-) delete mode 100644 node/api_channels.go diff --git a/blockchain/chain.go b/blockchain/chain.go index 1ce8361..f3f615c 100644 --- a/blockchain/chain.go +++ b/blockchain/chain.go @@ -40,8 +40,14 @@ const ( prefixHeight = "height" // height → uint64 prefixBalance = "balance:" // balance: → uint64 prefixIdentity = "id:" // id: → RegisterKeyPayload JSON - prefixChannel = "chan:" // chan: → CreateChannelPayload JSON - prefixChanMember = "chan-member:" // chan-member:: → "" (presence = member) + + // Social feed (v2.0.0). Replaced the old channel keys (chan:, chan-member:). 
+ prefixPost = "post:" // post: → PostRecord JSON + prefixPostByAuthor = "postbyauthor:" // postbyauthor::: → "" (chrono index) + prefixFollow = "follow:" // follow:: → "" (presence = follows) + prefixFollowInbound = "followin:" // followin:: → "" (reverse index — counts followers) + prefixLike = "like:" // like:: → "" (presence = liked) + prefixLikeCount = "likecount:" // likecount: → uint64 (cached count) prefixWalletBind = "walletbind:" // walletbind: → wallet_pubkey (string) prefixReputation = "rep:" // rep: → RepStats JSON prefixPayChan = "paychan:" // paychan: → PayChanState JSON @@ -538,11 +544,13 @@ func (c *Chain) Identity(pubKeyHex string) (*RegisterKeyPayload, error) { return &p, err } -// Channel returns the CreateChannelPayload for a channel ID, or nil. -func (c *Chain) Channel(channelID string) (*CreateChannelPayload, error) { - var p CreateChannelPayload +// ── Feed queries (v2.0.0) ────────────────────────────────────────────────── + +// Post returns the PostRecord for a post ID, or nil if not found. +func (c *Chain) Post(postID string) (*PostRecord, error) { + var p PostRecord err := c.db.View(func(txn *badger.Txn) error { - item, err := txn.Get([]byte(prefixChannel + channelID)) + item, err := txn.Get([]byte(prefixPost + postID)) if err != nil { return err } @@ -553,13 +561,62 @@ func (c *Chain) Channel(channelID string) (*CreateChannelPayload, error) { if errors.Is(err, badger.ErrKeyNotFound) { return nil, nil } - return &p, err + if err != nil { + return nil, err + } + return &p, nil } -// ChannelMembers returns the public keys of all members added to channelID. -func (c *Chain) ChannelMembers(channelID string) ([]string, error) { - prefix := []byte(fmt.Sprintf("%s%s:", prefixChanMember, channelID)) - var members []string +// PostsByAuthor returns the last `limit` posts by the given author, newest +// first. Iterates `postbyauthor::...` in reverse order. If limit +// ≤ 0, defaults to 50; capped at 200. 
+func (c *Chain) PostsByAuthor(authorPub string, limit int) ([]*PostRecord, error) { + if limit <= 0 { + limit = 50 + } + if limit > 200 { + limit = 200 + } + prefix := []byte(prefixPostByAuthor + authorPub + ":") + var out []*PostRecord + + err := c.db.View(func(txn *badger.Txn) error { + opts := badger.DefaultIteratorOptions + opts.Prefix = prefix + opts.Reverse = true // newest (higher ts) first — reverse iteration + opts.PrefetchValues = false + // For reverse iteration Badger requires seeking past the prefix range. + seek := append([]byte{}, prefix...) + seek = append(seek, 0xff) + + it := txn.NewIterator(opts) + defer it.Close() + for it.Seek(seek); it.ValidForPrefix(prefix) && len(out) < limit; it.Next() { + key := string(it.Item().Key()) + // key = "postbyauthor:::" + parts := strings.Split(key, ":") + if len(parts) < 4 { + continue + } + postID := parts[len(parts)-1] + rec, err := c.postInTxn(txn, postID) + if err != nil || rec == nil { + continue + } + if rec.Deleted { + continue + } + out = append(out, rec) + } + return nil + }) + return out, err +} + +// Following returns the Ed25519 pubkeys that `follower` subscribes to. +func (c *Chain) Following(followerPub string) ([]string, error) { + prefix := []byte(prefixFollow + followerPub + ":") + var out []string err := c.db.View(func(txn *badger.Txn) error { opts := badger.DefaultIteratorOptions opts.PrefetchValues = false @@ -568,15 +625,95 @@ func (c *Chain) ChannelMembers(channelID string) ([]string, error) { defer it.Close() for it.Rewind(); it.Valid(); it.Next() { key := string(it.Item().Key()) - // key = "chan-member::" + // key = "follow::" parts := strings.SplitN(key, ":", 3) if len(parts) == 3 { - members = append(members, parts[2]) + out = append(out, parts[2]) } } return nil }) - return members, err + return out, err +} + +// Followers returns the Ed25519 pubkeys that follow `target`. 
+func (c *Chain) Followers(targetPub string) ([]string, error) { + prefix := []byte(prefixFollowInbound + targetPub + ":") + var out []string + err := c.db.View(func(txn *badger.Txn) error { + opts := badger.DefaultIteratorOptions + opts.PrefetchValues = false + opts.Prefix = prefix + it := txn.NewIterator(opts) + defer it.Close() + for it.Rewind(); it.Valid(); it.Next() { + key := string(it.Item().Key()) + parts := strings.SplitN(key, ":", 3) + if len(parts) == 3 { + out = append(out, parts[2]) + } + } + return nil + }) + return out, err +} + +// LikeCount returns the cached count of likes for a post (O(1)). +func (c *Chain) LikeCount(postID string) (uint64, error) { + var count uint64 + err := c.db.View(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(prefixLikeCount + postID)) + if errors.Is(err, badger.ErrKeyNotFound) { + return nil + } + if err != nil { + return err + } + return item.Value(func(val []byte) error { + if len(val) == 8 { + count = binary.BigEndian.Uint64(val) + } + return nil + }) + }) + return count, err +} + +// HasLiked reports whether `liker` has liked the given post. +func (c *Chain) HasLiked(postID, likerPub string) (bool, error) { + key := []byte(prefixLike + postID + ":" + likerPub) + var ok bool + err := c.db.View(func(txn *badger.Txn) error { + _, err := txn.Get(key) + if err == nil { + ok = true + return nil + } + if errors.Is(err, badger.ErrKeyNotFound) { + return nil + } + return err + }) + return ok, err +} + +// postInTxn is the internal helper used by iteration paths to fetch a full +// PostRecord without opening a new View transaction. 
+func (c *Chain) postInTxn(txn *badger.Txn, postID string) (*PostRecord, error) { + item, err := txn.Get([]byte(prefixPost + postID)) + if errors.Is(err, badger.ErrKeyNotFound) { + return nil, nil + } + if err != nil { + return nil, err + } + var p PostRecord + if err := item.Value(func(val []byte) error { + return json.Unmarshal(val, &p) + }); err != nil { + return nil, err + } + return &p, nil } // WalletBinding returns the payout wallet pub key bound to a node, or "" if none. @@ -741,41 +878,197 @@ func (c *Chain) applyTx(txn *badger.Txn, tx *Transaction) (uint64, error) { return 0, err } - case EventCreateChannel: - var p CreateChannelPayload + // ── Feed events (v2.0.0) ────────────────────────────────────────── + case EventCreatePost: + var p CreatePostPayload if err := json.Unmarshal(tx.Payload, &p); err != nil { - return 0, fmt.Errorf("%w: CREATE_CHANNEL bad payload: %v", ErrTxFailed, err) + return 0, fmt.Errorf("%w: CREATE_POST bad payload: %v", ErrTxFailed, err) + } + if p.PostID == "" { + return 0, fmt.Errorf("%w: CREATE_POST: post_id required", ErrTxFailed) + } + if len(p.ContentHash) != 32 { + return 0, fmt.Errorf("%w: CREATE_POST: content_hash must be 32 bytes", ErrTxFailed) + } + if p.HostingRelay == "" { + return 0, fmt.Errorf("%w: CREATE_POST: hosting_relay required", ErrTxFailed) + } + if p.Size == 0 || p.Size > MaxPostSize { + return 0, fmt.Errorf("%w: CREATE_POST: size %d out of range (0, %d]", + ErrTxFailed, p.Size, MaxPostSize) + } + if p.ReplyTo != "" && p.QuoteOf != "" { + return 0, fmt.Errorf("%w: CREATE_POST: reply_to and quote_of are mutually exclusive", ErrTxFailed) + } + // Duplicate check — same post_id may only commit once. + if _, err := txn.Get([]byte(prefixPost + p.PostID)); err == nil { + return 0, fmt.Errorf("%w: CREATE_POST: post %s already exists", ErrTxFailed, p.PostID) + } + // Fee formula: BasePostFee + size × PostByteFee. tx.Fee carries the + // full amount; we validate it matches and the sender can afford it. 
+ expectedFee := BasePostFee + p.Size*PostByteFee + if tx.Fee < expectedFee { + return 0, fmt.Errorf("%w: CREATE_POST: fee %d < required %d (base %d + %d × %d bytes)", + ErrTxFailed, tx.Fee, expectedFee, BasePostFee, PostByteFee, p.Size) } if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { - return 0, fmt.Errorf("CREATE_CHANNEL debit: %w", err) + return 0, fmt.Errorf("CREATE_POST debit: %w", err) } - val, _ := json.Marshal(p) - if err := txn.Set([]byte(prefixChannel+p.ChannelID), val); err != nil { + // Full fee goes to the hosting relay (storage compensation). No + // validator cut on posts — validators earn from other tx types. This + // incentivises nodes to actually host posts. + relayTarget, err := c.resolveRewardTarget(txn, p.HostingRelay) + if err != nil { + return 0, err + } + if err := c.creditBalance(txn, relayTarget, tx.Fee); err != nil { + return 0, fmt.Errorf("credit hosting relay: %w", err) + } + rec := PostRecord{ + PostID: p.PostID, + Author: tx.From, + ContentHash: p.ContentHash, + Size: p.Size, + HostingRelay: p.HostingRelay, + ReplyTo: p.ReplyTo, + QuoteOf: p.QuoteOf, + CreatedAt: tx.Timestamp.Unix(), + FeeUT: tx.Fee, + } + recBytes, _ := json.Marshal(rec) + if err := txn.Set([]byte(prefixPost+p.PostID), recBytes); err != nil { + return 0, err + } + // Chrono index — allows PostsByAuthor to list newest-first in O(N). 
+ idxKey := fmt.Sprintf("%s%s:%020d:%s", prefixPostByAuthor, tx.From, rec.CreatedAt, p.PostID) + if err := txn.Set([]byte(idxKey), []byte{}); err != nil { return 0, err } - case EventAddMember: - var p AddMemberPayload + case EventDeletePost: + var p DeletePostPayload if err := json.Unmarshal(tx.Payload, &p); err != nil { - return 0, fmt.Errorf("%w: ADD_MEMBER bad payload: %v", ErrTxFailed, err) + return 0, fmt.Errorf("%w: DELETE_POST bad payload: %v", ErrTxFailed, err) } - if p.ChannelID == "" { - return 0, fmt.Errorf("%w: ADD_MEMBER: channel_id required", ErrTxFailed) + if p.PostID == "" { + return 0, fmt.Errorf("%w: DELETE_POST: post_id required", ErrTxFailed) } - if _, err := txn.Get([]byte(prefixChannel + p.ChannelID)); err != nil { + item, err := txn.Get([]byte(prefixPost + p.PostID)) + if errors.Is(err, badger.ErrKeyNotFound) { + return 0, fmt.Errorf("%w: DELETE_POST: post %s not found", ErrTxFailed, p.PostID) + } + if err != nil { + return 0, err + } + var rec PostRecord + if err := item.Value(func(val []byte) error { return json.Unmarshal(val, &rec) }); err != nil { + return 0, err + } + if rec.Author != tx.From { + return 0, fmt.Errorf("%w: DELETE_POST: only author can delete", ErrTxFailed) + } + if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { + return 0, fmt.Errorf("DELETE_POST debit: %w", err) + } + rec.Deleted = true + val, _ := json.Marshal(rec) + if err := txn.Set([]byte(prefixPost+p.PostID), val); err != nil { + return 0, err + } + + case EventFollow: + if tx.To == "" { + return 0, fmt.Errorf("%w: FOLLOW: target (to) is required", ErrTxFailed) + } + if tx.To == tx.From { + return 0, fmt.Errorf("%w: FOLLOW: cannot follow yourself", ErrTxFailed) + } + if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { + return 0, fmt.Errorf("FOLLOW debit: %w", err) + } + // follow:: + reverse index followin:: + fKey := []byte(prefixFollow + tx.From + ":" + tx.To) + if _, err := txn.Get(fKey); err == nil { + return 0, fmt.Errorf("%w: FOLLOW: 
already following", ErrTxFailed) + } + if err := txn.Set(fKey, []byte{}); err != nil { + return 0, err + } + if err := txn.Set([]byte(prefixFollowInbound+tx.To+":"+tx.From), []byte{}); err != nil { + return 0, err + } + + case EventUnfollow: + if tx.To == "" { + return 0, fmt.Errorf("%w: UNFOLLOW: target (to) is required", ErrTxFailed) + } + if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { + return 0, fmt.Errorf("UNFOLLOW debit: %w", err) + } + fKey := []byte(prefixFollow + tx.From + ":" + tx.To) + if _, err := txn.Get(fKey); err != nil { if errors.Is(err, badger.ErrKeyNotFound) { - return 0, fmt.Errorf("%w: ADD_MEMBER: channel %q not found", ErrTxFailed, p.ChannelID) + return 0, fmt.Errorf("%w: UNFOLLOW: not following", ErrTxFailed) + } + return 0, err + } + if err := txn.Delete(fKey); err != nil { + return 0, err + } + if err := txn.Delete([]byte(prefixFollowInbound + tx.To + ":" + tx.From)); err != nil { + return 0, err + } + + case EventLikePost: + var p LikePostPayload + if err := json.Unmarshal(tx.Payload, &p); err != nil { + return 0, fmt.Errorf("%w: LIKE_POST bad payload: %v", ErrTxFailed, err) + } + if p.PostID == "" { + return 0, fmt.Errorf("%w: LIKE_POST: post_id required", ErrTxFailed) + } + if _, err := txn.Get([]byte(prefixPost + p.PostID)); err != nil { + if errors.Is(err, badger.ErrKeyNotFound) { + return 0, fmt.Errorf("%w: LIKE_POST: post %s not found", ErrTxFailed, p.PostID) + } + return 0, err + } + lKey := []byte(prefixLike + p.PostID + ":" + tx.From) + if _, err := txn.Get(lKey); err == nil { + return 0, fmt.Errorf("%w: LIKE_POST: already liked", ErrTxFailed) + } + if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { + return 0, fmt.Errorf("LIKE_POST debit: %w", err) + } + if err := txn.Set(lKey, []byte{}); err != nil { + return 0, err + } + if err := bumpLikeCount(txn, p.PostID, +1); err != nil { + return 0, err + } + + case EventUnlikePost: + var p UnlikePostPayload + if err := json.Unmarshal(tx.Payload, &p); err != nil { 
+ return 0, fmt.Errorf("%w: UNLIKE_POST bad payload: %v", ErrTxFailed, err) + } + if p.PostID == "" { + return 0, fmt.Errorf("%w: UNLIKE_POST: post_id required", ErrTxFailed) + } + lKey := []byte(prefixLike + p.PostID + ":" + tx.From) + if _, err := txn.Get(lKey); err != nil { + if errors.Is(err, badger.ErrKeyNotFound) { + return 0, fmt.Errorf("%w: UNLIKE_POST: not liked", ErrTxFailed) } return 0, err } if err := c.debitBalance(txn, tx.From, tx.Fee); err != nil { - return 0, fmt.Errorf("ADD_MEMBER debit: %w", err) + return 0, fmt.Errorf("UNLIKE_POST debit: %w", err) } - member := tx.To - if member == "" { - member = tx.From + if err := txn.Delete(lKey); err != nil { + return 0, err } - if err := txn.Set([]byte(fmt.Sprintf("%s%s:%s", prefixChanMember, p.ChannelID, member)), []byte{}); err != nil { + if err := bumpLikeCount(txn, p.PostID, -1); err != nil { return 0, err } @@ -2336,6 +2629,35 @@ func (c *Chain) isValidatorTxn(txn *badger.Txn, pubKey string) (bool, error) { // verifyEd25519 verifies an Ed25519 signature without importing the identity package // (which would create a circular dependency). +// bumpLikeCount adjusts the cached like counter for a post. delta = ±1. +// Clamps at zero so a corrupt unlike without prior like can't underflow. 
+func bumpLikeCount(txn *badger.Txn, postID string, delta int64) error { + key := []byte(prefixLikeCount + postID) + var cur uint64 + item, err := txn.Get(key) + if err == nil { + if verr := item.Value(func(val []byte) error { + if len(val) == 8 { + cur = binary.BigEndian.Uint64(val) + } + return nil + }); verr != nil { + return verr + } + } else if !errors.Is(err, badger.ErrKeyNotFound) { + return err + } + switch { + case delta < 0 && cur > 0: + cur-- + case delta > 0: + cur++ + } + var buf [8]byte + binary.BigEndian.PutUint64(buf[:], cur) + return txn.Set(key, buf[:]) +} + func verifyEd25519(pubKeyHex string, msg, sig []byte) (bool, error) { pubBytes, err := hex.DecodeString(pubKeyHex) if err != nil { diff --git a/blockchain/chain_test.go b/blockchain/chain_test.go index 3a9180c..d58e67b 100644 --- a/blockchain/chain_test.go +++ b/blockchain/chain_test.go @@ -794,3 +794,248 @@ var _ = identity.Generate // Ensure ed25519 and hex are used directly (they may be used via helpers). var _ = ed25519.PublicKey(nil) var _ = hex.EncodeToString + +// ── Feed (v2.0.0) ────────────────────────────────────────────────────────── + +// TestFeedCreatePost: post commits, indexes, credits the hosting relay. +func TestFeedCreatePost(t *testing.T) { + c := newChain(t) + val := newIdentity(t) + alice := newIdentity(t) // post author + host := newIdentity(t) // hosting relay pubkey + + genesis := addGenesis(t, c, val) + + // Fund alice + host. 
+ const postSize = uint64(200)
+ expectedFee := blockchain.BasePostFee + postSize*blockchain.PostByteFee
+ fundAlice := makeTx(blockchain.EventTransfer, val.PubKeyHex(), alice.PubKeyHex(),
+ expectedFee+5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{}))
+ time.Sleep(2 * time.Millisecond) // ensure distinct txID (nanosec clock)
+ fundHost := makeTx(blockchain.EventTransfer, val.PubKeyHex(), host.PubKeyHex(),
+ blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{}))
+ b1 := buildBlock(t, genesis, val, []*blockchain.Transaction{fundAlice, fundHost})
+ mustAddBlock(t, c, b1)
+
+ hostBalBefore, _ := c.Balance(host.PubKeyHex())
+
+ h := sha256.Sum256([]byte("hello world post body"))
+ postPayload := blockchain.CreatePostPayload{
+ PostID: "post1",
+ ContentHash: h[:],
+ Size: postSize,
+ HostingRelay: host.PubKeyHex(),
+ }
+ postTx := makeTx(
+ blockchain.EventCreatePost,
+ alice.PubKeyHex(), "",
+ 0, expectedFee, // Fee = base + size*byte_fee; amount = 0
+ mustJSON(postPayload),
+ )
+ b2 := buildBlock(t, b1, val, []*blockchain.Transaction{postTx})
+ mustAddBlock(t, c, b2)
+
+ rec, err := c.Post("post1")
+ if err != nil || rec == nil {
+ t.Fatalf("Post(\"post1\") = %v, %v; want record", rec, err)
+ }
+ if rec.Author != alice.PubKeyHex() {
+ t.Errorf("author: got %q want %q", rec.Author, alice.PubKeyHex())
+ }
+ if rec.Size != postSize {
+ t.Errorf("size: got %d want %d", rec.Size, postSize)
+ }
+
+ // Host should have been credited the full fee.
+ hostBalAfter, _ := c.Balance(host.PubKeyHex())
+ if hostBalAfter != hostBalBefore+expectedFee {
+ t.Errorf("host balance: got %d, want %d (delta %d)",
+ hostBalAfter, hostBalBefore+expectedFee, expectedFee)
+ }
+
+ // PostsByAuthor should list it. 
+ posts, err := c.PostsByAuthor(alice.PubKeyHex(), 10) + if err != nil { + t.Fatalf("PostsByAuthor: %v", err) + } + if len(posts) != 1 || posts[0].PostID != "post1" { + t.Errorf("PostsByAuthor: got %v, want [post1]", posts) + } +} + +// TestFeedInsufficientFee: size-based fee is enforced. +func TestFeedInsufficientFee(t *testing.T) { + c := newChain(t) + val := newIdentity(t) + alice := newIdentity(t) + host := newIdentity(t) + + genesis := addGenesis(t, c, val) + fundAlice := makeTx(blockchain.EventTransfer, val.PubKeyHex(), alice.PubKeyHex(), + 10*blockchain.Token, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + b1 := buildBlock(t, genesis, val, []*blockchain.Transaction{fundAlice}) + mustAddBlock(t, c, b1) + + const postSize = uint64(1000) + h := sha256.Sum256([]byte("body")) + postPayload := blockchain.CreatePostPayload{ + PostID: "underpaid", + ContentHash: h[:], + Size: postSize, + HostingRelay: host.PubKeyHex(), + } + // Fee too low — base alone without the size component. + // (Must still be ≥ MinFee so the chain-level block validation passes; + // the per-event CREATE_POST check is what should reject it.) + postTx := makeTx(blockchain.EventCreatePost, alice.PubKeyHex(), "", + 0, blockchain.MinFee, mustJSON(postPayload)) + b2 := buildBlock(t, b1, val, []*blockchain.Transaction{postTx}) + mustAddBlock(t, c, b2) // block commits, the tx is skipped (logged) + + if rec, _ := c.Post("underpaid"); rec != nil { + t.Fatalf("post was stored despite insufficient fee: %+v", rec) + } +} + +// TestFeedFollowUnfollow: follow graph round-trips via indices. 
+func TestFeedFollowUnfollow(t *testing.T) { + c := newChain(t) + val := newIdentity(t) + alice := newIdentity(t) + bob := newIdentity(t) + + genesis := addGenesis(t, c, val) + fundAlice := makeTx(blockchain.EventTransfer, val.PubKeyHex(), alice.PubKeyHex(), + 5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + b1 := buildBlock(t, genesis, val, []*blockchain.Transaction{fundAlice}) + mustAddBlock(t, c, b1) + + followTx := makeTx(blockchain.EventFollow, alice.PubKeyHex(), bob.PubKeyHex(), + 0, blockchain.MinFee, mustJSON(blockchain.FollowPayload{})) + b2 := buildBlock(t, b1, val, []*blockchain.Transaction{followTx}) + mustAddBlock(t, c, b2) + + following, _ := c.Following(alice.PubKeyHex()) + if len(following) != 1 || following[0] != bob.PubKeyHex() { + t.Errorf("Following: got %v, want [%s]", following, bob.PubKeyHex()) + } + followers, _ := c.Followers(bob.PubKeyHex()) + if len(followers) != 1 || followers[0] != alice.PubKeyHex() { + t.Errorf("Followers: got %v, want [%s]", followers, alice.PubKeyHex()) + } + + // Unfollow. + unfollowTx := makeTx(blockchain.EventUnfollow, alice.PubKeyHex(), bob.PubKeyHex(), + 0, blockchain.MinFee, mustJSON(blockchain.UnfollowPayload{})) + b3 := buildBlock(t, b2, val, []*blockchain.Transaction{unfollowTx}) + mustAddBlock(t, c, b3) + + following, _ = c.Following(alice.PubKeyHex()) + if len(following) != 0 { + t.Errorf("Following after unfollow: got %v, want []", following) + } +} + +// TestFeedLikeUnlike: like toggles + cached count stays consistent. 
+func TestFeedLikeUnlike(t *testing.T) { + c := newChain(t) + val := newIdentity(t) + alice := newIdentity(t) // author + bob := newIdentity(t) // liker + host := newIdentity(t) + + genesis := addGenesis(t, c, val) + + const postSize = uint64(100) + expectedPostFee := blockchain.BasePostFee + postSize*blockchain.PostByteFee + fundAlice := makeTx(blockchain.EventTransfer, val.PubKeyHex(), alice.PubKeyHex(), + expectedPostFee+5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + time.Sleep(2 * time.Millisecond) + fundBob := makeTx(blockchain.EventTransfer, val.PubKeyHex(), bob.PubKeyHex(), + 5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + b1 := buildBlock(t, genesis, val, []*blockchain.Transaction{fundAlice, fundBob}) + mustAddBlock(t, c, b1) + + h := sha256.Sum256([]byte("likeable")) + postTx := makeTx(blockchain.EventCreatePost, alice.PubKeyHex(), "", + 0, expectedPostFee, + mustJSON(blockchain.CreatePostPayload{ + PostID: "p1", ContentHash: h[:], Size: postSize, HostingRelay: host.PubKeyHex(), + })) + b2 := buildBlock(t, b1, val, []*blockchain.Transaction{postTx}) + mustAddBlock(t, c, b2) + + likeTx := makeTx(blockchain.EventLikePost, bob.PubKeyHex(), "", + 0, blockchain.MinFee, mustJSON(blockchain.LikePostPayload{PostID: "p1"})) + b3 := buildBlock(t, b2, val, []*blockchain.Transaction{likeTx}) + mustAddBlock(t, c, b3) + + n, _ := c.LikeCount("p1") + if n != 1 { + t.Errorf("LikeCount after like: got %d, want 1", n) + } + liked, _ := c.HasLiked("p1", bob.PubKeyHex()) + if !liked { + t.Errorf("HasLiked after like: got false") + } + + // Duplicate like — tx is skipped; counter stays at 1. 
+ dupTx := makeTx(blockchain.EventLikePost, bob.PubKeyHex(), "", + 0, blockchain.MinFee, mustJSON(blockchain.LikePostPayload{PostID: "p1"})) + b4 := buildBlock(t, b3, val, []*blockchain.Transaction{dupTx}) + mustAddBlock(t, c, b4) + if n2, _ := c.LikeCount("p1"); n2 != 1 { + t.Errorf("LikeCount after duplicate: got %d, want 1 (tx should have been skipped)", n2) + } + + unlikeTx := makeTx(blockchain.EventUnlikePost, bob.PubKeyHex(), "", + 0, blockchain.MinFee, mustJSON(blockchain.UnlikePostPayload{PostID: "p1"})) + b5 := buildBlock(t, b4, val, []*blockchain.Transaction{unlikeTx}) + mustAddBlock(t, c, b5) + + n, _ = c.LikeCount("p1") + if n != 0 { + t.Errorf("LikeCount after unlike: got %d, want 0", n) + } +} + +// TestFeedDeletePostByOther: only the author may delete their post. +func TestFeedDeletePostByOther(t *testing.T) { + c := newChain(t) + val := newIdentity(t) + alice := newIdentity(t) + mallory := newIdentity(t) // tries to delete alice's post + host := newIdentity(t) + + genesis := addGenesis(t, c, val) + const postSize = uint64(100) + fee := blockchain.BasePostFee + postSize*blockchain.PostByteFee + fundAlice := makeTx(blockchain.EventTransfer, val.PubKeyHex(), alice.PubKeyHex(), + fee+5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + time.Sleep(2 * time.Millisecond) + fundMallory := makeTx(blockchain.EventTransfer, val.PubKeyHex(), mallory.PubKeyHex(), + 5*blockchain.MinFee, blockchain.MinFee, mustJSON(blockchain.TransferPayload{})) + b1 := buildBlock(t, genesis, val, []*blockchain.Transaction{fundAlice, fundMallory}) + mustAddBlock(t, c, b1) + + h := sha256.Sum256([]byte("body")) + postTx := makeTx(blockchain.EventCreatePost, alice.PubKeyHex(), "", 0, fee, + mustJSON(blockchain.CreatePostPayload{ + PostID: "p1", ContentHash: h[:], Size: postSize, HostingRelay: host.PubKeyHex(), + })) + b2 := buildBlock(t, b1, val, []*blockchain.Transaction{postTx}) + mustAddBlock(t, c, b2) + + // Mallory tries to delete alice's post — 
block commits, tx is skipped. + delTx := makeTx(blockchain.EventDeletePost, mallory.PubKeyHex(), "", 0, blockchain.MinFee, + mustJSON(blockchain.DeletePostPayload{PostID: "p1"})) + b3 := buildBlock(t, b2, val, []*blockchain.Transaction{delTx}) + mustAddBlock(t, c, b3) + rec, _ := c.Post("p1") + if rec == nil || rec.Deleted { + t.Fatalf("post was deleted by non-author: %+v", rec) + } +} + +// silence unused-import lint if fmt ever gets trimmed from the feed tests. +var _ = fmt.Sprintf diff --git a/blockchain/types.go b/blockchain/types.go index 471d413..25f8647 100644 --- a/blockchain/types.go +++ b/blockchain/types.go @@ -11,8 +11,6 @@ type EventType string const ( EventRegisterKey EventType = "REGISTER_KEY" - EventCreateChannel EventType = "CREATE_CHANNEL" - EventAddMember EventType = "ADD_MEMBER" EventOpenPayChan EventType = "OPEN_PAY_CHAN" EventClosePayChan EventType = "CLOSE_PAY_CHAN" EventTransfer EventType = "TRANSFER" @@ -37,6 +35,17 @@ const ( EventMintNFT EventType = "MINT_NFT" // mint a new non-fungible token EventTransferNFT EventType = "TRANSFER_NFT" // transfer NFT ownership EventBurnNFT EventType = "BURN_NFT" // burn (destroy) an NFT + + // ── Social feed (v2.0.0) ────────────────────────────────────────────── + // Replaces the old channel model with a VK/Twitter-style timeline. + // Posts are plaintext, publicly readable, size-priced. Bodies live in + // the relay feed-mailbox; on-chain we only keep metadata + author. + EventCreatePost EventType = "CREATE_POST" // author publishes a post + EventDeletePost EventType = "DELETE_POST" // author soft-deletes their post + EventFollow EventType = "FOLLOW" // follow another author's feed + EventUnfollow EventType = "UNFOLLOW" // unfollow an author + EventLikePost EventType = "LIKE_POST" // like a post + EventUnlikePost EventType = "UNLIKE_POST" // remove a previous like ) // Token amounts are stored in micro-tokens (µT). 
@@ -64,6 +73,31 @@ const ( // MinContactFee is the minimum amount a sender must pay the recipient when // submitting an EventContactRequest (anti-spam; goes directly to recipient). MinContactFee uint64 = 5_000 // 0.005 T + + // ── Feed pricing (v2.0.0) ───────────────────────────────────────────── + // A post's on-chain fee is BasePostFee + bytes_on_disk × PostByteFee. + // The fee is paid by the author and credited in full to the hosting + // relay (the node that received POST /feed/publish and stored the body). + // Size-based pricing is what aligns incentives: a 200-byte tweet is + // cheap, a 256 KB video costs ~0.26 T — node operators' storage cost + // is covered. + // + // Note: BasePostFee is set to MinFee (1000 µT) because chain-level block + // validation requires every tx's Fee ≥ MinFee. So the true minimum a + // post can cost is MinFee + size × PostByteFee. A 0-byte post is + // rejected (Size must be > 0), so in practice a ~50-byte text post + // costs ~1050 µT (~$0.001 depending on token price). + BasePostFee uint64 = 1_000 // 0.001 T flat per post — aligned with MinFee floor + PostByteFee uint64 = 1 // 1 µT per byte of stored content + + // MaxPostSize caps a single post's on-wire size (text + attachment, post + // compression). Hard limit — node refuses larger envelopes to protect + // storage and bandwidth. + MaxPostSize uint64 = 256 * 1024 // 256 KiB + + // LikeFee / FollowFee / UnlikeFee / UnfollowFee / DeletePostFee all use + // MinFee (1000 µT) — standard tx fee paid to the validator. No extra + // cost; these events carry no body. ) // Transaction is the atomic unit recorded in a block. @@ -90,11 +124,66 @@ type RegisterKeyPayload struct { X25519PubKey string `json:"x25519_pub_key,omitempty"` // hex Curve25519 key for E2E messaging } -// CreateChannelPayload is embedded in EventCreateChannel transactions. 
-type CreateChannelPayload struct { - ChannelID string `json:"channel_id"` - Title string `json:"title"` - IsPublic bool `json:"is_public"` +// ── Feed payloads (v2.0.0) ───────────────────────────────────────────────── + +// CreatePostPayload is embedded in EventCreatePost transactions. The body +// itself is NOT stored on-chain — it lives in the relay feed-mailbox keyed +// by PostID. On-chain we only keep author, size, hash, timestamp and any +// reply/quote reference for ordering and proof of authorship. +// +// PostID is computed client-side as hex(sha256(author || content_hash || ts)[:16]) +// — same scheme as envelope IDs. Clients include it so the relay can store +// the body under a stable key before the chain commit lands. +// +// HostingRelay is the node pubkey (Ed25519 hex) that accepted the POST +// /feed/publish call and holds the body. Readers resolve it via the chain +// and fetch the body directly from that relay (or via gossipsub replicas). +// The fee is credited to this pub. +// +// QuoteOf / ReplyTo are mutually exclusive; set at most one. ReplyTo makes +// the post a reply in a thread; QuoteOf creates a link/reference block. +type CreatePostPayload struct { + PostID string `json:"post_id"` + ContentHash []byte `json:"content_hash"` // sha256 of body-bytes, 32 B + Size uint64 `json:"size"` // bytes on disk (compressed) + HostingRelay string `json:"hosting_relay"` // hex Ed25519 of storing node + ReplyTo string `json:"reply_to,omitempty"` // parent post ID + QuoteOf string `json:"quote_of,omitempty"` // referenced post ID +} + +// DeletePostPayload — author soft-deletes their own post. Stored marker +// lets clients hide the post; relay can GC the body on the next sweep. +type DeletePostPayload struct { + PostID string `json:"post_id"` +} + +// FollowPayload / UnfollowPayload — follow graph. tx.From = follower, +// tx.To = target. No body. 
+type FollowPayload struct{} +type UnfollowPayload struct{} + +// LikePostPayload / UnlikePostPayload — per-post like indicator. tx.From +// = liker. The counter is derived on read. +type LikePostPayload struct { + PostID string `json:"post_id"` +} +type UnlikePostPayload struct { + PostID string `json:"post_id"` +} + +// PostRecord is what we store on-chain under post:. Consumers of +// PostsByAuthor / query endpoints decode this. +type PostRecord struct { + PostID string `json:"post_id"` + Author string `json:"author"` // hex Ed25519 + ContentHash []byte `json:"content_hash"` + Size uint64 `json:"size"` + HostingRelay string `json:"hosting_relay"` + ReplyTo string `json:"reply_to,omitempty"` + QuoteOf string `json:"quote_of,omitempty"` + CreatedAt int64 `json:"created_at"` // unix seconds (tx timestamp) + Deleted bool `json:"deleted,omitempty"` + FeeUT uint64 `json:"fee_ut"` // total fee paid } // RegisterRelayPayload is embedded in EventRegisterRelay transactions. @@ -241,24 +330,6 @@ type BlockContactPayload struct { Reason string `json:"reason,omitempty"` } -// ChannelMember records a participant in a channel together with their -// X25519 public key. The key is cached on-chain (written during ADD_MEMBER) -// so channel senders don't have to fan out a separate /api/identity lookup -// per recipient on every message — they GET /api/channels/:id/members -// once and seal N envelopes in a loop. -type ChannelMember struct { - PubKey string `json:"pub_key"` // Ed25519 hex - X25519PubKey string `json:"x25519_pub_key"` // optional; empty if member hasn't registered - Address string `json:"address"` -} - -// AddMemberPayload is embedded in EventAddMember transactions. -// tx.From adds tx.To as a member of the specified channel. -// If tx.To is empty, tx.From is added (self-join for public channels). -type AddMemberPayload struct { - ChannelID string `json:"channel_id"` -} - // AddValidatorPayload is embedded in EventAddValidator transactions. 
// tx.From must already be a validator; tx.To is the new validator's pub key. // diff --git a/cmd/node/main.go b/cmd/node/main.go index f6d9afc..fbaeba6 100644 --- a/cmd/node/main.go +++ b/cmd/node/main.go @@ -854,8 +854,6 @@ func main() { GetNFT: chain.NFT, GetNFTs: chain.NFTs, NFTsByOwner: chain.NFTsByOwner, - GetChannel: chain.Channel, - GetChannelMembers: chain.ChannelMembers, Events: sseHub, WS: wsHub, // Onboarding: expose libp2p peers + chain_id so new nodes/clients can diff --git a/node/api_channels.go b/node/api_channels.go deleted file mode 100644 index 21cf604..0000000 --- a/node/api_channels.go +++ /dev/null @@ -1,102 +0,0 @@ -// Package node — channel endpoints. -// -// `/api/channels/:id/members` returns every Ed25519 pubkey registered as a -// channel member together with their current X25519 pubkey (from the -// identity registry). Clients sealing a message to a channel iterate this -// list and call relay.Seal once per recipient — that's the "fan-out" -// group-messaging model (R1 in the roadmap). -// -// Why enrich with X25519 here rather than making the client do it? -// - One HTTP round trip vs N. At 10+ members the latency difference is -// significant over mobile networks. -// - The server already holds the identity state; no extra DB hops. -// - Clients get a stable, already-joined view — if a member hasn't -// published an X25519 key yet, we return them with `x25519_pub_key=""` -// so the caller knows to skip or retry later. -package node - -import ( - "fmt" - "net/http" - "strings" - - "go-blockchain/blockchain" - "go-blockchain/wallet" -) - -func registerChannelAPI(mux *http.ServeMux, q ExplorerQuery) { - // GET /api/channels/{id} → channel metadata - // GET /api/channels/{id}/members → enriched member list - // - // One HandleFunc deals with both by sniffing the path suffix. 
- mux.HandleFunc("/api/channels/", func(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet { - jsonErr(w, fmt.Errorf("method not allowed"), 405) - return - } - path := strings.TrimPrefix(r.URL.Path, "/api/channels/") - path = strings.Trim(path, "/") - if path == "" { - jsonErr(w, fmt.Errorf("channel id required"), 400) - return - } - switch { - case strings.HasSuffix(path, "/members"): - id := strings.TrimSuffix(path, "/members") - handleChannelMembers(w, q, id) - default: - handleChannelInfo(w, q, path) - } - }) -} - -func handleChannelInfo(w http.ResponseWriter, q ExplorerQuery, channelID string) { - if q.GetChannel == nil { - jsonErr(w, fmt.Errorf("channel queries not configured"), 503) - return - } - ch, err := q.GetChannel(channelID) - if err != nil { - jsonErr(w, err, 500) - return - } - if ch == nil { - jsonErr(w, fmt.Errorf("channel %s not found", channelID), 404) - return - } - jsonOK(w, ch) -} - -func handleChannelMembers(w http.ResponseWriter, q ExplorerQuery, channelID string) { - if q.GetChannelMembers == nil { - jsonErr(w, fmt.Errorf("channel queries not configured"), 503) - return - } - pubs, err := q.GetChannelMembers(channelID) - if err != nil { - jsonErr(w, err, 500) - return - } - out := make([]blockchain.ChannelMember, 0, len(pubs)) - for _, pub := range pubs { - member := blockchain.ChannelMember{ - PubKey: pub, - Address: wallet.PubKeyToAddress(pub), - } - // Best-effort X25519 lookup — skip silently on miss so a member - // who hasn't published their identity yet doesn't prevent the - // whole list from returning. The sender will just skip them on - // fan-out and retry later (after that member does register). 
- if q.IdentityInfo != nil { - if info, err := q.IdentityInfo(pub); err == nil && info != nil { - member.X25519PubKey = info.X25519Pub - } - } - out = append(out, member) - } - jsonOK(w, map[string]any{ - "channel_id": channelID, - "count": len(out), - "members": out, - }) -} diff --git a/node/api_routes.go b/node/api_routes.go index 7564338..5f32321 100644 --- a/node/api_routes.go +++ b/node/api_routes.go @@ -79,12 +79,6 @@ type ExplorerQuery struct { GetNFTs func() ([]blockchain.NFTRecord, error) NFTsByOwner func(ownerPub string) ([]blockchain.NFTRecord, error) - // Channel group-messaging lookups (R1). GetChannel returns metadata; - // GetChannelMembers returns the Ed25519 pubkey of every current member. - // Both may be nil on nodes that don't expose channel state (tests). - GetChannel func(channelID string) (*blockchain.CreateChannelPayload, error) - GetChannelMembers func(channelID string) ([]string, error) - // Events is the SSE hub for the live event stream. Optional — if nil the // /api/events endpoint returns 501 Not Implemented. Events *SSEHub @@ -127,7 +121,6 @@ func RegisterExplorerRoutes(mux *http.ServeMux, q ExplorerQuery, flags ...Explor registerUpdateCheckAPI(mux, q) registerOnboardingAPI(mux, q) registerTokenAPI(mux, q) - registerChannelAPI(mux, q) if !f.DisableSwagger { registerSwaggerRoutes(mux) } From 126658f2945ed59beacd1132c2ca0d4c3424a20b Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 18:52:22 +0300 Subject: [PATCH 07/27] feat(feed): relay body storage + HTTP endpoints (Phase B of v2.0.0) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase A (the previous commit) added the on-chain foundations. Phase B is the off-chain layer: post bodies live in a BadgerDB-backed feed mailbox, and a full HTTP surface makes the feed usable from clients. 
New components relay/feed_mailbox.go (+ tests) - FeedPost: body + content-type + attachment + hashtags + thread refs - Store / Get / Delete with TTL-bounded eviction (30 days default) - View counter (IncrementView / ViewCount) — off-chain because one tx per view would be nonsense - Hashtag inverted index: auto-extracts #tokens from content on Store, lowercased + deduped + capped at 8/post - Author chrono index: PostsByAuthor returns newest-first IDs - RecentPostIDs: scan-by-age helper used by trending/foryou node/api_feed.go POST /feed/publish — author-signed body upload, returns post_id + content_hash + size + hashtags + estimated fee for the follow-up on-chain CREATE_POST tx GET /feed/post/{id} — fetch body (respects on-chain soft delete, returns 410 when deleted) GET /feed/post/{id}/stats — {views, likes, liked_by_me?} POST /feed/post/{id}/view — bump the counter GET /feed/author/{pub} — chain-authoritative post list enriched with body + stats GET /feed/timeline — merged feed from people the user follows (reads chain.Following, fetches each author's recent posts) GET /feed/trending — top-scored posts in last 24h (score = likes × 3 + views) GET /feed/foryou — simple recommendations: recent posts minus authors the user already follows, already-liked posts, and own posts; ranked by engagement GET /feed/hashtag/{tag} — posts tagged with the given #tag cmd/node/main.go wiring - --feed-db flag (DCHAIN_FEED_DB) + --feed-ttl-days (DCHAIN_FEED_TTL_DAYS) - Opens FeedMailbox + registers FeedRoutes alongside RelayRoutes - Threads chain.Post / LikeCount / HasLiked / PostsByAuthor / Following into FeedConfig so HTTP handlers can merge on-chain metadata with off-chain body+stats. Auth & safety - POST /feed/publish: Ed25519 signature over "publish:: :"; ±5-minute skew window for anti-replay. - content_hash binds body to the on-chain tx — you can't publish body-A off-chain and commit hash-of-body-B on-chain. 
- Writes wrapped in withSubmitTxGuards (rate-limit + size cap), reads in withReadLimit — same guards as /relay. Trending / recommendations - V1 heuristic (likes × 3 + views) + time window. Documented as v2.2.0 "Feed algorithm" candidate for a proper ranking layer (half-life decay, follow-of-follow boost, hashtag collaborative). Tests - Store round-trip, size enforcement, hashtag indexing (case-insensitive + dedup), view counter increments, author chrono order, delete cleans all indices, RecentPostIDs time-window filter. - Full go test ./... is green (blockchain + consensus + identity + relay + vm all pass). Next (Phase C): client Feed tab — composer, timeline, post detail, profile follow, For You + Trending screens. Co-Authored-By: Claude Opus 4.7 (1M context) --- cmd/node/main.go | 22 ++ node/api_feed.go | 654 +++++++++++++++++++++++++++++++++++++ relay/feed_mailbox.go | 431 ++++++++++++++++++++++++ relay/feed_mailbox_test.go | 198 +++++++++++ 4 files changed, 1305 insertions(+) create mode 100644 node/api_feed.go create mode 100644 relay/feed_mailbox.go create mode 100644 relay/feed_mailbox_test.go diff --git a/cmd/node/main.go b/cmd/node/main.go index fbaeba6..99aa613 100644 --- a/cmd/node/main.go +++ b/cmd/node/main.go @@ -77,6 +77,8 @@ func main() { registerRelay := flag.Bool("register-relay", envBoolOr("DCHAIN_REGISTER_RELAY", false), "submit REGISTER_RELAY tx on startup (env: DCHAIN_REGISTER_RELAY)") relayFee := flag.Uint64("relay-fee", envUint64Or("DCHAIN_RELAY_FEE", 1_000), "relay fee per message in µT (env: DCHAIN_RELAY_FEE)") mailboxDB := flag.String("mailbox-db", envOr("DCHAIN_MAILBOX_DB", "./mailboxdata"), "BadgerDB directory for relay mailbox (env: DCHAIN_MAILBOX_DB)") + feedDB := flag.String("feed-db", envOr("DCHAIN_FEED_DB", "./feeddata"), "BadgerDB directory for social-feed post bodies (env: DCHAIN_FEED_DB)") + feedTTLDays := flag.Int("feed-ttl-days", int(envUint64Or("DCHAIN_FEED_TTL_DAYS", 30)), "how long feed posts are retained before 
auto-eviction (env: DCHAIN_FEED_TTL_DAYS)") govContractID := flag.String("governance-contract", envOr("DCHAIN_GOVERNANCE_CONTRACT", ""), "governance contract ID for dynamic chain parameters (env: DCHAIN_GOVERNANCE_CONTRACT)") joinSeedURL := flag.String("join", envOr("DCHAIN_JOIN", ""), "bootstrap from a running node: comma-separated HTTP URLs (env: DCHAIN_JOIN)") // Observer mode: the node participates in the P2P network, applies @@ -634,6 +636,15 @@ func main() { go mailbox.RunGC() log.Printf("[NODE] relay mailbox: %s", *mailboxDB) + // --- Feed mailbox (social-feed post bodies, v2.0.0) --- + feedTTL := time.Duration(*feedTTLDays) * 24 * time.Hour + feedMailbox, err := relay.OpenFeedMailbox(*feedDB, feedTTL) + if err != nil { + log.Fatalf("[NODE] feed mailbox: %v", err) + } + defer feedMailbox.Close() + log.Printf("[NODE] feed mailbox: %s (TTL %d days)", *feedDB, *feedTTLDays) + // Push-notify bus consumers whenever a fresh envelope lands in the // mailbox. Clients subscribed to `inbox:` (via WS) get the // event immediately so they no longer need to poll /relay/inbox. @@ -927,6 +938,16 @@ func main() { }, } + feedConfig := node.FeedConfig{ + Mailbox: feedMailbox, + HostingRelayPub: id.PubKeyHex(), + GetPost: chain.Post, + LikeCount: chain.LikeCount, + HasLiked: chain.HasLiked, + PostsByAuthor: chain.PostsByAuthor, + Following: chain.Following, + } + go func() { log.Printf("[NODE] stats API: http://0.0.0.0%s/stats", *statsAddr) if *disableUI { @@ -947,6 +968,7 @@ func main() { if err := stats.ListenAndServe(*statsAddr, statsQuery, func(mux *http.ServeMux) { node.RegisterExplorerRoutes(mux, explorerQuery, routeFlags) node.RegisterRelayRoutes(mux, relayConfig) + node.RegisterFeedRoutes(mux, feedConfig) // POST /api/governance/link — link deployed contracts at runtime. 
// Body: {"governance": ""} diff --git a/node/api_feed.go b/node/api_feed.go new file mode 100644 index 0000000..c4fbe54 --- /dev/null +++ b/node/api_feed.go @@ -0,0 +1,654 @@ +package node + +// Feed HTTP endpoints (v2.0.0). +// +// Mount points: +// +// POST /feed/publish — store a post body (authenticated) +// GET /feed/post/{id} — fetch a post body +// GET /feed/post/{id}/stats — {views, likes, liked_by_me?} aggregate +// POST /feed/post/{id}/view — increment off-chain view counter +// GET /feed/author/{pub} — ?limit=N, posts by an author +// GET /feed/timeline — ?follower=&limit=N, merged feed of follows +// GET /feed/trending — ?window=h&limit=N, top by likes + views +// GET /feed/foryou — ?pub=&limit=N, recommendations +// GET /feed/hashtag/{tag} — posts matching a hashtag +// +// Publish flow: +// 1. Client POSTs {content, attachment, post_id, author, sig, ts}. +// 2. Node verifies sig (Ed25519 over canonical bytes), hashes body, +// stores in FeedMailbox, returns hosting_relay + content_hash + size. +// 3. Client then submits on-chain CREATE_POST tx with that metadata. +// Node charges the fee (base + size×byte_fee) and credits the relay. +// 4. Subsequent GET /feed/post/{id} serves the stored body to anyone. +// +// Why the split? On-chain metadata gives us provable authorship + the +// pay-for-storage incentive; off-chain body storage keeps the block +// history small. If the hosting relay dies, the on-chain record stays +// (with a "body unavailable" fallback on the reader side) — authors can +// re-publish to another relay. + +import ( + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "net/http" + "sort" + "strings" + "time" + + "go-blockchain/blockchain" + "go-blockchain/identity" + "go-blockchain/relay" +) + +// FeedConfig wires feed HTTP endpoints to the relay mailbox and the +// chain for read-after-write queries. 
+type FeedConfig struct { + Mailbox *relay.FeedMailbox + + // HostingRelayPub is this node's Ed25519 pubkey — returned from + // /feed/publish so the client knows who to put in CREATE_POST tx. + HostingRelayPub string + + // Chain lookups (nil-safe; endpoints degrade gracefully). + GetPost func(postID string) (*blockchain.PostRecord, error) + LikeCount func(postID string) (uint64, error) + HasLiked func(postID, likerPub string) (bool, error) + PostsByAuthor func(authorPub string, limit int) ([]*blockchain.PostRecord, error) + Following func(followerPub string) ([]string, error) +} + +// RegisterFeedRoutes wires feed endpoints onto mux. Writes are rate-limited +// via withSubmitTxGuards; reads via withReadLimit (same limiters as /relay). +func RegisterFeedRoutes(mux *http.ServeMux, cfg FeedConfig) { + if cfg.Mailbox == nil { + return + } + mux.HandleFunc("/feed/publish", withSubmitTxGuards(feedPublish(cfg))) + mux.HandleFunc("/feed/post/", withReadLimit(feedPostRouter(cfg))) + mux.HandleFunc("/feed/author/", withReadLimit(feedAuthor(cfg))) + mux.HandleFunc("/feed/timeline", withReadLimit(feedTimeline(cfg))) + mux.HandleFunc("/feed/trending", withReadLimit(feedTrending(cfg))) + mux.HandleFunc("/feed/foryou", withReadLimit(feedForYou(cfg))) + mux.HandleFunc("/feed/hashtag/", withReadLimit(feedHashtag(cfg))) +} + +// ── POST /feed/publish ──────────────────────────────────────────────────── + +// feedPublishRequest — what the client sends. Signature is Ed25519 over +// canonical bytes: "publish:::". +// ts must be within ±5 minutes of server clock. 
+type feedPublishRequest struct { + PostID string `json:"post_id"` + Author string `json:"author"` // hex Ed25519 + Content string `json:"content"` + ContentType string `json:"content_type,omitempty"` + AttachmentB64 string `json:"attachment_b64,omitempty"` + AttachmentMIME string `json:"attachment_mime,omitempty"` + ReplyTo string `json:"reply_to,omitempty"` + QuoteOf string `json:"quote_of,omitempty"` + Sig string `json:"sig"` // base64 Ed25519 sig + Ts int64 `json:"ts"` +} + +type feedPublishResponse struct { + PostID string `json:"post_id"` + HostingRelay string `json:"hosting_relay"` + ContentHash string `json:"content_hash"` // hex sha256 + Size uint64 `json:"size"` + Hashtags []string `json:"hashtags"` + EstimatedFeeUT uint64 `json:"estimated_fee_ut"` // base + size*byte_fee +} + +func feedPublish(cfg FeedConfig) http.HandlerFunc { + const publishSkewSecs = 300 + + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + var req feedPublishRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + jsonErr(w, fmt.Errorf("invalid JSON: %w", err), 400) + return + } + if req.PostID == "" || req.Author == "" || req.Sig == "" || req.Ts == 0 { + jsonErr(w, fmt.Errorf("post_id, author, sig, ts are required"), 400) + return + } + if req.Content == "" && req.AttachmentB64 == "" { + jsonErr(w, fmt.Errorf("post must have content or attachment"), 400) + return + } + now := time.Now().Unix() + if req.Ts < now-publishSkewSecs || req.Ts > now+publishSkewSecs { + jsonErr(w, fmt.Errorf("ts out of range (±%ds)", publishSkewSecs), 400) + return + } + if req.ReplyTo != "" && req.QuoteOf != "" { + jsonErr(w, fmt.Errorf("reply_to and quote_of are mutually exclusive"), 400) + return + } + + // Decode attachment. 
+ var attachment []byte + if req.AttachmentB64 != "" { + b, err := base64.StdEncoding.DecodeString(req.AttachmentB64) + if err != nil { + if b, err = base64.RawURLEncoding.DecodeString(req.AttachmentB64); err != nil { + jsonErr(w, fmt.Errorf("attachment_b64: invalid base64"), 400) + return + } + } + attachment = b + } + + // Content hash binds the body to the on-chain metadata. We hash + // content+attachment so the client can't publish body-A off-chain + // and commit hash-of-body-B on-chain. + h := sha256.New() + h.Write([]byte(req.Content)) + h.Write(attachment) + contentHash := h.Sum(nil) + contentHashHex := hex.EncodeToString(contentHash) + + // Verify the author's signature over the canonical publish bytes. + msg := []byte(fmt.Sprintf("publish:%s:%s:%d", req.PostID, contentHashHex, req.Ts)) + sigBytes, err := base64.StdEncoding.DecodeString(req.Sig) + if err != nil { + if sigBytes, err = base64.RawURLEncoding.DecodeString(req.Sig); err != nil { + jsonErr(w, fmt.Errorf("sig: invalid base64"), 400) + return + } + } + if _, err := hex.DecodeString(req.Author); err != nil { + jsonErr(w, fmt.Errorf("author: invalid hex"), 400) + return + } + ok, err := identity.Verify(req.Author, msg, sigBytes) + if err != nil || !ok { + jsonErr(w, fmt.Errorf("signature invalid"), 403) + return + } + + post := &relay.FeedPost{ + PostID: req.PostID, + Author: req.Author, + Content: req.Content, + ContentType: req.ContentType, + Attachment: attachment, + AttachmentMIME: req.AttachmentMIME, + ReplyTo: req.ReplyTo, + QuoteOf: req.QuoteOf, + } + hashtags, err := cfg.Mailbox.Store(post, req.Ts) + if err != nil { + if err == relay.ErrPostTooLarge { + jsonErr(w, err, 413) + return + } + jsonErr(w, err, 500) + return + } + + // Report what the client should put into CREATE_POST. 
+ size := uint64(len(req.Content)) + uint64(len(attachment)) + 128 + fee := blockchain.BasePostFee + size*blockchain.PostByteFee + jsonOK(w, feedPublishResponse{ + PostID: req.PostID, + HostingRelay: cfg.HostingRelayPub, + ContentHash: contentHashHex, + Size: size, + Hashtags: hashtags, + EstimatedFeeUT: fee, + }) + } +} + +// ── GET /feed/post/{id} [+ /stats subroute, POST /view] ───────────────── + +// feedPostRouter dispatches /feed/post/{id}, /feed/post/{id}/stats, +// /feed/post/{id}/view to the right handler. +func feedPostRouter(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + rest := strings.TrimPrefix(r.URL.Path, "/feed/post/") + rest = strings.Trim(rest, "/") + if rest == "" { + jsonErr(w, fmt.Errorf("post id required"), 400) + return + } + parts := strings.Split(rest, "/") + postID := parts[0] + if len(parts) == 1 { + feedGetPost(cfg)(w, r, postID) + return + } + switch parts[1] { + case "stats": + feedPostStats(cfg)(w, r, postID) + case "view": + feedPostView(cfg)(w, r, postID) + default: + jsonErr(w, fmt.Errorf("unknown sub-route %q", parts[1]), 404) + } + } +} + +type postHandler func(w http.ResponseWriter, r *http.Request, postID string) + +func feedGetPost(cfg FeedConfig) postHandler { + return func(w http.ResponseWriter, r *http.Request, postID string) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + post, err := cfg.Mailbox.Get(postID) + if err != nil { + jsonErr(w, err, 500) + return + } + if post == nil { + jsonErr(w, fmt.Errorf("post %s not found", postID), 404) + return + } + // Respect on-chain soft-delete. 
+ if cfg.GetPost != nil { + if rec, _ := cfg.GetPost(postID); rec != nil && rec.Deleted { + jsonErr(w, fmt.Errorf("post %s deleted", postID), 410) + return + } + } + jsonOK(w, post) + } +} + +type postStatsResponse struct { + PostID string `json:"post_id"` + Views uint64 `json:"views"` + Likes uint64 `json:"likes"` + LikedByMe *bool `json:"liked_by_me,omitempty"` // set only when ?me= given +} + +func feedPostStats(cfg FeedConfig) postHandler { + return func(w http.ResponseWriter, r *http.Request, postID string) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + views, _ := cfg.Mailbox.ViewCount(postID) + var likes uint64 + if cfg.LikeCount != nil { + likes, _ = cfg.LikeCount(postID) + } + resp := postStatsResponse{ + PostID: postID, + Views: views, + Likes: likes, + } + if me := r.URL.Query().Get("me"); me != "" && cfg.HasLiked != nil { + if liked, err := cfg.HasLiked(postID, me); err == nil { + resp.LikedByMe = &liked + } + } + jsonOK(w, resp) + } +} + +func feedPostView(cfg FeedConfig) postHandler { + return func(w http.ResponseWriter, r *http.Request, postID string) { + if r.Method != http.MethodPost { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + next, err := cfg.Mailbox.IncrementView(postID) + if err != nil { + jsonErr(w, err, 500) + return + } + jsonOK(w, map[string]any{ + "post_id": postID, + "views": next, + }) + } +} + +// ── GET /feed/author/{pub} ──────────────────────────────────────────────── + +func feedAuthor(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + pub := strings.TrimPrefix(r.URL.Path, "/feed/author/") + pub = strings.Trim(pub, "/") + if pub == "" { + jsonErr(w, fmt.Errorf("author pub required"), 400) + return + } + limit := queryInt(r, "limit", 50) + + // Prefer chain-authoritative list (includes soft-deleted flag) so + // 
clients can't be fooled by a stale relay that has an already- + // deleted post. If chain isn't wired, fall back to relay index. + if cfg.PostsByAuthor != nil { + records, err := cfg.PostsByAuthor(pub, limit) + if err != nil { + jsonErr(w, err, 500) + return + } + out := make([]feedAuthorItem, 0, len(records)) + for _, rec := range records { + if rec == nil || rec.Deleted { + continue + } + out = append(out, buildAuthorItem(cfg, rec)) + } + jsonOK(w, map[string]any{"author": pub, "count": len(out), "posts": out}) + return + } + ids, err := cfg.Mailbox.PostsByAuthor(pub, limit) + if err != nil { + jsonErr(w, err, 500) + return + } + out := expandByID(cfg, ids) + jsonOK(w, map[string]any{"author": pub, "count": len(out), "posts": out}) + } +} + +// feedAuthorItem is a chain record enriched with the body and live stats. +type feedAuthorItem struct { + PostID string `json:"post_id"` + Author string `json:"author"` + Content string `json:"content,omitempty"` + ContentType string `json:"content_type,omitempty"` + Hashtags []string `json:"hashtags,omitempty"` + ReplyTo string `json:"reply_to,omitempty"` + QuoteOf string `json:"quote_of,omitempty"` + CreatedAt int64 `json:"created_at"` + Size uint64 `json:"size"` + HostingRelay string `json:"hosting_relay"` + Views uint64 `json:"views"` + Likes uint64 `json:"likes"` + HasAttachment bool `json:"has_attachment"` +} + +func buildAuthorItem(cfg FeedConfig, rec *blockchain.PostRecord) feedAuthorItem { + item := feedAuthorItem{ + PostID: rec.PostID, + Author: rec.Author, + ReplyTo: rec.ReplyTo, + QuoteOf: rec.QuoteOf, + CreatedAt: rec.CreatedAt, + Size: rec.Size, + HostingRelay: rec.HostingRelay, + } + if body, _ := cfg.Mailbox.Get(rec.PostID); body != nil { + item.Content = body.Content + item.ContentType = body.ContentType + item.Hashtags = body.Hashtags + item.HasAttachment = len(body.Attachment) > 0 + } + if cfg.LikeCount != nil { + item.Likes, _ = cfg.LikeCount(rec.PostID) + } + item.Views, _ = 
cfg.Mailbox.ViewCount(rec.PostID) + return item +} + +// expandByID fetches bodies+stats for a list of post IDs (no chain record). +func expandByID(cfg FeedConfig, ids []string) []feedAuthorItem { + out := make([]feedAuthorItem, 0, len(ids)) + for _, id := range ids { + body, _ := cfg.Mailbox.Get(id) + if body == nil { + continue + } + item := feedAuthorItem{ + PostID: id, + Author: body.Author, + Content: body.Content, + ContentType: body.ContentType, + Hashtags: body.Hashtags, + ReplyTo: body.ReplyTo, + QuoteOf: body.QuoteOf, + CreatedAt: body.CreatedAt, + HasAttachment: len(body.Attachment) > 0, + } + if cfg.LikeCount != nil { + item.Likes, _ = cfg.LikeCount(id) + } + item.Views, _ = cfg.Mailbox.ViewCount(id) + out = append(out, item) + } + return out +} + +// ── GET /feed/timeline ──────────────────────────────────────────────────── + +func feedTimeline(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + follower := r.URL.Query().Get("follower") + if follower == "" { + jsonErr(w, fmt.Errorf("follower parameter required"), 400) + return + } + if cfg.Following == nil || cfg.PostsByAuthor == nil { + jsonErr(w, fmt.Errorf("timeline requires chain queries"), 503) + return + } + limit := queryInt(r, "limit", 50) + perAuthor := limit + if perAuthor > 30 { + perAuthor = 30 + } + + following, err := cfg.Following(follower) + if err != nil { + jsonErr(w, err, 500) + return + } + var merged []*blockchain.PostRecord + for _, target := range following { + posts, err := cfg.PostsByAuthor(target, perAuthor) + if err != nil { + continue + } + for _, p := range posts { + if p != nil && !p.Deleted { + merged = append(merged, p) + } + } + } + // Sort newest-first, take top N. 
+ sort.Slice(merged, func(i, j int) bool { return merged[i].CreatedAt > merged[j].CreatedAt }) + if len(merged) > limit { + merged = merged[:limit] + } + out := make([]feedAuthorItem, 0, len(merged)) + for _, rec := range merged { + out = append(out, buildAuthorItem(cfg, rec)) + } + jsonOK(w, map[string]any{"count": len(out), "posts": out}) + } +} + +// ── GET /feed/trending ──────────────────────────────────────────────────── + +func feedTrending(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + limit := queryInt(r, "limit", 30) + // Window defaults to 24h; cap 7d so a viral post from a week ago + // doesn't permanently dominate. + windowHours := queryInt(r, "window", 24) + if windowHours > 24*7 { + windowHours = 24 * 7 + } + if windowHours < 1 { + windowHours = 1 + } + ids, err := cfg.Mailbox.RecentPostIDs(int64(windowHours)*3600, 500) + if err != nil { + jsonErr(w, err, 500) + return + } + // Score each = likes*3 + views, honoring soft-delete. 
+ type scored struct { + id string + score uint64 + } + scoredList := make([]scored, 0, len(ids)) + for _, id := range ids { + if cfg.GetPost != nil { + if rec, _ := cfg.GetPost(id); rec != nil && rec.Deleted { + continue + } + } + views, _ := cfg.Mailbox.ViewCount(id) + var likes uint64 + if cfg.LikeCount != nil { + likes, _ = cfg.LikeCount(id) + } + scoredList = append(scoredList, scored{id: id, score: likes*3 + views}) + } + sort.Slice(scoredList, func(i, j int) bool { return scoredList[i].score > scoredList[j].score }) + if len(scoredList) > limit { + scoredList = scoredList[:limit] + } + pickedIDs := make([]string, len(scoredList)) + for i, s := range scoredList { + pickedIDs[i] = s.id + } + out := expandByID(cfg, pickedIDs) + jsonOK(w, map[string]any{"count": len(out), "posts": out}) + } +} + +// ── GET /feed/foryou ────────────────────────────────────────────────────── +// +// Simple recommendations heuristic for v2.0.0: +// 1. Compute the set of authors the user already follows. +// 2. Fetch recent posts from the relay (last 48h). +// 3. Filter OUT posts from followed authors (those live in /timeline). +// 4. Filter OUT posts the user has already liked. +// 5. Rank remaining by (likes × 3 + views) and return top N. +// +// Future improvements (tracked as v2.2.0 "Feed algorithm"): +// - Weight by "followed-of-followed" signal (friends-of-friends boost). +// - Decay by age (exp half-life ~12h). +// - Penalise self-engagement (author liking own post). +// - Collaborative filtering on hashtag co-occurrence. + +func feedForYou(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + pub := r.URL.Query().Get("pub") + limit := queryInt(r, "limit", 30) + + // Gather user's follows + likes to exclude from the candidate pool. 
+ excludedAuthors := make(map[string]struct{}) + if cfg.Following != nil && pub != "" { + if list, err := cfg.Following(pub); err == nil { + for _, a := range list { + excludedAuthors[a] = struct{}{} + } + } + } + // Post pool: last 48h on this relay. + ids, err := cfg.Mailbox.RecentPostIDs(48*3600, 500) + if err != nil { + jsonErr(w, err, 500) + return + } + type scored struct { + id string + score uint64 + } + scoredList := make([]scored, 0, len(ids)) + for _, id := range ids { + body, _ := cfg.Mailbox.Get(id) + if body == nil { + continue + } + if _, followed := excludedAuthors[body.Author]; followed { + continue + } + if body.Author == pub { + continue // don't recommend user's own posts + } + if cfg.GetPost != nil { + if rec, _ := cfg.GetPost(id); rec != nil && rec.Deleted { + continue + } + } + // Skip already-liked. + if cfg.HasLiked != nil && pub != "" { + if liked, _ := cfg.HasLiked(id, pub); liked { + continue + } + } + views, _ := cfg.Mailbox.ViewCount(id) + var likes uint64 + if cfg.LikeCount != nil { + likes, _ = cfg.LikeCount(id) + } + // Small "seed" score so posts with no engagement still get shown + // sometimes (otherwise a silent but fresh post can't break in). 
+ scoredList = append(scoredList, scored{id: id, score: likes*3 + views + 1}) + } + sort.Slice(scoredList, func(i, j int) bool { return scoredList[i].score > scoredList[j].score }) + if len(scoredList) > limit { + scoredList = scoredList[:limit] + } + pickedIDs := make([]string, len(scoredList)) + for i, s := range scoredList { + pickedIDs[i] = s.id + } + out := expandByID(cfg, pickedIDs) + jsonOK(w, map[string]any{"count": len(out), "posts": out}) + } +} + +// ── GET /feed/hashtag/{tag} ────────────────────────────────────────────── + +func feedHashtag(cfg FeedConfig) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + jsonErr(w, fmt.Errorf("method not allowed"), 405) + return + } + tag := strings.TrimPrefix(r.URL.Path, "/feed/hashtag/") + tag = strings.Trim(tag, "/") + if tag == "" { + jsonErr(w, fmt.Errorf("tag required"), 400) + return + } + limit := queryInt(r, "limit", 50) + ids, err := cfg.Mailbox.PostsByHashtag(tag, limit) + if err != nil { + jsonErr(w, err, 500) + return + } + out := expandByID(cfg, ids) + jsonOK(w, map[string]any{"tag": strings.ToLower(tag), "count": len(out), "posts": out}) + } +} + +// (queryInt helper is shared with the rest of the node HTTP surface; +// see api_common.go.) diff --git a/relay/feed_mailbox.go b/relay/feed_mailbox.go new file mode 100644 index 0000000..9079b45 --- /dev/null +++ b/relay/feed_mailbox.go @@ -0,0 +1,431 @@ +package relay + +// FeedMailbox — BadgerDB-backed storage for social-feed post bodies. +// +// Posts are PUBLIC (plaintext) — unlike the E2E inbox envelopes, feed posts +// have no recipient key. They live keyed by post ID and can be read by +// anyone via GET /feed/post/{id}. 
+// +// Storage layout (keys): +// +// post: → FeedPost JSON (body + metadata) +// post-by-author:: → postID (chrono index for GET /feed/author) +// post-views: → uint64 big-endian (view counter) +// post-hashtag::: → postID (inverted index for #tag search) +// post-trending:: → postID (ranked index; score = likes × 2 + views) +// +// View counts are off-chain because on-chain would mean one tx per view — +// financially and architecturally unreasonable. Likes stay on-chain +// (provable authorship + anti-Sybil via fee). +// +// Anti-spam: +// - MaxPostBodySize is enforced at Store time. +// - Per-sender rate limiting happens at the HTTP layer (withSubmitTxGuards). + +import ( + "encoding/binary" + "encoding/json" + "errors" + "fmt" + "regexp" + "sort" + "strings" + "time" + + badger "github.com/dgraph-io/badger/v4" +) + +const ( + feedPostPrefix = "feedpost:" + feedAuthorIdxPrefix = "feedauthor:" // feedauthor::: + feedViewPrefix = "feedview:" // feedview: → uint64 + feedHashtagPrefix = "feedtag:" // feedtag::: + feedTrendingPrefix = "feedtrend:" // feedtrend:: + + // MaxPostBodySize is the hard cap on a post's on-wire size. Matches + // blockchain.MaxPostSize so the on-chain fee estimate is always + // enforceable (no "I claimed 10 KiB but sent 50 KiB" trick). + MaxPostBodySize = 256 * 1024 // 256 KiB + + // FeedPostDefaultTTLDays is how long a post body lives before BadgerDB + // auto-evicts it. On-chain metadata stays forever, so a reader hitting + // a stale post sees the record with a "body unavailable" indicator. + // Configurable via the env var DCHAIN_FEED_TTL_DAYS (handled in main.go). + FeedPostDefaultTTLDays = 30 + + // maxHashtagsPerPost caps how many distinct hashtags we'll index per + // post. Prevents a spammer from polluting every tag namespace with one + // mega-post. + maxHashtagsPerPost = 8 + + // trendingHalfLifeSeconds controls how quickly a post's score decays. + // Used when computing "trending": recent engagement weighs more than old. 
+ trendingHalfLifeSeconds = 12 * 3600 // 12 hours +) + +// FeedPost is the off-chain body. On-chain we keep the metadata in +// blockchain.PostRecord — here we store the readable payload. +// +// Why not just put the body on-chain? Size — a 256 KiB post × thousands +// per day would bloat the block history. Keeping it in a relay DB with a +// TTL gives us ephemerality while still letting on-chain records serve as +// the permanent proof of authorship. +type FeedPost struct { + // Identity (matches on-chain PostRecord.PostID). + PostID string `json:"post_id"` + Author string `json:"author"` // Ed25519 hex + + // Payload. Content is always plaintext (posts are public). Attachment is + // a pre-compressed blob — client is expected to have minimised size + // before publish. If empty, the post is text-only. + Content string `json:"content"` + ContentType string `json:"content_type,omitempty"` // "text/plain" | "text/markdown" | ... + Attachment []byte `json:"attachment,omitempty"` + AttachmentMIME string `json:"attachment_mime,omitempty"` + Hashtags []string `json:"hashtags,omitempty"` // lowercased, without leading # + + // CreatedAt matches the on-chain tx timestamp — we stamp it server-side + // at Store() so senders can't back-date. + CreatedAt int64 `json:"created_at"` + + // ReplyTo / QuoteOf mirror the on-chain PostRecord fields, included + // here so the client can thread without a second RPC. + ReplyTo string `json:"reply_to,omitempty"` + QuoteOf string `json:"quote_of,omitempty"` +} + +// ErrPostTooLarge is returned by Store when the post body exceeds MaxPostBodySize. +var ErrPostTooLarge = errors.New("post body exceeds maximum allowed size") + +// FeedMailbox stores feed post bodies. +type FeedMailbox struct { + db *badger.DB + ttl time.Duration +} + +// NewFeedMailbox wraps an already-open Badger DB. TTL controls how long +// post bodies live before auto-eviction (on-chain metadata persists +// forever independently). 
+func NewFeedMailbox(db *badger.DB, ttl time.Duration) *FeedMailbox { + if ttl <= 0 { + ttl = time.Duration(FeedPostDefaultTTLDays) * 24 * time.Hour + } + return &FeedMailbox{db: db, ttl: ttl} +} + +// OpenFeedMailbox opens (or creates) a dedicated BadgerDB at dbPath. +func OpenFeedMailbox(dbPath string, ttl time.Duration) (*FeedMailbox, error) { + opts := badger.DefaultOptions(dbPath). + WithLogger(nil). + WithValueLogFileSize(128 << 20). + WithNumVersionsToKeep(1). + WithCompactL0OnClose(true) + db, err := badger.Open(opts) + if err != nil { + return nil, fmt.Errorf("open feed mailbox db: %w", err) + } + return NewFeedMailbox(db, ttl), nil +} + +// Close releases the underlying Badger handle. +func (fm *FeedMailbox) Close() error { return fm.db.Close() } + +// Store persists a post body and updates all indices. `createdAt` is the +// canonical timestamp (usually from the chain tx) and becomes the +// server's view of when the post happened — clients' wall-clock values +// are ignored. +// +// Returns the set of hashtags actually indexed (after dedup + cap). +func (fm *FeedMailbox) Store(post *FeedPost, createdAt int64) ([]string, error) { + size := estimatePostSize(post) + if size > MaxPostBodySize { + return nil, ErrPostTooLarge + } + + post.CreatedAt = createdAt + // Normalise hashtags — the client may or may not have supplied them; + // we derive from Content as the authoritative source, then dedup. + tags := extractHashtags(post.Content) + if len(tags) > maxHashtagsPerPost { + tags = tags[:maxHashtagsPerPost] + } + post.Hashtags = tags + + val, err := json.Marshal(post) + if err != nil { + return nil, fmt.Errorf("marshal post: %w", err) + } + + err = fm.db.Update(func(txn *badger.Txn) error { + // Idempotent on postID — second Store is a no-op. 
+ key := []byte(feedPostPrefix + post.PostID) + if _, err := txn.Get(key); err == nil { + return nil + } + entry := badger.NewEntry(key, val).WithTTL(fm.ttl) + if err := txn.SetEntry(entry); err != nil { + return err + } + + // Author chrono index. + authorKey := fmt.Sprintf("%s%s:%020d:%s", feedAuthorIdxPrefix, post.Author, createdAt, post.PostID) + if err := txn.SetEntry( + badger.NewEntry([]byte(authorKey), []byte(post.PostID)).WithTTL(fm.ttl), + ); err != nil { + return err + } + + // Hashtag inverted index. + for _, tag := range tags { + tagKey := fmt.Sprintf("%s%s:%020d:%s", feedHashtagPrefix, tag, createdAt, post.PostID) + if err := txn.SetEntry( + badger.NewEntry([]byte(tagKey), []byte(post.PostID)).WithTTL(fm.ttl), + ); err != nil { + return err + } + } + return nil + }) + if err != nil { + return nil, err + } + return tags, nil +} + +// Get returns the full post body, or nil if not found / evicted. +func (fm *FeedMailbox) Get(postID string) (*FeedPost, error) { + var p FeedPost + err := fm.db.View(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(feedPostPrefix + postID)) + if err != nil { + return err + } + return item.Value(func(val []byte) error { + return json.Unmarshal(val, &p) + }) + }) + if errors.Is(err, badger.ErrKeyNotFound) { + return nil, nil + } + if err != nil { + return nil, err + } + return &p, nil +} + +// Delete removes a post body and its indices. On-chain soft-delete stays; +// this just frees storage. Called by DELETE_POST event handler hook. +func (fm *FeedMailbox) Delete(postID string) error { + // We need author and createdAt to build index keys — fetch first. 
+ post, err := fm.Get(postID) + if err != nil { + return err + } + if post == nil { + return nil + } + return fm.db.Update(func(txn *badger.Txn) error { + if err := txn.Delete([]byte(feedPostPrefix + postID)); err != nil { + return err + } + authorKey := fmt.Sprintf("%s%s:%020d:%s", + feedAuthorIdxPrefix, post.Author, post.CreatedAt, postID) + if err := txn.Delete([]byte(authorKey)); err != nil { + return err + } + for _, tag := range post.Hashtags { + tagKey := fmt.Sprintf("%s%s:%020d:%s", + feedHashtagPrefix, tag, post.CreatedAt, postID) + if err := txn.Delete([]byte(tagKey)); err != nil { + return err + } + } + return nil + }) +} + +// IncrementView bumps the view counter for a post. No-op on missing post. +// Returns the new count. Views are ephemeral (tied to the post TTL) — +// a fresh relay that gossip-loads an old post starts from 0, which is +// acceptable for a non-authoritative metric. +func (fm *FeedMailbox) IncrementView(postID string) (uint64, error) { + var next uint64 + err := fm.db.Update(func(txn *badger.Txn) error { + key := []byte(feedViewPrefix + postID) + var cur uint64 + if item, err := txn.Get(key); err == nil { + _ = item.Value(func(val []byte) error { + if len(val) == 8 { + cur = binary.BigEndian.Uint64(val) + } + return nil + }) + } else if !errors.Is(err, badger.ErrKeyNotFound) { + return err + } + next = cur + 1 + var buf [8]byte + binary.BigEndian.PutUint64(buf[:], next) + return txn.SetEntry(badger.NewEntry(key, buf[:]).WithTTL(fm.ttl)) + }) + return next, err +} + +// ViewCount returns the current (off-chain) view count for a post. 
+func (fm *FeedMailbox) ViewCount(postID string) (uint64, error) { + var n uint64 + err := fm.db.View(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(feedViewPrefix + postID)) + if errors.Is(err, badger.ErrKeyNotFound) { + return nil + } + if err != nil { + return err + } + return item.Value(func(val []byte) error { + if len(val) == 8 { + n = binary.BigEndian.Uint64(val) + } + return nil + }) + }) + return n, err +} + +// PostsByAuthor lists the N most recent post IDs by an author, newest first. +// Pure ID listing — callers fetch bodies via Get. +func (fm *FeedMailbox) PostsByAuthor(authorPub string, limit int) ([]string, error) { + if limit <= 0 || limit > 200 { + limit = 50 + } + prefix := []byte(feedAuthorIdxPrefix + authorPub + ":") + return fm.reverseIDScan(prefix, limit) +} + +// PostsByHashtag lists the N most recent posts tagged with tag (lowercased). +func (fm *FeedMailbox) PostsByHashtag(tag string, limit int) ([]string, error) { + tag = strings.ToLower(strings.TrimPrefix(tag, "#")) + if tag == "" { + return nil, nil + } + if limit <= 0 || limit > 200 { + limit = 50 + } + prefix := []byte(feedHashtagPrefix + tag + ":") + return fm.reverseIDScan(prefix, limit) +} + +// reverseIDScan walks prefix in reverse lex order and returns the value +// (postID) of each entry up to limit. Used for newest-first indices. +func (fm *FeedMailbox) reverseIDScan(prefix []byte, limit int) ([]string, error) { + out := make([]string, 0, limit) + err := fm.db.View(func(txn *badger.Txn) error { + opts := badger.DefaultIteratorOptions + opts.Prefix = prefix + opts.Reverse = true + seek := append([]byte{}, prefix...) 
+ seek = append(seek, 0xff) + it := txn.NewIterator(opts) + defer it.Close() + for it.Seek(seek); it.ValidForPrefix(prefix) && len(out) < limit; it.Next() { + item := it.Item() + _ = item.Value(func(val []byte) error { + out = append(out, string(val)) + return nil + }) + } + return nil + }) + return out, err +} + +// RecentPostIDs enumerates the most recent posts stored by this relay +// across ALL authors. Used by the trending / recommendations endpoints to +// seed the candidate pool. maxAgeSeconds bounds the walk (0 = no bound). +func (fm *FeedMailbox) RecentPostIDs(maxAgeSeconds int64, limit int) ([]string, error) { + if limit <= 0 || limit > 500 { + limit = 100 + } + // Can't reuse chrono indices because they're per-author. We scan post:* + // and collect, sorted by CreatedAt from the decoded body. This is O(M) + // where M = #posts in DB — fine for MVP since TTL-bounded M is small + // (~5k posts × 30d TTL on a busy node). + type candidate struct { + id string + ts int64 + } + cutoff := int64(0) + if maxAgeSeconds > 0 { + cutoff = time.Now().Unix() - maxAgeSeconds + } + var candidates []candidate + prefix := []byte(feedPostPrefix) + err := fm.db.View(func(txn *badger.Txn) error { + opts := badger.DefaultIteratorOptions + opts.Prefix = prefix + it := txn.NewIterator(opts) + defer it.Close() + for it.Rewind(); it.Valid(); it.Next() { + item := it.Item() + err := item.Value(func(val []byte) error { + var p FeedPost + if err := json.Unmarshal(val, &p); err != nil { + return nil // skip corrupt + } + if p.CreatedAt < cutoff { + return nil + } + candidates = append(candidates, candidate{id: p.PostID, ts: p.CreatedAt}) + return nil + }) + if err != nil { + return err + } + } + return nil + }) + if err != nil { + return nil, err + } + sort.Slice(candidates, func(i, j int) bool { return candidates[i].ts > candidates[j].ts }) + if len(candidates) > limit { + candidates = candidates[:limit] + } + out := make([]string, len(candidates)) + for i, c := range candidates { + 
out[i] = c.id + } + return out, nil +} + +// extractHashtags finds #word tokens in text, lowercases, dedups, preserves +// first-seen order. Word = [A-Za-z0-9_] chars, length 1..40. +func extractHashtags(text string) []string { + re := hashtagRegex + matches := re.FindAllString(text, -1) + seen := make(map[string]struct{}, len(matches)) + out := make([]string, 0, len(matches)) + for _, m := range matches { + tag := strings.ToLower(strings.TrimPrefix(m, "#")) + if len(tag) == 0 || len(tag) > 40 { + continue + } + if _, ok := seen[tag]; ok { + continue + } + seen[tag] = struct{}{} + out = append(out, tag) + } + return out +} + +var hashtagRegex = regexp.MustCompile(`#[A-Za-z0-9_\p{L}]{1,40}`) + +// estimatePostSize returns the on-disk size used for fee calculation. +// Matches the client's pre-publish size estimate so fees are predictable. +func estimatePostSize(post *FeedPost) uint64 { + n := uint64(len(post.Content)) + uint64(len(post.Attachment)) + // Small overhead for metadata (~120 bytes of JSON scaffolding). + n += 128 + return n +} diff --git a/relay/feed_mailbox_test.go b/relay/feed_mailbox_test.go new file mode 100644 index 0000000..da0625d --- /dev/null +++ b/relay/feed_mailbox_test.go @@ -0,0 +1,198 @@ +package relay + +import ( + "os" + "testing" + "time" +) + +func newTestFeedMailbox(t *testing.T) *FeedMailbox { + t.Helper() + dir, err := os.MkdirTemp("", "dchain-feedtest-*") + if err != nil { + t.Fatalf("MkdirTemp: %v", err) + } + fm, err := OpenFeedMailbox(dir, 24*time.Hour) + if err != nil { + _ = os.RemoveAll(dir) + t.Fatalf("OpenFeedMailbox: %v", err) + } + t.Cleanup(func() { + _ = fm.Close() + for i := 0; i < 20; i++ { + if err := os.RemoveAll(dir); err == nil { + return + } + time.Sleep(10 * time.Millisecond) + } + }) + return fm +} + +// TestFeedMailboxStoreAndGet: store round-trips content + metadata. 
+func TestFeedMailboxStoreAndGet(t *testing.T) { + fm := newTestFeedMailbox(t) + post := &FeedPost{ + PostID: "p1", + Author: "authorhex", + Content: "Hello #world from #dchain", + } + tags, err := fm.Store(post, 12345) + if err != nil { + t.Fatalf("Store: %v", err) + } + wantTags := []string{"world", "dchain"} + if len(tags) != len(wantTags) { + t.Fatalf("Store returned %v, want %v", tags, wantTags) + } + for i := range wantTags { + if tags[i] != wantTags[i] { + t.Errorf("Store tag[%d]: got %q, want %q", i, tags[i], wantTags[i]) + } + } + + got, err := fm.Get("p1") + if err != nil || got == nil { + t.Fatalf("Get: got %v err=%v", got, err) + } + if got.Content != post.Content { + t.Errorf("content: got %q, want %q", got.Content, post.Content) + } + if got.CreatedAt != 12345 { + t.Errorf("created_at: got %d, want 12345", got.CreatedAt) + } + if len(got.Hashtags) != 2 { + t.Errorf("hashtags: got %v, want [world dchain]", got.Hashtags) + } +} + +// TestFeedMailboxTooLarge: rejects over-quota content. +func TestFeedMailboxTooLarge(t *testing.T) { + fm := newTestFeedMailbox(t) + big := make([]byte, MaxPostBodySize+1) + post := &FeedPost{ + PostID: "big1", + Author: "a", + Attachment: big, + } + if _, err := fm.Store(post, 0); err != ErrPostTooLarge { + t.Fatalf("Store huge post: got %v, want ErrPostTooLarge", err) + } +} + +// TestFeedMailboxHashtagIndex: hashtags are searchable + dedup + case-normalised. +func TestFeedMailboxHashtagIndex(t *testing.T) { + fm := newTestFeedMailbox(t) + + p1 := &FeedPost{PostID: "p1", Author: "a", Content: "post about #Go"} + p2 := &FeedPost{PostID: "p2", Author: "b", Content: "Also #go programming"} + p3 := &FeedPost{PostID: "p3", Author: "a", Content: "#Rust too"} + + if _, err := fm.Store(p1, 1000); err != nil { + t.Fatal(err) + } + if _, err := fm.Store(p2, 2000); err != nil { + t.Fatal(err) + } + if _, err := fm.Store(p3, 3000); err != nil { + t.Fatal(err) + } + + // #go is case-insensitive, should return both posts newest-first. 
+ ids, err := fm.PostsByHashtag("#Go", 10) + if err != nil { + t.Fatal(err) + } + if len(ids) != 2 || ids[0] != "p2" || ids[1] != "p1" { + t.Errorf("PostsByHashtag(Go): got %v, want [p2 p1]", ids) + } + + ids, _ = fm.PostsByHashtag("rust", 10) + if len(ids) != 1 || ids[0] != "p3" { + t.Errorf("PostsByHashtag(rust): got %v, want [p3]", ids) + } +} + +// TestFeedMailboxViewCounter: increments + reads. +func TestFeedMailboxViewCounter(t *testing.T) { + fm := newTestFeedMailbox(t) + fm.Store(&FeedPost{PostID: "p", Author: "a", Content: "hi"}, 10) + + for i := 1; i <= 5; i++ { + n, err := fm.IncrementView("p") + if err != nil { + t.Fatal(err) + } + if n != uint64(i) { + t.Errorf("IncrementView #%d: got %d, want %d", i, n, i) + } + } + if n, _ := fm.ViewCount("p"); n != 5 { + t.Errorf("ViewCount: got %d, want 5", n) + } +} + +// TestFeedMailboxByAuthor: author chrono index returns newest first. +func TestFeedMailboxByAuthor(t *testing.T) { + fm := newTestFeedMailbox(t) + fm.Store(&FeedPost{PostID: "old", Author: "a", Content: "one"}, 100) + fm.Store(&FeedPost{PostID: "new", Author: "a", Content: "two"}, 500) + fm.Store(&FeedPost{PostID: "mid", Author: "a", Content: "three"}, 300) + fm.Store(&FeedPost{PostID: "other", Author: "b", Content: "four"}, 400) + + ids, err := fm.PostsByAuthor("a", 10) + if err != nil { + t.Fatal(err) + } + want := []string{"new", "mid", "old"} + if len(ids) != len(want) { + t.Fatalf("len: got %d, want %d (%v)", len(ids), len(want), ids) + } + for i := range want { + if ids[i] != want[i] { + t.Errorf("pos %d: got %q want %q", i, ids[i], want[i]) + } + } +} + +// TestFeedMailboxDelete: removes body + indices. 
+func TestFeedMailboxDelete(t *testing.T) { + fm := newTestFeedMailbox(t) + fm.Store(&FeedPost{PostID: "x", Author: "a", Content: "doomed #go"}, 100) + + if err := fm.Delete("x"); err != nil { + t.Fatalf("Delete: %v", err) + } + if got, _ := fm.Get("x"); got != nil { + t.Errorf("Get after delete: got %v, want nil", got) + } + if ids, _ := fm.PostsByHashtag("go", 10); len(ids) != 0 { + t.Errorf("hashtag index: got %v, want []", ids) + } + if ids, _ := fm.PostsByAuthor("a", 10); len(ids) != 0 { + t.Errorf("author index: got %v, want []", ids) + } +} + +// TestFeedMailboxRecentIDs: filters by window, sorts newest first. +func TestFeedMailboxRecentIDs(t *testing.T) { + fm := newTestFeedMailbox(t) + now := time.Now().Unix() + // p1 1 hour old, p2 5 hours old, p3 50 hours old. + fm.Store(&FeedPost{PostID: "p1", Author: "a", Content: "a"}, now-3600) + fm.Store(&FeedPost{PostID: "p2", Author: "b", Content: "b"}, now-5*3600) + fm.Store(&FeedPost{PostID: "p3", Author: "c", Content: "c"}, now-50*3600) + + // 6-hour window: p1 and p2 only. + ids, err := fm.RecentPostIDs(6*3600, 100) + if err != nil { + t.Fatal(err) + } + if len(ids) != 2 { + t.Errorf("RecentPostIDs(6h): got %v, want 2 posts", ids) + } + // Newest first. + if ids[0] != "p1" { + t.Errorf("first post: got %s, want p1", ids[0]) + } +} From f885264d23e64b91a13d44d88a1367253e2c778c Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 19:15:14 +0300 Subject: [PATCH 08/27] feat(media): mandatory metadata scrubbing on /feed/publish + FFmpeg sidecar MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Every photo from a phone camera ships with an EXIF block that leaks: GPS coordinates, camera model + serial, original timestamp, software name, author/copyright fields, sometimes an embedded thumbnail that survives cropping. 
For a social feed positioned as privacy-friendly we can't trust the client alone to scrub — a compromised build, a future plugin, or a hostile fork would simply skip the step and leak authorship data. So: server-side scrub is mandatory for every /feed/publish upload. New package: media media/scrub.go - Scrubber type with Scrub(ctx, bytes, claimedMIME) → (clean, actualMIME) - ScrubImage handles JPEG/PNG/GIF/WebP in-process: decodes, optionally downscales to 1080px max-dim, re-encodes as JPEG Q=75. Stdlib jpeg.Encode emits ZERO metadata → scrub is complete by construction. - Sidecar client (HTTP): posts video/audio bytes to an external FFmpeg worker at DCHAIN_MEDIA_SIDECAR_URL - Magic-byte MIME detection: rejects uploads where declared MIME doesn't match actual bytes (prevents a PDF dressed as image/jpeg from bypassing the scrubber) - ErrSidecarUnavailable: explicit error when video arrives but no sidecar is wired; operator opts in to fallback via --allow-unscrubbed-video (default: reject) media/scrub_test.go - Crafted EXIF segment with "SECRETGPS-…Canon-EOS-R5" canary — verifies the string is gone after ScrubImage - Downscale test (2000×1000 → 1080×540, aspect preserved) - MIME-mismatch rejection - Magic-byte detector sanity table FFmpeg sidecar — new docker/media-sidecar/ Tiny Go HTTP service (~180 LOC, no non-stdlib deps) that shells out to ffmpeg with -map_metadata -1 + -map 0:v -map 0:a? to guarantee only video + audio streams survive (no subtitles, attached pictures, or data channels that could carry hidden info). Re-encode profile: video → H.264 CRF 28 preset=fast, Opus 64k, MP4 faststart audio → Opus 64k, Ogg container Dockerfile: two-stage build (Go → alpine+ffmpeg), ~90 MB image, non- root user, /healthz endpoint for compose probes. Node reaches it via DCHAIN_MEDIA_SIDECAR_URL. Without it, video uploads are rejected with 503 unless operator sets DCHAIN_ALLOW_UNSCRUBBED_VIDEO. 
/feed/publish wiring - cfg.Scrubber is a required dependency - Before storing post body we call scrubber.Scrub(); attachment bytes + MIME are replaced with the cleaned version - content_hash is computed over the SCRUBBED bytes — so the on-chain CREATE_POST tx references exactly what readers will fetch - EstimatedFeeUT uses the scrubbed size, so author's fee reflects actual on-disk cost - Content-type mismatches → 400; sidecar unavailable for video → 503 Flags / env vars --feed-db / DCHAIN_FEED_DB (existing) --feed-ttl-days / DCHAIN_FEED_TTL_DAYS (existing) --media-sidecar-url / DCHAIN_MEDIA_SIDECAR_URL (NEW) --allow-unscrubbed-video / DCHAIN_ALLOW_UNSCRUBBED_VIDEO (NEW; default false) Client responsibilities (for reference — client work lands in Phase C) Even with server-side scrub, the client should still compress aggressively BEFORE upload, because: - upload time is ~N× larger for unscrubbed media (mobile networks) - the server's 256 KiB MaxPostSize is a HARD cap — oversized uploads are rejected, not silently truncated - the on-chain fee is size-based, so users pay for every byte the client didn't bother to shrink Recommended client pipeline: images → expo-image-manipulator: resize max-dim 1080px, WebP or JPEG quality 50-60 videos → react-native-compressor: H.264 CRF 28, 720p max, 64k audio audio → expo-audio's default Opus 32k (already compressed) Documented in docs/media-sidecar.md (added later with Phase C PR). Tests - go test ./... 
green across 6 packages (blockchain consensus identity media relay vm) Co-Authored-By: Claude Opus 4.7 (1M context) --- cmd/node/main.go | 30 ++- docker/media-sidecar/Dockerfile | 35 ++++ docker/media-sidecar/main.go | 201 +++++++++++++++++++ go.mod | 21 +- go.sum | 36 ++-- media/scrub.go | 332 ++++++++++++++++++++++++++++++++ media/scrub_test.go | 149 ++++++++++++++ node/api_feed.go | 61 +++++- 8 files changed, 830 insertions(+), 35 deletions(-) create mode 100644 docker/media-sidecar/Dockerfile create mode 100644 docker/media-sidecar/main.go create mode 100644 media/scrub.go create mode 100644 media/scrub_test.go diff --git a/cmd/node/main.go b/cmd/node/main.go index 99aa613..4902c29 100644 --- a/cmd/node/main.go +++ b/cmd/node/main.go @@ -41,6 +41,7 @@ import ( "go-blockchain/consensus" "go-blockchain/economy" "go-blockchain/identity" + "go-blockchain/media" "go-blockchain/node" "go-blockchain/node/version" "go-blockchain/p2p" @@ -79,6 +80,8 @@ func main() { mailboxDB := flag.String("mailbox-db", envOr("DCHAIN_MAILBOX_DB", "./mailboxdata"), "BadgerDB directory for relay mailbox (env: DCHAIN_MAILBOX_DB)") feedDB := flag.String("feed-db", envOr("DCHAIN_FEED_DB", "./feeddata"), "BadgerDB directory for social-feed post bodies (env: DCHAIN_FEED_DB)") feedTTLDays := flag.Int("feed-ttl-days", int(envUint64Or("DCHAIN_FEED_TTL_DAYS", 30)), "how long feed posts are retained before auto-eviction (env: DCHAIN_FEED_TTL_DAYS)") + mediaSidecarURL := flag.String("media-sidecar-url", envOr("DCHAIN_MEDIA_SIDECAR_URL", ""), "URL of the media scrubber sidecar (FFmpeg-based video/audio re-encoder). Empty = images only (env: DCHAIN_MEDIA_SIDECAR_URL)") + allowUnscrubbedVideo := flag.Bool("allow-unscrubbed-video", envBoolOr("DCHAIN_ALLOW_UNSCRUBBED_VIDEO", false), "accept video uploads without server-side metadata scrubbing (only when no sidecar is configured). 
DANGEROUS — leaves EXIF/GPS/author tags intact (env: DCHAIN_ALLOW_UNSCRUBBED_VIDEO)") govContractID := flag.String("governance-contract", envOr("DCHAIN_GOVERNANCE_CONTRACT", ""), "governance contract ID for dynamic chain parameters (env: DCHAIN_GOVERNANCE_CONTRACT)") joinSeedURL := flag.String("join", envOr("DCHAIN_JOIN", ""), "bootstrap from a running node: comma-separated HTTP URLs (env: DCHAIN_JOIN)") // Observer mode: the node participates in the P2P network, applies @@ -938,14 +941,27 @@ func main() { }, } + // Media scrubber — strips EXIF/GPS/author/camera metadata from every + // uploaded image in-process, and forwards video/audio to the FFmpeg + // sidecar when configured. Mandatory for all /feed/publish traffic. + scrubber := media.NewScrubber(media.SidecarConfig{URL: *mediaSidecarURL}) + if *mediaSidecarURL != "" { + log.Printf("[NODE] media sidecar: %s", *mediaSidecarURL) + } else { + log.Printf("[NODE] media sidecar: not configured (images scrubbed in-process; video/audio %s)", + map[bool]string{true: "stored unscrubbed (DANGEROUS)", false: "rejected"}[*allowUnscrubbedVideo]) + } + feedConfig := node.FeedConfig{ - Mailbox: feedMailbox, - HostingRelayPub: id.PubKeyHex(), - GetPost: chain.Post, - LikeCount: chain.LikeCount, - HasLiked: chain.HasLiked, - PostsByAuthor: chain.PostsByAuthor, - Following: chain.Following, + Mailbox: feedMailbox, + HostingRelayPub: id.PubKeyHex(), + Scrubber: scrubber, + AllowUnscrubbedVideo: *allowUnscrubbedVideo, + GetPost: chain.Post, + LikeCount: chain.LikeCount, + HasLiked: chain.HasLiked, + PostsByAuthor: chain.PostsByAuthor, + Following: chain.Following, } go func() { diff --git a/docker/media-sidecar/Dockerfile b/docker/media-sidecar/Dockerfile new file mode 100644 index 0000000..660dd77 --- /dev/null +++ b/docker/media-sidecar/Dockerfile @@ -0,0 +1,35 @@ +# media-sidecar — FFmpeg-based metadata scrubber for DChain node. +# +# Build: docker build -t dchain/media-sidecar -f docker/media-sidecar/Dockerfile . 
+# Run: docker run -p 8090:8090 dchain/media-sidecar +# Compose: see docker-compose.yml; node points DCHAIN_MEDIA_SIDECAR_URL at it. +# +# Stage 1 — build a tiny static Go binary. +FROM golang:1.22-alpine AS build +WORKDIR /src +# Copy only what we need (the sidecar main is self-contained, no module +# deps on the rest of the repo, so this is a cheap, cache-friendly build). +COPY docker/media-sidecar/main.go ./main.go +RUN go mod init dchain-media-sidecar 2>/dev/null || true +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /out/media-sidecar ./main.go + +# Stage 2 — runtime with ffmpeg. Alpine has a lean ffmpeg build (~90 MB +# total image, most of it codecs we actually need). +FROM alpine:3.19 +RUN apk add --no-cache ffmpeg ca-certificates \ + && addgroup -S dchain && adduser -S -G dchain dchain +COPY --from=build /out/media-sidecar /usr/local/bin/media-sidecar + +USER dchain +EXPOSE 8090 + +# Pin sensible defaults; operator overrides via docker-compose env. +ENV LISTEN_ADDR=:8090 \ + FFMPEG_BIN=ffmpeg \ + MAX_INPUT_MB=32 \ + JOB_TIMEOUT_SECS=60 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ + CMD wget -qO- http://127.0.0.1:8090/healthz || exit 1 + +ENTRYPOINT ["/usr/local/bin/media-sidecar"] diff --git a/docker/media-sidecar/main.go b/docker/media-sidecar/main.go new file mode 100644 index 0000000..1808759 --- /dev/null +++ b/docker/media-sidecar/main.go @@ -0,0 +1,201 @@ +// Media scrubber sidecar — tiny HTTP service that re-encodes video/audio +// through ffmpeg with all metadata stripped. Runs alongside the DChain +// node in docker-compose; the node calls it via DCHAIN_MEDIA_SIDECAR_URL. 
+// +// Contract (matches media.Scrubber in the node): +// +// POST /scrub/video Content-Type: video/* body: raw bytes +// → 200, Content-Type: video/mp4, body: cleaned bytes +// POST /scrub/audio Content-Type: audio/* body: raw bytes +// → 200, Content-Type: audio/ogg, body: cleaned bytes +// +// ffmpeg flags of note: +// +// -map_metadata -1 drop ALL metadata streams (title, author, encoder, +// GPS location atoms, XMP blocks, etc.) +// -map 0:v -map 0:a keep only video and audio streams — dumps attached +// pictures, subtitles, data channels that might carry +// hidden info +// -movflags +faststart +// put MOOV atom at the front so clients can start +// playback before the full download lands +// -c:v libx264 -crf 28 -preset fast +// h264 with aggressive-but-not-painful CRF; ~70-80% +// size reduction on phone-camera source +// -c:a libopus -b:a 64k +// opus at 64 kbps is transparent for speech, fine +// for music at feed quality +// +// Environment: +// +// LISTEN_ADDR default ":8090" +// FFMPEG_BIN default "ffmpeg" (must be in PATH) +// MAX_INPUT_MB default 32 — reject anything larger pre-ffmpeg +// JOB_TIMEOUT_SECS default 60 +// +// The service is deliberately dumb: no queuing, no DB, no state. If you +// need higher throughput, run N replicas behind a TCP load balancer. +package main + +import ( + "bytes" + "context" + "fmt" + "io" + "log" + "net/http" + "os" + "os/exec" + "strconv" + "time" +) + +func main() { + addr := envOr("LISTEN_ADDR", ":8090") + ffmpegBin := envOr("FFMPEG_BIN", "ffmpeg") + maxInputMB := envInt("MAX_INPUT_MB", 32) + jobTimeoutSecs := envInt("JOB_TIMEOUT_SECS", 60) + + // Fail fast if ffmpeg is missing — easier to debug at container start + // than to surface cryptic errors per-request. 
+ if _, err := exec.LookPath(ffmpegBin); err != nil { + log.Fatalf("ffmpeg not found in PATH (looked for %q): %v", ffmpegBin, err) + } + + srv := &server{ + ffmpegBin: ffmpegBin, + maxInputSize: int64(maxInputMB) * 1024 * 1024, + jobTimeout: time.Duration(jobTimeoutSecs) * time.Second, + } + + mux := http.NewServeMux() + mux.HandleFunc("/scrub/video", srv.scrubVideo) + mux.HandleFunc("/scrub/audio", srv.scrubAudio) + mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("ok")) + }) + + log.Printf("media-sidecar: listening on %s, ffmpeg=%s, max_input=%d MiB, timeout=%ds", + addr, ffmpegBin, maxInputMB, jobTimeoutSecs) + if err := http.ListenAndServe(addr, mux); err != nil { + log.Fatalf("ListenAndServe: %v", err) + } +} + +type server struct { + ffmpegBin string + maxInputSize int64 + jobTimeout time.Duration +} + +func (s *server) scrubVideo(w http.ResponseWriter, r *http.Request) { + body, err := s.readLimited(r) + if err != nil { + httpErr(w, err.Error(), http.StatusBadRequest) + return + } + ctx, cancel := context.WithTimeout(r.Context(), s.jobTimeout) + defer cancel() + // Video path: re-encode with metadata strip, H.264 CRF 28, opus audio. + // Output format is MP4 (widest client compatibility). 
+ args := []string{ + "-hide_banner", "-loglevel", "error", + "-i", "pipe:0", + "-map", "0:v", "-map", "0:a?", + "-map_metadata", "-1", + "-c:v", "libx264", "-preset", "fast", "-crf", "28", + "-c:a", "libopus", "-b:a", "64k", + "-movflags", "+faststart+frag_keyframe", + "-f", "mp4", + "pipe:1", + } + out, ffErr, err := s.runFFmpeg(ctx, args, body) + if err != nil { + log.Printf("video scrub failed: %v | stderr=%s", err, ffErr) + httpErr(w, "ffmpeg failed: "+err.Error(), http.StatusUnprocessableEntity) + return + } + w.Header().Set("Content-Type", "video/mp4") + w.Header().Set("Content-Length", strconv.Itoa(len(out))) + _, _ = w.Write(out) +} + +func (s *server) scrubAudio(w http.ResponseWriter, r *http.Request) { + body, err := s.readLimited(r) + if err != nil { + httpErr(w, err.Error(), http.StatusBadRequest) + return + } + ctx, cancel := context.WithTimeout(r.Context(), s.jobTimeout) + defer cancel() + args := []string{ + "-hide_banner", "-loglevel", "error", + "-i", "pipe:0", + "-vn", "-map", "0:a", + "-map_metadata", "-1", + "-c:a", "libopus", "-b:a", "64k", + "-f", "ogg", + "pipe:1", + } + out, ffErr, err := s.runFFmpeg(ctx, args, body) + if err != nil { + log.Printf("audio scrub failed: %v | stderr=%s", err, ffErr) + httpErr(w, "ffmpeg failed: "+err.Error(), http.StatusUnprocessableEntity) + return + } + w.Header().Set("Content-Type", "audio/ogg") + w.Header().Set("Content-Length", strconv.Itoa(len(out))) + _, _ = w.Write(out) +} + +func (s *server) runFFmpeg(ctx context.Context, args []string, input []byte) ([]byte, string, error) { + cmd := exec.CommandContext(ctx, s.ffmpegBin, args...) 
+ cmd.Stdin = bytes.NewReader(input) + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + err := cmd.Run() + if err != nil { + return nil, stderr.String(), err + } + return stdout.Bytes(), stderr.String(), nil +} + +func (s *server) readLimited(r *http.Request) ([]byte, error) { + if r.Method != http.MethodPost { + return nil, fmt.Errorf("method not allowed") + } + limited := io.LimitReader(r.Body, s.maxInputSize+1) + buf, err := io.ReadAll(limited) + if err != nil { + return nil, fmt.Errorf("read body: %w", err) + } + if int64(len(buf)) > s.maxInputSize { + return nil, fmt.Errorf("input exceeds %d bytes", s.maxInputSize) + } + return buf, nil +} + +func httpErr(w http.ResponseWriter, msg string, status int) { + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + w.WriteHeader(status) + _, _ = w.Write([]byte(msg)) +} + +func envOr(k, d string) string { + if v := os.Getenv(k); v != "" { + return v + } + return d +} +func envInt(k string, d int) int { + v := os.Getenv(k) + if v == "" { + return d + } + n, err := strconv.Atoi(v) + if err != nil { + return d + } + return n +} diff --git a/go.mod b/go.mod index 4d145dd..84231ca 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module go-blockchain -go 1.21 +go 1.25.0 require ( github.com/dgraph-io/badger/v4 v4.2.0 @@ -9,7 +9,12 @@ require ( github.com/libp2p/go-libp2p-pubsub v0.10.0 github.com/multiformats/go-multiaddr v0.12.3 github.com/tetratelabs/wazero v1.7.3 - golang.org/x/crypto v0.18.0 + golang.org/x/crypto v0.49.0 +) + +require ( + golang.org/x/image v0.39.0 + golang.org/x/telemetry v0.0.0-20260311193753-579e4da9a98c // indirect ) require ( @@ -114,12 +119,12 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.26.0 // indirect golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect - golang.org/x/mod v0.13.0 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sync v0.4.0 // indirect - golang.org/x/sys v0.16.0 // indirect - 
golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.14.0 // indirect + golang.org/x/mod v0.34.0 // indirect + golang.org/x/net v0.52.0 // indirect + golang.org/x/sync v0.20.0 // indirect + golang.org/x/sys v0.42.0 // indirect + golang.org/x/text v0.36.0 // indirect + golang.org/x/tools v0.43.0 // indirect gonum.org/v1/gonum v0.13.0 // indirect google.golang.org/protobuf v1.31.0 // indirect lukechampine.com/blake3 v1.2.1 // indirect diff --git a/go.sum b/go.sum index f732b3b..fa37461 100644 --- a/go.sum +++ b/go.sum @@ -123,8 +123,8 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8= @@ -443,11 +443,13 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.18.0 
h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= +golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= +golang.org/x/image v0.39.0 h1:skVYidAEVKgn8lZ602XO75asgXBgLj9G/FE3RbuPFww= +golang.org/x/image v0.39.0/go.mod h1:sIbmppfU+xFLPIG0FoVUTvyBMmgng1/XAMhQ2ft0hpA= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -459,8 +461,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= -golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI= +golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -479,8 +481,8 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210423184538-5f58ad60dda6/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0= +golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -494,8 +496,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= -golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4= +golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -517,15 +519,17 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= +golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/telemetry v0.0.0-20260311193753-579e4da9a98c h1:6a8FdnNk6bTXBjR4AGKFgUKuo+7GnR3FX5L7CbveeZc= +golang.org/x/telemetry v0.0.0-20260311193753-579e4da9a98c/go.mod h1:TpUTTEp9frx7rTdLpC9gFG9kdI7zVLFTFFlqaH2Cncw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg= +golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -545,8 +549,8 @@ golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= -golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= +golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s= +golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/media/scrub.go b/media/scrub.go new file mode 100644 index 0000000..1130195 --- /dev/null +++ b/media/scrub.go @@ -0,0 +1,332 @@ +// Package media contains metadata scrubbing and re-compression helpers for +// files uploaded to the social feed. 
+// +// Why this exists +// --------------- +// Every image file carries an EXIF block that can leak: +// - GPS coordinates where the photo was taken +// - Camera model, serial number, lens +// - Original timestamp (even if the user clears their clock) +// - Software name / version +// - Author / copyright fields +// - A small embedded thumbnail that may leak even after cropping +// +// Videos and audio have analogous containers (MOV/MP4 atoms, ID3 tags, +// Matroska tags). For a social feed that prides itself on privacy we +// can't trust the client to have stripped all of it — we scrub again +// on the server before persisting the file to the feed mailbox. +// +// Strategy +// -------- +// Images: decode → strip any ICC profile → re-encode with the stdlib +// JPEG/PNG encoders. These encoders DO NOT emit EXIF, so re-encoding is +// a complete scrub by construction. Output is JPEG (quality 75) unless +// the input is a lossless PNG small enough to keep as PNG. +// +// Videos: require an external ffmpeg worker (the "media sidecar") — +// cannot do this in pure Go without a huge CGo footprint. A tiny HTTP +// contract (see docs/media-sidecar.md) lets node operators plug in +// compressO-like services behind an env var. If the sidecar is not +// configured, videos are stored as-is with a LOG WARNING — the operator +// decides whether to accept that risk. +// +// Magic-byte detection: the claimed Content-Type must match what's +// actually in the bytes; mismatches are rejected (prevents a PDF +// labelled as image/jpeg from bypassing the scrubber). +package media + +import ( + "bytes" + "context" + "errors" + "fmt" + "image" + "image/jpeg" + "image/png" + "io" + "net/http" + "strings" + "time" + + // Register decoders for the formats we accept. + _ "image/gif" + _ "golang.org/x/image/webp" +) + +// Errors returned by scrubber. +var ( + // ErrUnsupportedMIME is returned when the caller claims a MIME we + // don't know how to scrub. 
+ ErrUnsupportedMIME = errors.New("unsupported media type") + + // ErrMIMEMismatch is returned when the bytes don't match the claimed + // MIME — blocks a crafted upload from bypassing the scrubber. + ErrMIMEMismatch = errors.New("actual bytes don't match claimed content-type") + + // ErrSidecarUnavailable is returned when video scrubbing was required + // but no external worker is configured and the operator policy does + // not allow unscrubbed video storage. + ErrSidecarUnavailable = errors.New("media sidecar required for video scrubbing but not configured") +) + +// ── Image scrubbing ──────────────────────────────────────────────────────── + +// ImageMaxDim caps the larger dimension of a stored image. 1080px is the +// "full-HD-ish" sweet spot — larger rarely matters on a phone feed and +// drops file size dramatically. The client is expected to have downscaled +// already (expo-image-manipulator), but we re-apply the cap server-side +// as a defence-in-depth and to guarantee uniform storage cost. +const ImageMaxDim = 1080 + +// ImageJPEGQuality is the re-encode quality for JPEG output. 75 balances +// perceived quality with size — below 60 artifacts become visible, above +// 85 we're paying for noise we can't see. +const ImageJPEGQuality = 75 + +// ScrubImage decodes src, removes all metadata (by way of re-encoding +// with the stdlib JPEG encoder), optionally downscales to ImageMaxDim, +// and returns the clean JPEG bytes + the canonical MIME the caller +// should store. +// +// claimedMIME is what the client said the file is; if the bytes don't +// match, ErrMIMEMismatch is returned. Accepts image/jpeg, image/png, +// image/gif, image/webp on input; output is always image/jpeg (one less +// branch in the reader, and no decoder has to touch EXIF). 
+func ScrubImage(src []byte, claimedMIME string) (out []byte, outMIME string, err error) { + actualMIME := detectMIME(src) + if !isImageMIME(actualMIME) { + return nil, "", fmt.Errorf("%w: %s", ErrUnsupportedMIME, actualMIME) + } + if claimedMIME != "" && !mimesCompatible(claimedMIME, actualMIME) { + return nil, "", fmt.Errorf("%w: claimed %s, actual %s", + ErrMIMEMismatch, claimedMIME, actualMIME) + } + + img, _, err := image.Decode(bytes.NewReader(src)) + if err != nil { + return nil, "", fmt.Errorf("decode image: %w", err) + } + + // Downscale if needed. We use a draw-based nearest-neighbour style + // approach via stdlib to avoid pulling in x/image/draw unless we need + // higher-quality resampling. For feed thumbnails nearest is fine since + // content is typically downsampled already. + if bounds := img.Bounds(); bounds.Dx() > ImageMaxDim || bounds.Dy() > ImageMaxDim { + img = downscale(img, ImageMaxDim) + } + + // Re-encode as JPEG. stdlib's jpeg.Encode writes ZERO metadata — + // no EXIF, no ICC, no XMP, no MakerNote. That's the scrub. + var buf bytes.Buffer + if err := jpeg.Encode(&buf, img, &jpeg.Options{Quality: ImageJPEGQuality}); err != nil { + return nil, "", fmt.Errorf("encode jpeg: %w", err) + } + return buf.Bytes(), "image/jpeg", nil +} + +// downscale returns a new image whose larger dimension equals maxDim, +// preserving aspect ratio. Uses stdlib image.NewRGBA + a nearest-neighbour +// copy loop — good enough for feed images that are already compressed. 
+func downscale(src image.Image, maxDim int) image.Image { + b := src.Bounds() + w, h := b.Dx(), b.Dy() + var nw, nh int + if w >= h { + nw = maxDim + nh = h * maxDim / w + } else { + nh = maxDim + nw = w * maxDim / h + } + dst := image.NewRGBA(image.Rect(0, 0, nw, nh)) + for y := 0; y < nh; y++ { + sy := b.Min.Y + y*h/nh + for x := 0; x < nw; x++ { + sx := b.Min.X + x*w/nw + dst.Set(x, y, src.At(sx, sy)) + } + } + return dst +} + +// pngEncoder is kept for callers that explicitly want lossless output +// (future — not used by ScrubImage which always produces JPEG). +var pngEncoder = png.Encoder{CompressionLevel: png.BestCompression} + +// ── MIME detection & validation ──────────────────────────────────────────── + +// detectMIME inspects magic bytes to figure out what the data actually is, +// independent of what the caller claimed. Matches the subset of types +// stdlib http.DetectContentType handles, refined for our use. +func detectMIME(data []byte) string { + if len(data) == 0 { + return "" + } + // http.DetectContentType handles most formats correctly (JPEG, PNG, + // GIF, WebP, MP4, WebM, MP3, OGG). We only refine when needed. + return strings.SplitN(http.DetectContentType(data), ";", 2)[0] +} + +func isImageMIME(m string) bool { + switch m { + case "image/jpeg", "image/png", "image/gif", "image/webp": + return true + } + return false +} + +func isVideoMIME(m string) bool { + switch m { + case "video/mp4", "video/webm", "video/quicktime": + return true + } + return false +} + +func isAudioMIME(m string) bool { + switch m { + case "audio/mpeg", "audio/ogg", "audio/webm", "audio/wav", "audio/mp4": + return true + } + return false +} + +// mimesCompatible tolerates small aliases (image/jpg vs image/jpeg, etc.) +// so a misspelled client header doesn't cause a 400. Claimed MIME is +// the caller's; actual is from magic bytes — we trust magic bytes when +// they disagree with a known-silly alias. 
func mimesCompatible(claimed, actual string) bool {
	c := strings.ToLower(strings.TrimSpace(claimed))
	if c == actual {
		return true
	}
	// Known-silly aliases some clients send. Each maps to its canonical
	// form; we accept when the canonical form matches the sniffed type.
	var canon string
	switch c {
	case "image/jpg":
		canon = "image/jpeg"
	case "image/x-png":
		canon = "image/png"
	case "video/mov":
		canon = "video/quicktime"
	default:
		return false
	}
	return canon == actual
}

// ── Video scrubbing (sidecar) ────────────────────────────────────────────

// SidecarConfig describes how to reach an external media scrubber worker
// (typically a tiny FFmpeg-wrapper HTTP service running alongside the
// node — see docs/media-sidecar.md). An empty URL disables sidecar use;
// callers then decide whether to fall back to "store as-is and warn" or
// to reject video uploads entirely.
type SidecarConfig struct {
	// URL is the base URL of the sidecar. Expected routes:
	//
	//	POST /scrub/video   body: raw bytes → returns scrubbed bytes
	//	POST /scrub/audio   body: raw bytes → returns scrubbed bytes
	//
	// Both MUST strip metadata (-map_metadata -1 in ffmpeg terms) and
	// re-encode with a sane bitrate cap (default: H.264 CRF 28 for
	// video, libopus 96k for audio). See the reference implementation
	// at docker/media-sidecar/ in this repo.
	URL string

	// Timeout guards against a hung sidecar. 30s is enough for a 5 MB
	// video on modest hardware; larger inputs should be pre-compressed
	// by the client.
	Timeout time.Duration

	// MaxInputBytes caps what we forward to the sidecar (protects
	// against an attacker tying up the sidecar on a 1 GB upload).
	MaxInputBytes int64
}

// Scrubber bundles in-process image scrubbing with optional sidecar-based
// video/audio scrubbing. Create one at node startup and reuse it.
type Scrubber struct {
	sidecar SidecarConfig
	http    *http.Client
}

// NewScrubber returns a Scrubber. sidecar.URL may be empty (image-only
// mode) — Scrub then returns ErrSidecarUnavailable for video/audio input.
+func NewScrubber(sidecar SidecarConfig) *Scrubber { + if sidecar.Timeout == 0 { + sidecar.Timeout = 30 * time.Second + } + if sidecar.MaxInputBytes == 0 { + sidecar.MaxInputBytes = 16 * 1024 * 1024 // 16 MiB input → client should have shrunk + } + return &Scrubber{ + sidecar: sidecar, + http: &http.Client{ + Timeout: sidecar.Timeout, + }, + } +} + +// Scrub picks the right strategy based on the actual MIME of the bytes. +// Returns the cleaned payload and the canonical MIME to store under. +func (s *Scrubber) Scrub(ctx context.Context, src []byte, claimedMIME string) ([]byte, string, error) { + actual := detectMIME(src) + if claimedMIME != "" && !mimesCompatible(claimedMIME, actual) { + return nil, "", fmt.Errorf("%w: claimed %s, actual %s", + ErrMIMEMismatch, claimedMIME, actual) + } + switch { + case isImageMIME(actual): + // Images handled in-process, no sidecar needed. + return ScrubImage(src, claimedMIME) + case isVideoMIME(actual): + return s.scrubViaSidecar(ctx, "/scrub/video", src, actual) + case isAudioMIME(actual): + return s.scrubViaSidecar(ctx, "/scrub/audio", src, actual) + default: + return nil, "", fmt.Errorf("%w: %s", ErrUnsupportedMIME, actual) + } +} + +// scrubViaSidecar POSTs src to the configured sidecar route and returns +// the response bytes. 
Errors: +// - ErrSidecarUnavailable if sidecar.URL is empty +// - wrapping the HTTP error otherwise +func (s *Scrubber) scrubViaSidecar(ctx context.Context, path string, src []byte, actual string) ([]byte, string, error) { + if s.sidecar.URL == "" { + return nil, "", ErrSidecarUnavailable + } + if int64(len(src)) > s.sidecar.MaxInputBytes { + return nil, "", fmt.Errorf("input exceeds sidecar max %d bytes", s.sidecar.MaxInputBytes) + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, + strings.TrimRight(s.sidecar.URL, "/")+path, bytes.NewReader(src)) + if err != nil { + return nil, "", fmt.Errorf("build sidecar request: %w", err) + } + req.Header.Set("Content-Type", actual) + resp, err := s.http.Do(req) + if err != nil { + return nil, "", fmt.Errorf("call sidecar: %w", err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) + return nil, "", fmt.Errorf("sidecar returned %d: %s", resp.StatusCode, string(body)) + } + // Limit the reply we buffer — an evil sidecar could try to amplify. + const maxReply = 64 * 1024 * 1024 // 64 MiB hard cap + out, err := io.ReadAll(io.LimitReader(resp.Body, maxReply)) + if err != nil { + return nil, "", fmt.Errorf("read sidecar reply: %w", err) + } + respMIME := resp.Header.Get("Content-Type") + if respMIME == "" { + respMIME = actual + } + return out, strings.SplitN(respMIME, ";", 2)[0], nil +} + +// IsSidecarConfigured reports whether video/audio scrubbing is available. +// Callers can use this to decide whether to accept video attachments or +// reject them with a clear "this node doesn't support video" message. 
+func (s *Scrubber) IsSidecarConfigured() bool { + return s.sidecar.URL != "" +} diff --git a/media/scrub_test.go b/media/scrub_test.go new file mode 100644 index 0000000..81ad9bb --- /dev/null +++ b/media/scrub_test.go @@ -0,0 +1,149 @@ +package media + +import ( + "bytes" + "image" + "image/color" + "image/jpeg" + "testing" +) + +// TestScrubImageRemovesEXIF: our scrubber re-encodes via stdlib JPEG, which +// does not preserve EXIF by construction. We verify that a crafted input +// carrying an EXIF marker produces an output without one. +func TestScrubImageRemovesEXIF(t *testing.T) { + // Build a JPEG that explicitly contains an APP1 EXIF segment. + // Structure: JPEG SOI + APP1 with "Exif\x00\x00" header + real image data. + var base bytes.Buffer + img := image.NewRGBA(image.Rect(0, 0, 8, 8)) + for y := 0; y < 8; y++ { + for x := 0; x < 8; x++ { + img.Set(x, y, color.RGBA{uint8(x * 32), uint8(y * 32), 128, 255}) + } + } + if err := jpeg.Encode(&base, img, &jpeg.Options{Quality: 80}); err != nil { + t.Fatalf("encode base: %v", err) + } + input := injectEXIF(t, base.Bytes()) + + if !bytes.Contains(input, []byte("Exif\x00\x00")) { + t.Fatalf("test setup broken: EXIF not injected") + } + // Also drop an identifiable string in the EXIF payload so we can prove + // it's gone. + if !bytes.Contains(input, []byte("SECRETGPS")) { + t.Fatalf("test setup broken: EXIF marker not injected") + } + + cleaned, mime, err := ScrubImage(input, "image/jpeg") + if err != nil { + t.Fatalf("ScrubImage: %v", err) + } + if mime != "image/jpeg" { + t.Errorf("mime: got %q, want image/jpeg", mime) + } + // Verify the scrubbed output doesn't contain our canary string. + if bytes.Contains(cleaned, []byte("SECRETGPS")) { + t.Errorf("EXIF canary survived scrub — metadata not stripped") + } + // Verify the output doesn't contain the EXIF segment marker. 
+ if bytes.Contains(cleaned, []byte("Exif\x00\x00")) { + t.Errorf("EXIF header string survived scrub") + } + // Output must still be a valid JPEG. + if _, err := jpeg.Decode(bytes.NewReader(cleaned)); err != nil { + t.Errorf("scrubbed output is not a valid JPEG: %v", err) + } +} + +// injectEXIF splices a synthetic APP1 EXIF segment after the JPEG SOI. +// Segment layout: FF E1 "Exif\0\0" + arbitrary payload. +// The payload is NOT valid TIFF — that's fine; stdlib JPEG decoder skips +// unknown APP1 segments rather than aborting. +func injectEXIF(t *testing.T, src []byte) []byte { + t.Helper() + if len(src) < 2 || src[0] != 0xFF || src[1] != 0xD8 { + t.Fatalf("not a JPEG") + } + payload := []byte("Exif\x00\x00" + "SECRETGPS-51.5074N-0.1278W-Canon-EOS-R5") + segmentLen := len(payload) + 2 // +2 = 2 bytes of len field itself + var seg bytes.Buffer + seg.Write([]byte{0xFF, 0xE1}) + seg.WriteByte(byte(segmentLen >> 8)) + seg.WriteByte(byte(segmentLen & 0xff)) + seg.Write(payload) + out := make([]byte, 0, len(src)+seg.Len()) + out = append(out, src[:2]...) // SOI + out = append(out, seg.Bytes()...) + out = append(out, src[2:]...) + return out +} + +// TestScrubImageMIMEMismatch: rejects bytes that don't match claimed MIME. +func TestScrubImageMIMEMismatch(t *testing.T) { + var buf bytes.Buffer + img := image.NewRGBA(image.Rect(0, 0, 4, 4)) + jpeg.Encode(&buf, img, nil) + // Claim it's a PNG. + _, _, err := ScrubImage(buf.Bytes(), "image/png") + if err == nil { + t.Fatalf("expected ErrMIMEMismatch, got nil") + } +} + +// TestScrubImageDownscale: images over ImageMaxDim are shrunk. +func TestScrubImageDownscale(t *testing.T) { + // Make a 2000×1000 image — larger dim 2000 > 1080. 
+ img := image.NewRGBA(image.Rect(0, 0, 2000, 1000)) + for y := 0; y < 1000; y++ { + for x := 0; x < 2000; x++ { + img.Set(x, y, color.RGBA{128, 64, 200, 255}) + } + } + var buf bytes.Buffer + if err := jpeg.Encode(&buf, img, &jpeg.Options{Quality: 80}); err != nil { + t.Fatalf("encode: %v", err) + } + cleaned, _, err := ScrubImage(buf.Bytes(), "image/jpeg") + if err != nil { + t.Fatalf("ScrubImage: %v", err) + } + decoded, err := jpeg.Decode(bytes.NewReader(cleaned)) + if err != nil { + t.Fatalf("decode scrubbed: %v", err) + } + b := decoded.Bounds() + if b.Dx() > ImageMaxDim || b.Dy() > ImageMaxDim { + t.Errorf("not downscaled: got %dx%d, want max %d", b.Dx(), b.Dy(), ImageMaxDim) + } + // Aspect ratio roughly preserved (2:1 → 1080:540 with rounding slack). + if b.Dx() != ImageMaxDim { + t.Errorf("larger dim: got %d, want %d", b.Dx(), ImageMaxDim) + } +} + +// TestDetectMIME: a few magic-byte cases to ensure magic detection works. +func TestDetectMIME(t *testing.T) { + cases := []struct { + data []byte + want string + }{ + {[]byte("\xff\xd8\xff\xe0garbage"), "image/jpeg"}, + {[]byte("\x89PNG\r\n\x1a\n..."), "image/png"}, + {[]byte("GIF89a..."), "image/gif"}, + {[]byte{}, ""}, + } + for _, tc := range cases { + got := detectMIME(tc.data) + if got != tc.want { + t.Errorf("detectMIME(%q): got %q want %q", string(tc.data[:min(len(tc.data), 12)]), got, tc.want) + } + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/node/api_feed.go b/node/api_feed.go index c4fbe54..685433b 100644 --- a/node/api_feed.go +++ b/node/api_feed.go @@ -29,11 +29,13 @@ package node // re-publish to another relay. 
import ( + "context" "crypto/sha256" "encoding/base64" "encoding/hex" "encoding/json" "fmt" + "log" "net/http" "sort" "strings" @@ -41,6 +43,7 @@ import ( "go-blockchain/blockchain" "go-blockchain/identity" + "go-blockchain/media" "go-blockchain/relay" ) @@ -53,6 +56,18 @@ type FeedConfig struct { // /feed/publish so the client knows who to put in CREATE_POST tx. HostingRelayPub string + // Scrubber strips metadata from image/video/audio attachments before + // they are stored. MUST be non-nil; a zero Scrubber (NewScrubber with + // empty sidecar URL) still handles images in-process — only video/audio + // require sidecar config. + Scrubber *media.Scrubber + + // AllowUnscrubbedVideo controls server behaviour when a video upload + // arrives and no sidecar is configured. false (default) → reject; true + // → store as-is with a warning log. Set via --allow-unscrubbed-video + // flag on the node. Leave false in production. + AllowUnscrubbedVideo bool + // Chain lookups (nil-safe; endpoints degrade gracefully). GetPost func(postID string) (*blockchain.PostRecord, error) LikeCount func(postID string) (uint64, error) @@ -136,6 +151,7 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { // Decode attachment. var attachment []byte + var attachmentMIME string if req.AttachmentB64 != "" { b, err := base64.StdEncoding.DecodeString(req.AttachmentB64) if err != nil { @@ -145,11 +161,48 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { } } attachment = b + attachmentMIME = req.AttachmentMIME + + // MANDATORY server-side scrub: strip ALL metadata (EXIF/GPS/ + // camera/author/ICC/etc.) and re-compress. Client is expected + // to have done a first pass, but we never trust it — a photo + // from a phone carries GPS coordinates by default and the client + // might forget or a hostile client might skip the scrub entirely. + // + // Images are handled in-process (stdlib re-encode to JPEG kills + // all metadata by construction). 
Videos/audio are forwarded to + // the media sidecar; if none is configured and the operator + // hasn't opted in to AllowUnscrubbedVideo, we reject. + if cfg.Scrubber == nil { + jsonErr(w, fmt.Errorf("media scrubber not configured on this node"), 503) + return + } + ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second) + cleaned, newMIME, err := cfg.Scrubber.Scrub(ctx, attachment, attachmentMIME) + cancel() + if err != nil { + // Graceful video fallback only when explicitly allowed. + if err == media.ErrSidecarUnavailable && cfg.AllowUnscrubbedVideo { + // Keep bytes as-is (operator accepted the risk), just log. + log.Printf("[feed] WARNING: storing unscrubbed video — no sidecar configured (author=%s)", req.Author) + } else { + status := 400 + if err == media.ErrSidecarUnavailable { + status = 503 + } + jsonErr(w, fmt.Errorf("scrub attachment: %w", err), status) + return + } + } else { + attachment = cleaned + attachmentMIME = newMIME + } } - // Content hash binds the body to the on-chain metadata. We hash - // content+attachment so the client can't publish body-A off-chain - // and commit hash-of-body-B on-chain. + // Content hash is computed over the scrubbed bytes — that's what + // the on-chain tx will reference, and what readers fetch. Binds + // the body to the metadata so a misbehaving relay can't substitute + // a different body under the same PostID. 
h := sha256.New() h.Write([]byte(req.Content)) h.Write(attachment) @@ -181,7 +234,7 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { Content: req.Content, ContentType: req.ContentType, Attachment: attachment, - AttachmentMIME: req.AttachmentMIME, + AttachmentMIME: attachmentMIME, ReplyTo: req.ReplyTo, QuoteOf: req.QuoteOf, } From 9e86c93fdaaa3bd9bff4cf878342a7be4a1a0992 Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 19:27:00 +0300 Subject: [PATCH 09/27] test(feed): end-to-end integration + two-node propagation (Phase B hardening) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds two integration-test files that exercise the full feed stack over real HTTP requests, plus a fix to the publish signature model that the EXIF scrubbing test surfaced. Bug fix — api_feed.go publish signature flow Previously: server scrubbed the attachment → computed content_hash over the SCRUBBED bytes → verified the author's signature against that hash. But the client, not owning the scrubber, signs over the RAW upload. The two hashes differ whenever scrub touches the bytes (which it always does for images), so every signed upload with an image was rejected as "signature invalid". Fixed order: 1. decode attachment from base64 2. compute raw_content_hash over Content + raw attachment 3. verify author's signature against raw_content_hash 4. scrub attachment (strips EXIF / re-encodes) 5. compute final_content_hash over Content + scrubbed attachment 6. return final hash in response for the on-chain CREATE_POST tx The signature proves the upload is authentic; the final hash binds the on-chain record to what readers actually download. node/feed_e2e_test.go In-process harness: real BadgerDB chain + feed mailbox + media scrubber + httptest.Server with RegisterFeedRoutes. Tests drive it via real http.Post / http.Get so rate limiters, auth, scrubber, and handler code all run on the happy path. 
Tests: - TestE2EFullFlow — publish → CREATE_POST tx → body fetch → view bump → stats → author list → soft-delete → 410 Gone on re-fetch - TestE2ELikeUnlikeAffectsStats — on-chain LIKE_POST bumps /stats, liked_by_me reflects the caller - TestE2ETimeline — follow graph, merged timeline newest-first - TestE2ETrendingRanking — likes × 3 + views puts hot post at [0] - TestE2EForYouFilters — excludes own posts + followed authors + already-liked posts; surfaces strangers - TestE2EHashtagSearch — tag returns only tagged posts - TestE2EScrubberStripsEXIF — injects SUPERSECRETGPS canary into a JPEG APP1 segment, uploads via /feed/publish, reads back — asserts canary is GONE from stored attachment. This is the privacy-critical regression gate: if it ever breaks, GPS coordinates leak. - TestE2ERejectsMIMEMismatch — PNG labelled as JPEG → 400 - TestE2ERejectsBadSignature — wrong signer → 403 - TestE2ERejectsStaleTimestamp — 1-hour-old ts → 400 (anti-replay) node/feed_twonode_test.go Simulates two independent nodes sharing block history (gossip via same-block AddBlock on both chains). Verifies the v2.0.0 design contract: chain state replicates, but post BODIES live only on the hosting relay. Tests: - TestTwoNodePostPropagation — Alice publishes on A; B's chain sees the record; B's HTTP /feed/post/{id} returns 404 (body is A's); fetch from A succeeds using hosting_relay field from B's chain lookup. Documents the client-side routing contract. - TestTwoNodeLikeCounterSharedAcrossNodes — Bob likes from Node B; both A's and B's /stats show likes=1. Proves engagement aggregates are chain-authoritative, not per-relay. - TestTwoNodeFollowGraphReplicates — FOLLOW tx propagates, /timeline on B returns A-hosted posts with metadata (no body, as designed). 
Coverage summary Publish flow (sign → scrub → hash → store): ✓ CREATE_POST on-chain fee accounting: ✓ Like / Unlike counter consistency: ✓ Follow graph → timeline merge: ✓ Trending ranking by likes × 3 + views: ✓ For You exclusion rules (self, followed, liked): ✓ Hashtag inverted index: ✓ View counter increment + stats aggregate: ✓ Soft-delete → 410 Gone: ✓ Metadata scrubbing (EXIF canary): ✓ MIME mismatch rejection: ✓ Signature authenticity: ✓ Timestamp anti-replay (±5 min window): ✓ Two-node block propagation: ✓ Cross-node body fetch via hosting_relay: ✓ Likes aggregation across nodes: ✓ All 7 test packages green: blockchain consensus identity media node relay vm. Co-Authored-By: Claude Opus 4.7 (1M context) --- node/api_feed.go | 107 ++--- node/feed_e2e_test.go | 831 ++++++++++++++++++++++++++++++++++++++ node/feed_twonode_test.go | 504 +++++++++++++++++++++++ 3 files changed, 1393 insertions(+), 49 deletions(-) create mode 100644 node/feed_e2e_test.go create mode 100644 node/feed_twonode_test.go diff --git a/node/api_feed.go b/node/api_feed.go index 685433b..fd12b13 100644 --- a/node/api_feed.go +++ b/node/api_feed.go @@ -149,8 +149,8 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { return } - // Decode attachment. - var attachment []byte + // Decode attachment (raw upload — before scrub). + var rawAttachment []byte var attachmentMIME string if req.AttachmentB64 != "" { b, err := base64.StdEncoding.DecodeString(req.AttachmentB64) @@ -160,57 +160,21 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { return } } - attachment = b + rawAttachment = b attachmentMIME = req.AttachmentMIME - - // MANDATORY server-side scrub: strip ALL metadata (EXIF/GPS/ - // camera/author/ICC/etc.) and re-compress. Client is expected - // to have done a first pass, but we never trust it — a photo - // from a phone carries GPS coordinates by default and the client - // might forget or a hostile client might skip the scrub entirely. 
- // - // Images are handled in-process (stdlib re-encode to JPEG kills - // all metadata by construction). Videos/audio are forwarded to - // the media sidecar; if none is configured and the operator - // hasn't opted in to AllowUnscrubbedVideo, we reject. - if cfg.Scrubber == nil { - jsonErr(w, fmt.Errorf("media scrubber not configured on this node"), 503) - return - } - ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second) - cleaned, newMIME, err := cfg.Scrubber.Scrub(ctx, attachment, attachmentMIME) - cancel() - if err != nil { - // Graceful video fallback only when explicitly allowed. - if err == media.ErrSidecarUnavailable && cfg.AllowUnscrubbedVideo { - // Keep bytes as-is (operator accepted the risk), just log. - log.Printf("[feed] WARNING: storing unscrubbed video — no sidecar configured (author=%s)", req.Author) - } else { - status := 400 - if err == media.ErrSidecarUnavailable { - status = 503 - } - jsonErr(w, fmt.Errorf("scrub attachment: %w", err), status) - return - } - } else { - attachment = cleaned - attachmentMIME = newMIME - } } - // Content hash is computed over the scrubbed bytes — that's what - // the on-chain tx will reference, and what readers fetch. Binds - // the body to the metadata so a misbehaving relay can't substitute - // a different body under the same PostID. - h := sha256.New() - h.Write([]byte(req.Content)) - h.Write(attachment) - contentHash := h.Sum(nil) - contentHashHex := hex.EncodeToString(contentHash) + // ── Step 1: verify signature over the RAW-upload hash ────────── + // The client signs what it sent. The server recomputes hash over + // the as-received bytes and verifies — this proves the upload + // came from the claimed author and wasn't tampered with in transit. 
+ rawHasher := sha256.New() + rawHasher.Write([]byte(req.Content)) + rawHasher.Write(rawAttachment) + rawContentHash := rawHasher.Sum(nil) + rawContentHashHex := hex.EncodeToString(rawContentHash) - // Verify the author's signature over the canonical publish bytes. - msg := []byte(fmt.Sprintf("publish:%s:%s:%d", req.PostID, contentHashHex, req.Ts)) + msg := []byte(fmt.Sprintf("publish:%s:%s:%d", req.PostID, rawContentHashHex, req.Ts)) sigBytes, err := base64.StdEncoding.DecodeString(req.Sig) if err != nil { if sigBytes, err = base64.RawURLEncoding.DecodeString(req.Sig); err != nil { @@ -228,6 +192,51 @@ func feedPublish(cfg FeedConfig) http.HandlerFunc { return } + // ── Step 2: MANDATORY server-side metadata scrub ───────────── + // Runs AFTER signature verification so a fake client can't burn + // CPU by triggering expensive scrub work on unauthenticated inputs. + // + // Images: in-process stdlib re-encode → kills EXIF/GPS/ICC/XMP by + // construction. Videos/audio: forwarded to FFmpeg sidecar; without + // one, we reject unless operator opted in to unscrubbed video. 
+ attachment := rawAttachment + if len(attachment) > 0 { + if cfg.Scrubber == nil { + jsonErr(w, fmt.Errorf("media scrubber not configured on this node"), 503) + return + } + ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second) + cleaned, newMIME, err := cfg.Scrubber.Scrub(ctx, attachment, attachmentMIME) + cancel() + if err != nil { + if err == media.ErrSidecarUnavailable && cfg.AllowUnscrubbedVideo { + log.Printf("[feed] WARNING: storing unscrubbed video — no sidecar configured (author=%s)", req.Author) + } else { + status := 400 + if err == media.ErrSidecarUnavailable { + status = 503 + } + jsonErr(w, fmt.Errorf("scrub attachment: %w", err), status) + return + } + } else { + attachment = cleaned + attachmentMIME = newMIME + } + } + + // ── Step 3: recompute content hash over the SCRUBBED bytes ──── + // This is what goes into the response + on-chain CREATE_POST, so + // anyone fetching the body can verify integrity against the chain. + // The signature check already used the raw-upload hash above; + // this final hash binds the on-chain record to what readers will + // actually download. + finalHasher := sha256.New() + finalHasher.Write([]byte(req.Content)) + finalHasher.Write(attachment) + contentHash := finalHasher.Sum(nil) + contentHashHex := hex.EncodeToString(contentHash) + post := &relay.FeedPost{ PostID: req.PostID, Author: req.Author, diff --git a/node/feed_e2e_test.go b/node/feed_e2e_test.go new file mode 100644 index 0000000..90bb577 --- /dev/null +++ b/node/feed_e2e_test.go @@ -0,0 +1,831 @@ +// End-to-end integration tests for the social feed (v2.0.0). +// +// These tests exercise the full HTTP surface against a real in-process +// setup: a BadgerDB chain, a BadgerDB feed-mailbox, the media scrubber, +// and a net/http ServeMux with all feed routes wired. Requests hit the +// real handlers (including rate-limiters, auth, and scrubber) so we +// catch wire-level regressions that unit tests miss. 
+// +// Layout of a typical test: +// +// h := newFeedHarness(t) +// defer h.Close() +// author := h.newUser("alice") +// h.fund(author, 1_000_000) // give them tokens +// resp := h.publish(author, "Hello #world", nil) // POST /feed/publish +// h.commitCreatePost(author, resp) // chain tx +// got := h.getPost(resp.PostID) +// ... +package node + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "image" + "image/color" + "image/jpeg" + "io" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "go-blockchain/blockchain" + "go-blockchain/identity" + "go-blockchain/media" + "go-blockchain/relay" +) + +// ── Harness ────────────────────────────────────────────────────────────── + +type feedHarness struct { + t *testing.T + + chainDir string + feedDir string + chain *blockchain.Chain + mailbox *relay.FeedMailbox + scrubber *media.Scrubber + server *httptest.Server + validator *identity.Identity + tip *blockchain.Block +} + +func newFeedHarness(t *testing.T) *feedHarness { + t.Helper() + chainDir, err := os.MkdirTemp("", "dchain-e2e-chain-*") + if err != nil { + t.Fatalf("MkdirTemp chain: %v", err) + } + feedDir, err := os.MkdirTemp("", "dchain-e2e-feed-*") + if err != nil { + t.Fatalf("MkdirTemp feed: %v", err) + } + c, err := blockchain.NewChain(chainDir) + if err != nil { + t.Fatalf("NewChain: %v", err) + } + fm, err := relay.OpenFeedMailbox(feedDir, 24*time.Hour) + if err != nil { + t.Fatalf("OpenFeedMailbox: %v", err) + } + + validator, err := identity.Generate() + if err != nil { + t.Fatalf("identity.Generate: %v", err) + } + // Bootstrap a genesis block so the validator has funds to disburse. 
+ genesis := blockchain.GenesisBlock(validator.PubKeyHex(), validator.PrivKey) + if err := c.AddBlock(genesis); err != nil { + t.Fatalf("AddBlock genesis: %v", err) + } + + scrubber := media.NewScrubber(media.SidecarConfig{}) // no sidecar — images only + cfg := FeedConfig{ + Mailbox: fm, + HostingRelayPub: validator.PubKeyHex(), + Scrubber: scrubber, + AllowUnscrubbedVideo: false, + GetPost: c.Post, + LikeCount: c.LikeCount, + HasLiked: c.HasLiked, + PostsByAuthor: c.PostsByAuthor, + Following: c.Following, + } + mux := http.NewServeMux() + RegisterFeedRoutes(mux, cfg) + srv := httptest.NewServer(mux) + + h := &feedHarness{ + t: t, chainDir: chainDir, feedDir: feedDir, + chain: c, mailbox: fm, scrubber: scrubber, + server: srv, validator: validator, tip: genesis, + } + t.Cleanup(h.Close) + return h +} + +// Close releases all handles and removes the temp directories. Safe to +// call multiple times. +func (h *feedHarness) Close() { + if h.server != nil { + h.server.Close() + h.server = nil + } + if h.mailbox != nil { + _ = h.mailbox.Close() + h.mailbox = nil + } + if h.chain != nil { + _ = h.chain.Close() + h.chain = nil + } + // Retry because Windows holds mmap files briefly after Close. + for _, dir := range []string{h.chainDir, h.feedDir} { + for i := 0; i < 20; i++ { + if err := os.RemoveAll(dir); err == nil { + break + } + time.Sleep(10 * time.Millisecond) + } + } +} + +// newUser generates a fresh identity. Not funded — call fund() separately. +func (h *feedHarness) newUser(label string) *identity.Identity { + h.t.Helper() + id, err := identity.Generate() + if err != nil { + h.t.Fatalf("%s identity: %v", label, err) + } + return id +} + +// fund sends `amount` µT from the genesis validator to `target`, committing +// the transfer in its own block. 
+func (h *feedHarness) fund(target *identity.Identity, amount uint64) { + h.t.Helper() + tx := &blockchain.Transaction{ + ID: h.nextTxID(h.validator.PubKeyHex(), blockchain.EventTransfer), + Type: blockchain.EventTransfer, + From: h.validator.PubKeyHex(), + To: target.PubKeyHex(), + Amount: amount, + Fee: blockchain.MinFee, + Timestamp: time.Now().UTC(), + } + h.commit(tx) +} + +// commit wraps one or more txs into a block, signs, and appends. +func (h *feedHarness) commit(txs ...*blockchain.Transaction) { + h.t.Helper() + // Small sleep to guarantee distinct tx IDs across calls. + time.Sleep(2 * time.Millisecond) + var totalFees uint64 + for _, tx := range txs { + totalFees += tx.Fee + } + b := &blockchain.Block{ + Index: h.tip.Index + 1, + Timestamp: time.Now().UTC(), + Transactions: txs, + PrevHash: h.tip.Hash, + Validator: h.validator.PubKeyHex(), + TotalFees: totalFees, + } + b.ComputeHash() + b.Sign(h.validator.PrivKey) + if err := h.chain.AddBlock(b); err != nil { + h.t.Fatalf("AddBlock: %v", err) + } + h.tip = b +} + +func (h *feedHarness) nextTxID(from string, typ blockchain.EventType) string { + // Hash (from, type, now_nanos) for uniqueness. + sum := sha256.Sum256([]byte(fmt.Sprintf("%s:%s:%d", from, typ, time.Now().UnixNano()))) + return hex.EncodeToString(sum[:16]) +} + +// publish POSTs /feed/publish as `author` with signed request body. On +// success returns the server's response so the caller can commit the +// on-chain CREATE_POST with matching metadata. +func (h *feedHarness) publish(author *identity.Identity, content string, attachment []byte) feedPublishResponse { + h.t.Helper() + attachB64 := "" + attachMIME := "" + if len(attachment) > 0 { + attachB64 = base64.StdEncoding.EncodeToString(attachment) + attachMIME = "image/jpeg" + } + // Client-side hash matches the server's canonical bytes rule: + // publish::: + // The client knows its own attachment before any server-side scrub, + // so this is the hash over the "raw upload". 
The server recomputes + // over SCRUBBED bytes and returns that as content_hash — the client + // then uses server's number for CREATE_POST. + idHash := sha256.Sum256([]byte(fmt.Sprintf("%s-%d-%s", + author.PubKeyHex(), time.Now().UnixNano(), content))) + postID := hex.EncodeToString(idHash[:16]) + // Build signature over CLIENT-side hash. + h256 := sha256.New() + h256.Write([]byte(content)) + h256.Write(attachment) + clientHash := hex.EncodeToString(h256.Sum(nil)) + ts := time.Now().Unix() + sigBytes := author.Sign([]byte(fmt.Sprintf("publish:%s:%s:%d", postID, clientHash, ts))) + + req := feedPublishRequest{ + PostID: postID, + Author: author.PubKeyHex(), + Content: content, + AttachmentB64: attachB64, + AttachmentMIME: attachMIME, + Sig: base64.StdEncoding.EncodeToString(sigBytes), + Ts: ts, + } + var resp feedPublishResponse + h.postJSON("/feed/publish", req, &resp) + return resp +} + +// commitCreatePost sends the on-chain CREATE_POST tx that pays the +// hosting relay (this node's validator in the harness). Must be called +// after publish() so the two agree on the content hash and size. +func (h *feedHarness) commitCreatePost(author *identity.Identity, pub feedPublishResponse) { + h.t.Helper() + contentHash, err := hex.DecodeString(pub.ContentHash) + if err != nil { + h.t.Fatalf("decode content hash: %v", err) + } + payload := blockchain.CreatePostPayload{ + PostID: pub.PostID, + ContentHash: contentHash, + Size: pub.Size, + HostingRelay: pub.HostingRelay, + } + pbytes, _ := json.Marshal(payload) + tx := &blockchain.Transaction{ + ID: h.nextTxID(author.PubKeyHex(), blockchain.EventCreatePost), + Type: blockchain.EventCreatePost, + From: author.PubKeyHex(), + Amount: 0, + Fee: pub.EstimatedFeeUT, + Payload: pbytes, + Timestamp: time.Now().UTC(), + } + h.commit(tx) +} + +// like / unlike / follow / unfollow helpers — all just small tx builders. 
+ +func (h *feedHarness) like(liker *identity.Identity, postID string) { + payload, _ := json.Marshal(blockchain.LikePostPayload{PostID: postID}) + tx := &blockchain.Transaction{ + ID: h.nextTxID(liker.PubKeyHex(), blockchain.EventLikePost), + Type: blockchain.EventLikePost, + From: liker.PubKeyHex(), + Fee: blockchain.MinFee, + Payload: payload, + Timestamp: time.Now().UTC(), + } + h.commit(tx) +} + +func (h *feedHarness) follow(follower *identity.Identity, target string) { + tx := &blockchain.Transaction{ + ID: h.nextTxID(follower.PubKeyHex(), blockchain.EventFollow), + Type: blockchain.EventFollow, + From: follower.PubKeyHex(), + To: target, + Fee: blockchain.MinFee, + Payload: []byte(`{}`), + Timestamp: time.Now().UTC(), + } + h.commit(tx) +} + +// deletePost commits an on-chain EventDeletePost for the given post, +// signed by the author. +func (h *feedHarness) deletePost(author *identity.Identity, postID string) { + payload, _ := json.Marshal(blockchain.DeletePostPayload{PostID: postID}) + tx := &blockchain.Transaction{ + ID: h.nextTxID(author.PubKeyHex(), blockchain.EventDeletePost), + Type: blockchain.EventDeletePost, + From: author.PubKeyHex(), + Fee: blockchain.MinFee, + Payload: payload, + Timestamp: time.Now().UTC(), + } + h.commit(tx) +} + +// ── HTTP helpers ────────────────────────────────────────────────────────── + +func (h *feedHarness) postJSON(path string, req any, out any) { + h.t.Helper() + body, _ := json.Marshal(req) + resp, err := http.Post(h.server.URL+path, "application/json", bytes.NewReader(body)) + if err != nil { + h.t.Fatalf("POST %s: %v", path, err) + } + defer resp.Body.Close() + if resp.StatusCode >= 400 { + raw, _ := io.ReadAll(resp.Body) + h.t.Fatalf("POST %s → %d: %s", path, resp.StatusCode, string(raw)) + } + if out != nil { + if err := json.NewDecoder(resp.Body).Decode(out); err != nil { + h.t.Fatalf("decode %s response: %v", path, err) + } + } +} + +func (h *feedHarness) postJSONExpectStatus(path string, req any, want int) 
string { + h.t.Helper() + body, _ := json.Marshal(req) + resp, err := http.Post(h.server.URL+path, "application/json", bytes.NewReader(body)) + if err != nil { + h.t.Fatalf("POST %s: %v", path, err) + } + defer resp.Body.Close() + raw, _ := io.ReadAll(resp.Body) + if resp.StatusCode != want { + h.t.Fatalf("POST %s → %d, want %d: %s", path, resp.StatusCode, want, string(raw)) + } + return string(raw) +} + +func (h *feedHarness) getJSON(path string, out any) { + h.t.Helper() + resp, err := http.Get(h.server.URL + path) + if err != nil { + h.t.Fatalf("GET %s: %v", path, err) + } + defer resp.Body.Close() + if resp.StatusCode >= 400 { + raw, _ := io.ReadAll(resp.Body) + h.t.Fatalf("GET %s → %d: %s", path, resp.StatusCode, string(raw)) + } + if out != nil { + if err := json.NewDecoder(resp.Body).Decode(out); err != nil { + h.t.Fatalf("decode %s response: %v", path, err) + } + } +} + +// getStatus fetches path and returns status + body; doesn't fail on non-2xx. +func (h *feedHarness) getStatus(path string) (int, string) { + resp, err := http.Get(h.server.URL + path) + if err != nil { + h.t.Fatalf("GET %s: %v", path, err) + } + defer resp.Body.Close() + raw, _ := io.ReadAll(resp.Body) + return resp.StatusCode, string(raw) +} + +// postRaw is for endpoints like /feed/post/{id}/view that take no body. +func (h *feedHarness) postRaw(path string, out any) { + h.t.Helper() + resp, err := http.Post(h.server.URL+path, "application/json", nil) + if err != nil { + h.t.Fatalf("POST %s: %v", path, err) + } + defer resp.Body.Close() + if resp.StatusCode >= 400 { + raw, _ := io.ReadAll(resp.Body) + h.t.Fatalf("POST %s → %d: %s", path, resp.StatusCode, string(raw)) + } + if out != nil { + if err := json.NewDecoder(resp.Body).Decode(out); err != nil { + h.t.Fatalf("decode %s response: %v", path, err) + } + } +} + +// ── Tests ───────────────────────────────────────────────────────────────── + +// TestE2EFullFlow runs the whole publish → commit → read cycle end-to-end. 
+// +// Covers: /feed/publish signature, /feed/post/{id} body fetch, /feed/post/{id}/stats, +// /feed/post/{id}/view counter, CREATE_POST fee debit to author + credit to +// hosting relay, PostsByAuthor enrichment, DELETE soft-delete → 410. +func TestE2EFullFlow(t *testing.T) { + h := newFeedHarness(t) + + alice := h.newUser("alice") + h.fund(alice, 10*blockchain.Token) + + hostBalBefore, _ := h.chain.Balance(h.validator.PubKeyHex()) + + // 1. PUBLISH → body lands in feed mailbox. + pub := h.publish(alice, "Hello from the feed #dchain #intro", nil) + if pub.PostID == "" || pub.ContentHash == "" { + t.Fatalf("publish response missing required fields: %+v", pub) + } + if pub.HostingRelay != h.validator.PubKeyHex() { + t.Errorf("hosting_relay: got %s, want %s", pub.HostingRelay, h.validator.PubKeyHex()) + } + wantTags := []string{"dchain", "intro"} + if len(pub.Hashtags) != len(wantTags) { + t.Errorf("hashtags: got %v, want %v", pub.Hashtags, wantTags) + } + + // Before the CREATE_POST tx lands the body is available but /stats + // says 0 likes. That's the expected "just published, not committed" state. + + // 2. COMMIT on-chain CREATE_POST tx. + h.commitCreatePost(alice, pub) + + // Hosting relay should have been credited tx.Fee. + hostBalAfter, _ := h.chain.Balance(h.validator.PubKeyHex()) + if hostBalAfter <= hostBalBefore { + t.Errorf("hosting relay balance did not increase after CREATE_POST: %d → %d", + hostBalBefore, hostBalAfter) + } + + // 3. READ via HTTP — body comes back. + var got struct { + PostID string `json:"post_id"` + Author string `json:"author"` + Content string `json:"content"` + } + h.getJSON("/feed/post/"+pub.PostID, &got) + if got.Content != "Hello from the feed #dchain #intro" { + t.Errorf("content: got %q, want original", got.Content) + } + if got.Author != alice.PubKeyHex() { + t.Errorf("author: got %s, want %s", got.Author, alice.PubKeyHex()) + } + + // 4. VIEW COUNTER increments. 
+ var viewResp struct { + Views uint64 `json:"views"` + } + for i := 1; i <= 3; i++ { + h.postRaw("/feed/post/"+pub.PostID+"/view", &viewResp) + if viewResp.Views != uint64(i) { + t.Errorf("views #%d: got %d, want %d", i, viewResp.Views, i) + } + } + + // 5. STATS aggregate is correct. + var stats postStatsResponse + h.getJSON("/feed/post/"+pub.PostID+"/stats", &stats) + if stats.Views != 3 { + t.Errorf("stats.views: got %d, want 3", stats.Views) + } + if stats.Likes != 0 { + t.Errorf("stats.likes: got %d, want 0", stats.Likes) + } + + // 6. AUTHOR listing merges chain record + body + stats. + var authorResp struct { + Count int `json:"count"` + Posts []feedAuthorItem `json:"posts"` + } + h.getJSON("/feed/author/"+alice.PubKeyHex(), &authorResp) + if authorResp.Count != 1 { + t.Fatalf("author count: got %d, want 1", authorResp.Count) + } + if authorResp.Posts[0].Views != 3 { + t.Errorf("author post views: got %d, want 3", authorResp.Posts[0].Views) + } + if len(authorResp.Posts[0].Hashtags) != 2 { + t.Errorf("author post hashtags: got %v, want 2", authorResp.Posts[0].Hashtags) + } + + // 7. DELETE → body stays in mailbox but chain marks deleted → 410 on fetch. + h.deletePost(alice, pub.PostID) + status, body := h.getStatus("/feed/post/" + pub.PostID) + if status != http.StatusGone { + t.Errorf("GET deleted post: got status %d, want 410; body: %s", status, body) + } +} + +// TestE2ELikeUnlikeAffectsStats: on-chain LIKE_POST updates /stats. +func TestE2ELikeUnlikeAffectsStats(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + bob := h.newUser("bob") + h.fund(alice, 10*blockchain.Token) + h.fund(bob, 10*blockchain.Token) + + pub := h.publish(alice, "likeable", nil) + h.commitCreatePost(alice, pub) + + // Bob likes alice's post. 
+ h.like(bob, pub.PostID) + + var stats postStatsResponse + h.getJSON("/feed/post/"+pub.PostID+"/stats?me="+bob.PubKeyHex(), &stats) + if stats.Likes != 1 { + t.Errorf("likes after like: got %d, want 1", stats.Likes) + } + if stats.LikedByMe == nil || !*stats.LikedByMe { + t.Errorf("liked_by_me: got %v, want true", stats.LikedByMe) + } + + // And a non-liker sees liked_by_me=false. + carol := h.newUser("carol") + h.getJSON("/feed/post/"+pub.PostID+"/stats?me="+carol.PubKeyHex(), &stats) + if stats.LikedByMe == nil || *stats.LikedByMe { + t.Errorf("liked_by_me for carol: got %v, want false", stats.LikedByMe) + } +} + +// TestE2ETimeline: follow graph merges posts newest-first. +func TestE2ETimeline(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + bob := h.newUser("bob") + carol := h.newUser("carol") + // Fund everyone. + for _, u := range []*identity.Identity{alice, bob, carol} { + h.fund(u, 10*blockchain.Token) + } + + // Alice follows bob + carol. + h.follow(alice, bob.PubKeyHex()) + h.follow(alice, carol.PubKeyHex()) + + // Bob + carol each publish a post. Sleep 1.1s between so the tx + // timestamps land in distinct unix seconds — the chain chrono index + // is second-resolution, not millisecond. + pubBob := h.publish(bob, "post from bob", nil) + h.commitCreatePost(bob, pubBob) + time.Sleep(1100 * time.Millisecond) + pubCarol := h.publish(carol, "post from carol", nil) + h.commitCreatePost(carol, pubCarol) + + var tl struct { + Count int `json:"count"` + Posts []feedAuthorItem `json:"posts"` + } + h.getJSON("/feed/timeline?follower="+alice.PubKeyHex(), &tl) + if tl.Count != 2 { + t.Fatalf("timeline count: got %d, want 2", tl.Count) + } + // Newest first — carol was published last, so her post should be [0]. 
+ if tl.Posts[0].PostID != pubCarol.PostID { + t.Errorf("timeline[0]: got %s, want carol's post %s", tl.Posts[0].PostID, pubCarol.PostID) + } + if tl.Posts[1].PostID != pubBob.PostID { + t.Errorf("timeline[1]: got %s, want bob's post %s", tl.Posts[1].PostID, pubBob.PostID) + } +} + +// TestE2ETrendingRanking: post with more engagement floats to the top. +func TestE2ETrendingRanking(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + bob := h.newUser("bob") + carol := h.newUser("carol") + for _, u := range []*identity.Identity{alice, bob, carol} { + h.fund(u, 10*blockchain.Token) + } + + lowPost := h.publish(alice, "low-engagement post", nil) + h.commitCreatePost(alice, lowPost) + hotPost := h.publish(alice, "hot post", nil) + h.commitCreatePost(alice, hotPost) + + // Hot post gets 2 likes + 5 views; low post stays at 0. + h.like(bob, hotPost.PostID) + h.like(carol, hotPost.PostID) + var viewResp struct{ Views uint64 } + for i := 0; i < 5; i++ { + h.postRaw("/feed/post/"+hotPost.PostID+"/view", &viewResp) + } + + var tr struct { + Count int `json:"count"` + Posts []feedAuthorItem `json:"posts"` + } + h.getJSON("/feed/trending?limit=10", &tr) + if tr.Count < 2 { + t.Fatalf("trending: got %d posts, want ≥2", tr.Count) + } + // Hot post MUST be first (likes × 3 + views = 11 vs 0). + if tr.Posts[0].PostID != hotPost.PostID { + t.Errorf("trending[0]: got %s, want hot post %s", tr.Posts[0].PostID, hotPost.PostID) + } +} + +// TestE2EForYouFilters: recommendations exclude followed authors, +// already-liked posts, and the user's own posts. 
+func TestE2EForYouFilters(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") // asking for recs + bob := h.newUser("bob") // alice follows bob → bob's posts excluded + carol := h.newUser("carol") // stranger → should surface + dave := h.newUser("dave") // post liked by alice → excluded + + for _, u := range []*identity.Identity{alice, bob, carol, dave} { + h.fund(u, 10*blockchain.Token) + } + + // Alice follows bob. + h.follow(alice, bob.PubKeyHex()) + + // Each non-alice user publishes a post, plus alice herself. + postOwn := h.publish(alice, "my own post", nil) + h.commitCreatePost(alice, postOwn) + postBob := h.publish(bob, "from bob (followed)", nil) + h.commitCreatePost(bob, postBob) + postCarol := h.publish(carol, "from carol (stranger)", nil) + h.commitCreatePost(carol, postCarol) + postDave := h.publish(dave, "from dave", nil) + h.commitCreatePost(dave, postDave) + + // Alice likes dave's post — so it should NOT appear in her ForYou. + h.like(alice, postDave.PostID) + + var fy struct { + Count int `json:"count"` + Posts []feedAuthorItem `json:"posts"` + } + h.getJSON("/feed/foryou?pub="+alice.PubKeyHex()+"&limit=20", &fy) + + // Expected: only carol's post. The others are excluded. + seen := map[string]bool{} + for _, p := range fy.Posts { + seen[p.PostID] = true + } + if seen[postOwn.PostID] { + t.Errorf("ForYou included alice's own post %s", postOwn.PostID) + } + if seen[postBob.PostID] { + t.Errorf("ForYou included followed author bob's post %s", postBob.PostID) + } + if seen[postDave.PostID] { + t.Errorf("ForYou included already-liked post from dave %s", postDave.PostID) + } + if !seen[postCarol.PostID] { + t.Errorf("ForYou missing carol's post %s (should surface)", postCarol.PostID) + } +} + +// TestE2EHashtagSearch: a tag returns only posts that used it. 
+func TestE2EHashtagSearch(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + h.fund(alice, 10*blockchain.Token) + + goPost := h.publish(alice, "learning #golang today", nil) + h.commitCreatePost(alice, goPost) + rustPost := h.publish(alice, "later — #rust", nil) + h.commitCreatePost(alice, rustPost) + untagged := h.publish(alice, "no tags", nil) + h.commitCreatePost(alice, untagged) + + var tag struct { + Tag string `json:"tag"` + Count int `json:"count"` + Posts []feedAuthorItem `json:"posts"` + } + h.getJSON("/feed/hashtag/golang", &tag) + if tag.Count != 1 || tag.Posts[0].PostID != goPost.PostID { + t.Errorf("hashtag(golang): got %+v, want [%s]", tag, goPost.PostID) + } + h.getJSON("/feed/hashtag/rust", &tag) + if tag.Count != 1 || tag.Posts[0].PostID != rustPost.PostID { + t.Errorf("hashtag(rust): got %+v, want [%s]", tag, rustPost.PostID) + } +} + +// TestE2EScrubberStripsEXIF: uploaded image with EXIF canary comes back +// without the canary in the stored body. Proves server-side scrub is +// mandatory and working at the HTTP boundary. +func TestE2EScrubberStripsEXIF(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + h.fund(alice, 1*blockchain.Token) + + // Build a JPEG with an injected EXIF segment containing a canary. + var jpegBuf bytes.Buffer + img := image.NewRGBA(image.Rect(0, 0, 16, 16)) + for y := 0; y < 16; y++ { + for x := 0; x < 16; x++ { + img.Set(x, y, color.RGBA{uint8(x * 16), uint8(y * 16), 100, 255}) + } + } + if err := jpeg.Encode(&jpegBuf, img, &jpeg.Options{Quality: 80}); err != nil { + t.Fatalf("jpeg encode: %v", err) + } + withEXIF := injectEXIFSegment(t, jpegBuf.Bytes(), + "SUPERSECRETGPS-51.5N-0.1W-iPhone-Serial-A1B2C3") + + // Pre-flight: the upload bytes DO contain the canary. 
+ if !bytes.Contains(withEXIF, []byte("SUPERSECRETGPS")) { + t.Fatalf("test setup: canary not injected") + } + + pub := h.publish(alice, "look at this photo", withEXIF) + if pub.PostID == "" { + t.Fatalf("publish failed") + } + h.commitCreatePost(alice, pub) + + // Fetch the stored body back. The attachment field is the cleaned bytes. + var fetched struct { + Attachment string `json:"attachment"` // base64 + } + h.getJSON("/feed/post/"+pub.PostID, &fetched) + if fetched.Attachment == "" { + t.Fatalf("attachment not returned") + } + decoded, err := base64.StdEncoding.DecodeString(fetched.Attachment) + if err != nil { + t.Fatalf("decode attachment: %v", err) + } + if bytes.Contains(decoded, []byte("SUPERSECRETGPS")) { + t.Errorf("CRITICAL: EXIF canary survived server-side scrub — metadata leaked") + } + // Sanity: still a valid JPEG after scrub. + if _, err := jpeg.Decode(bytes.NewReader(decoded)); err != nil { + t.Errorf("scrubbed attachment is not a valid JPEG: %v", err) + } +} + +// TestE2ERejectsMIMEMismatch: claimed MIME vs magic bytes. +func TestE2ERejectsMIMEMismatch(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + h.fund(alice, 1*blockchain.Token) + + // Build a PNG but claim it's a JPEG. + fake := []byte{0x89, 'P', 'N', 'G', '\r', '\n', 0x1a, '\n', + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} + ts := time.Now().Unix() + postID := "mimecheck" + hash := sha256.Sum256(append([]byte("mislabel"), fake...)) + sig := alice.Sign([]byte(fmt.Sprintf("publish:%s:%s:%d", + postID, hex.EncodeToString(hash[:]), ts))) + req := feedPublishRequest{ + PostID: postID, + Author: alice.PubKeyHex(), + Content: "mislabel", + AttachmentB64: base64.StdEncoding.EncodeToString(fake), + AttachmentMIME: "image/jpeg", // LIE — it's PNG magic + Sig: base64.StdEncoding.EncodeToString(sig), + Ts: ts, + } + h.postJSONExpectStatus("/feed/publish", req, http.StatusBadRequest) +} + +// TestE2ERejectsBadSignature: wrong signer cannot publish. 
+func TestE2ERejectsBadSignature(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + eve := h.newUser("eve") + h.fund(alice, 1*blockchain.Token) + h.fund(eve, 1*blockchain.Token) + + ts := time.Now().Unix() + postID := "forgery" + hash := sha256.Sum256([]byte("evil")) + // Eve signs over data but claims to be alice. + sig := eve.Sign([]byte(fmt.Sprintf("publish:%s:%s:%d", + postID, hex.EncodeToString(hash[:]), ts))) + req := feedPublishRequest{ + PostID: postID, + Author: alice.PubKeyHex(), // claim alice + Content: "evil", + Sig: base64.StdEncoding.EncodeToString(sig), + Ts: ts, + } + h.postJSONExpectStatus("/feed/publish", req, http.StatusForbidden) +} + +// TestE2ERejectsStaleTimestamp: publish with ts way in the past must be rejected. +func TestE2ERejectsStaleTimestamp(t *testing.T) { + h := newFeedHarness(t) + alice := h.newUser("alice") + h.fund(alice, 1*blockchain.Token) + + ts := time.Now().Add(-1 * time.Hour).Unix() // 1 hour stale + postID := "stale" + hash := sha256.Sum256([]byte("old")) + sig := alice.Sign([]byte(fmt.Sprintf("publish:%s:%s:%d", + postID, hex.EncodeToString(hash[:]), ts))) + req := feedPublishRequest{ + PostID: postID, + Author: alice.PubKeyHex(), + Content: "old", + Sig: base64.StdEncoding.EncodeToString(sig), + Ts: ts, + } + h.postJSONExpectStatus("/feed/publish", req, http.StatusBadRequest) +} + +// injectEXIFSegment splices an APP1 EXIF segment with the given canary +// string into a JPEG. Mirrors media/scrub_test.go but local to keep the +// integration test self-contained. +func injectEXIFSegment(t *testing.T, src []byte, canary string) []byte { + t.Helper() + if len(src) < 2 || src[0] != 0xFF || src[1] != 0xD8 { + t.Fatalf("not a JPEG") + } + payload := []byte("Exif\x00\x00" + canary) + segLen := len(payload) + 2 + out := make([]byte, 0, len(src)+segLen+4) + out = append(out, src[0], src[1]) // SOI + out = append(out, 0xFF, 0xE1, byte(segLen>>8), byte(segLen&0xff)) + out = append(out, payload...) 
+ out = append(out, src[2:]...) + return out +} + +// Silence unused-import lint if strings gets trimmed by refactor. +var _ = strings.TrimSpace +var _ = context.TODO diff --git a/node/feed_twonode_test.go b/node/feed_twonode_test.go new file mode 100644 index 0000000..11c8a89 --- /dev/null +++ b/node/feed_twonode_test.go @@ -0,0 +1,504 @@ +// Two-node simulation: verifies that a post published on Node A is +// discoverable and fetchable from Node B after block propagation. +// +// The real network uses libp2p gossipsub for blocks + an HTTP pull +// fallback. For tests we simulate gossip by manually calling chain.AddBlock +// on both nodes with the same block — identical to what each node does +// after receiving a peer's gossiped block in production. +// +// Body ownership: only the HOSTING relay has the post body in its +// feed mailbox. Readers on OTHER nodes see the on-chain record +// (hosting_relay pubkey, content hash, size, author) and fetch the +// body directly from the hosting node over HTTP. That's the design — +// storage costs don't get amortised across the whole network, the +// author pays one node to host, and the public reads from that one +// node (or from replicas if/when we add post pinning in v3.0.0). +package node + +import ( + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "go-blockchain/blockchain" + "go-blockchain/identity" + "go-blockchain/media" + "go-blockchain/relay" +) + +// twoNodeHarness wires two independent chain+feed instances sharing a +// single block history (simulated gossip). Node A is the "hosting" +// relay; Node B is the reader. 
+type twoNodeHarness struct { + t *testing.T + + aChainDir, aFeedDir string + bChainDir, bFeedDir string + + aChain, bChain *blockchain.Chain + aMailbox, bMailbox *relay.FeedMailbox + aServer, bServer *httptest.Server + aHostPub string + bHostPub string + validator *identity.Identity + tipIndex uint64 + tipHash []byte +} + +func newTwoNodeHarness(t *testing.T) *twoNodeHarness { + t.Helper() + mkdir := func(prefix string) string { + d, err := os.MkdirTemp("", prefix) + if err != nil { + t.Fatalf("MkdirTemp: %v", err) + } + return d + } + + h := &twoNodeHarness{ + t: t, + aChainDir: mkdir("dchain-2n-chainA-*"), + aFeedDir: mkdir("dchain-2n-feedA-*"), + bChainDir: mkdir("dchain-2n-chainB-*"), + bFeedDir: mkdir("dchain-2n-feedB-*"), + } + + var err error + h.aChain, err = blockchain.NewChain(h.aChainDir) + if err != nil { + t.Fatalf("chain A: %v", err) + } + h.bChain, err = blockchain.NewChain(h.bChainDir) + if err != nil { + t.Fatalf("chain B: %v", err) + } + h.aMailbox, err = relay.OpenFeedMailbox(h.aFeedDir, 24*time.Hour) + if err != nil { + t.Fatalf("feed A: %v", err) + } + h.bMailbox, err = relay.OpenFeedMailbox(h.bFeedDir, 24*time.Hour) + if err != nil { + t.Fatalf("feed B: %v", err) + } + + h.validator, err = identity.Generate() + if err != nil { + t.Fatalf("validator: %v", err) + } + // Both nodes start from the same genesis — the single bootstrap + // validator allocates the initial supply. In production this is + // hardcoded; in tests we just generate and use it to sign blocks + // on both chains. + genesis := blockchain.GenesisBlock(h.validator.PubKeyHex(), h.validator.PrivKey) + if err := h.aChain.AddBlock(genesis); err != nil { + t.Fatalf("A genesis: %v", err) + } + if err := h.bChain.AddBlock(genesis); err != nil { + t.Fatalf("B genesis: %v", err) + } + h.tipIndex = genesis.Index + h.tipHash = genesis.Hash + + // Node A hosts; Node B is a pure reader (no host_pub of its own that + // anyone publishes to). 
They share a single validator because this + // test isn't about consensus — it's about chain state propagation. + h.aHostPub = h.validator.PubKeyHex() + + // Node B uses a separate identity purely for its hosting_relay field + // (never actually hosts anything in this scenario). Distinguishes A + // from B in balance assertions. + idB, _ := identity.Generate() + h.bHostPub = idB.PubKeyHex() + + scrubber := media.NewScrubber(media.SidecarConfig{}) + + aCfg := FeedConfig{ + Mailbox: h.aMailbox, + HostingRelayPub: h.aHostPub, + Scrubber: scrubber, + GetPost: h.aChain.Post, + LikeCount: h.aChain.LikeCount, + HasLiked: h.aChain.HasLiked, + PostsByAuthor: h.aChain.PostsByAuthor, + Following: h.aChain.Following, + } + bCfg := FeedConfig{ + Mailbox: h.bMailbox, + HostingRelayPub: h.bHostPub, + Scrubber: scrubber, + GetPost: h.bChain.Post, + LikeCount: h.bChain.LikeCount, + HasLiked: h.bChain.HasLiked, + PostsByAuthor: h.bChain.PostsByAuthor, + Following: h.bChain.Following, + } + muxA := http.NewServeMux() + RegisterFeedRoutes(muxA, aCfg) + h.aServer = httptest.NewServer(muxA) + muxB := http.NewServeMux() + RegisterFeedRoutes(muxB, bCfg) + h.bServer = httptest.NewServer(muxB) + + t.Cleanup(h.Close) + return h +} + +func (h *twoNodeHarness) Close() { + if h.aServer != nil { + h.aServer.Close() + } + if h.bServer != nil { + h.bServer.Close() + } + if h.aMailbox != nil { + _ = h.aMailbox.Close() + } + if h.bMailbox != nil { + _ = h.bMailbox.Close() + } + if h.aChain != nil { + _ = h.aChain.Close() + } + if h.bChain != nil { + _ = h.bChain.Close() + } + for _, dir := range []string{h.aChainDir, h.aFeedDir, h.bChainDir, h.bFeedDir} { + for i := 0; i < 20; i++ { + if err := os.RemoveAll(dir); err == nil { + break + } + time.Sleep(10 * time.Millisecond) + } + } +} + +// gossipBlock simulates libp2p block propagation: same block applied to +// both chains. 
In production, AddBlock is called on each peer after the +// gossipsub message arrives — no chain-level difference from the direct +// call here. +func (h *twoNodeHarness) gossipBlock(txs ...*blockchain.Transaction) { + h.t.Helper() + time.Sleep(2 * time.Millisecond) // distinct tx IDs + var totalFees uint64 + for _, tx := range txs { + totalFees += tx.Fee + } + b := &blockchain.Block{ + Index: h.tipIndex + 1, + Timestamp: time.Now().UTC(), + Transactions: txs, + PrevHash: h.tipHash, + Validator: h.validator.PubKeyHex(), + TotalFees: totalFees, + } + b.ComputeHash() + b.Sign(h.validator.PrivKey) + + if err := h.aChain.AddBlock(b); err != nil { + h.t.Fatalf("A AddBlock: %v", err) + } + if err := h.bChain.AddBlock(b); err != nil { + h.t.Fatalf("B AddBlock: %v", err) + } + h.tipIndex = b.Index + h.tipHash = b.Hash +} + +func (h *twoNodeHarness) nextTxID(from string, typ blockchain.EventType) string { + sum := sha256.Sum256([]byte(fmt.Sprintf("%s:%s:%d", from, typ, time.Now().UnixNano()))) + return hex.EncodeToString(sum[:16]) +} + +// fundAB transfers from validator → target, propagated to both chains. +func (h *twoNodeHarness) fundAB(target *identity.Identity, amount uint64) { + tx := &blockchain.Transaction{ + ID: h.nextTxID(h.validator.PubKeyHex(), blockchain.EventTransfer), + Type: blockchain.EventTransfer, + From: h.validator.PubKeyHex(), + To: target.PubKeyHex(), + Amount: amount, + Fee: blockchain.MinFee, + Timestamp: time.Now().UTC(), + } + h.gossipBlock(tx) +} + +// publishOnA uploads body to A's feed mailbox (only A gets the body) and +// gossips the CREATE_POST tx to both chains (both see the metadata). 
+func (h *twoNodeHarness) publishOnA(author *identity.Identity, content string) feedPublishResponse { + h.t.Helper() + idHash := sha256.Sum256([]byte(fmt.Sprintf("%s-%d-%s", + author.PubKeyHex(), time.Now().UnixNano(), content))) + postID := hex.EncodeToString(idHash[:16]) + clientHasher := sha256.New() + clientHasher.Write([]byte(content)) + clientHash := hex.EncodeToString(clientHasher.Sum(nil)) + ts := time.Now().Unix() + sig := author.Sign([]byte(fmt.Sprintf("publish:%s:%s:%d", postID, clientHash, ts))) + + req := feedPublishRequest{ + PostID: postID, + Author: author.PubKeyHex(), + Content: content, + Sig: base64.StdEncoding.EncodeToString(sig), + Ts: ts, + } + body, _ := json.Marshal(req) + resp, err := http.Post(h.aServer.URL+"/feed/publish", "application/json", strings.NewReader(string(body))) + if err != nil { + h.t.Fatalf("publish on A: %v", err) + } + defer resp.Body.Close() + if resp.StatusCode >= 400 { + raw, _ := io.ReadAll(resp.Body) + h.t.Fatalf("publish on A → %d: %s", resp.StatusCode, string(raw)) + } + var out feedPublishResponse + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + h.t.Fatalf("decode publish: %v", err) + } + + // Now the ON-CHAIN CREATE_POST tx — gossiped to both nodes. + contentHash, _ := hex.DecodeString(out.ContentHash) + payload, _ := json.Marshal(blockchain.CreatePostPayload{ + PostID: out.PostID, + ContentHash: contentHash, + Size: out.Size, + HostingRelay: out.HostingRelay, + }) + tx := &blockchain.Transaction{ + ID: h.nextTxID(author.PubKeyHex(), blockchain.EventCreatePost), + Type: blockchain.EventCreatePost, + From: author.PubKeyHex(), + Fee: out.EstimatedFeeUT, + Payload: payload, + Timestamp: time.Now().UTC(), + } + h.gossipBlock(tx) + return out +} + +// likeOnB submits a LIKE_POST tx originating on Node B (simulates a +// follower using their own node). Both chains receive the block. 
func (h *twoNodeHarness) likeOnB(liker *identity.Identity, postID string) {
	payload, _ := json.Marshal(blockchain.LikePostPayload{PostID: postID})
	tx := &blockchain.Transaction{
		ID: h.nextTxID(liker.PubKeyHex(), blockchain.EventLikePost),
		Type: blockchain.EventLikePost,
		From: liker.PubKeyHex(),
		Fee: blockchain.MinFee,
		Payload: payload,
		Timestamp: time.Now().UTC(),
	}
	h.gossipBlock(tx)
}

// getBodyFromA fetches /feed/post/{id} from Node A's HTTP server.
// Returns the HTTP status code and the raw response body.
func (h *twoNodeHarness) getBodyFromA(postID string) (int, []byte) {
	h.t.Helper()
	resp, err := http.Get(h.aServer.URL + "/feed/post/" + postID)
	if err != nil {
		h.t.Fatalf("GET A: %v", err)
	}
	defer resp.Body.Close()
	raw, _ := io.ReadAll(resp.Body)
	return resp.StatusCode, raw
}

// getBodyFromB same for Node B.
func (h *twoNodeHarness) getBodyFromB(postID string) (int, []byte) {
	h.t.Helper()
	resp, err := http.Get(h.bServer.URL + "/feed/post/" + postID)
	if err != nil {
		h.t.Fatalf("GET B: %v", err)
	}
	defer resp.Body.Close()
	raw, _ := io.ReadAll(resp.Body)
	return resp.StatusCode, raw
}

// ── Tests ─────────────────────────────────────────────────────────────────

// TestTwoNodePostPropagation: Alice publishes on Node A. After block
// propagation, both chains have the record. Node B can read the
// on-chain metadata directly, and can fetch the body from Node A (the
// hosting relay) — which is what the client does in production.
func TestTwoNodePostPropagation(t *testing.T) {
	h := newTwoNodeHarness(t)
	alice, _ := identity.Generate()
	h.fundAB(alice, 10*blockchain.Token)

	pub := h.publishOnA(alice, "hello from node A")

	// Node A chain has the record.
	recA, err := h.aChain.Post(pub.PostID)
	if err != nil || recA == nil {
		t.Fatalf("A chain.Post: %v (rec=%v)", err, recA)
	}
	// Node B chain also has the record — propagation successful.
	recB, err := h.bChain.Post(pub.PostID)
	if err != nil || recB == nil {
		t.Fatalf("B chain.Post: %v (rec=%v)", err, recB)
	}
	// Both chains must agree on the post's identity and authorship.
	if recA.PostID != recB.PostID || recA.Author != recB.Author {
		t.Errorf("chains disagree: A=%+v B=%+v", recA, recB)
	}
	if recB.HostingRelay != h.aHostPub {
		t.Errorf("B sees hosting_relay=%s, want A's pub=%s", recB.HostingRelay, h.aHostPub)
	}

	// Node A HTTP serves the body.
	statusA, _ := h.getBodyFromA(pub.PostID)
	if statusA != http.StatusOK {
		t.Errorf("A GET: status %d, want 200", statusA)
	}

	// Node B HTTP does NOT have the body — body only lives on the hosting
	// relay. This is by design: the reader client on Node B would read
	// chain.Post(id).HostingRelay, look up its URL via /api/relays, and
	// fetch directly from Node A. Tested by the next assertion.
	statusB, _ := h.getBodyFromB(pub.PostID)
	if statusB != http.StatusNotFound {
		t.Errorf("B GET: status %d, want 404 (body lives only on hosting relay)", statusB)
	}

	// Simulate the client routing step: use chain record from B to find
	// hosting relay, then fetch from A.
	hosting := recB.HostingRelay
	if hosting != h.aHostPub {
		t.Fatalf("hosting not A: %s", hosting)
	}
	// In production: look up hosting's URL via /api/relays. Here we
	// already know it = h.aServer.URL. Just verify the fetch works.
	statusCross, bodyCross := h.getBodyFromA(pub.PostID)
	if statusCross != http.StatusOK {
		t.Fatalf("cross-node fetch: status %d", statusCross)
	}
	var fetched struct {
		Content string `json:"content"`
		Author string `json:"author"`
	}
	if err := json.Unmarshal(bodyCross, &fetched); err != nil {
		t.Fatalf("decode cross-node body: %v", err)
	}
	if fetched.Content != "hello from node A" {
		t.Errorf("cross-node content: got %q", fetched.Content)
	}
}

// TestTwoNodeLikeCounterSharedAcrossNodes: a like submitted with tx
// origin on Node B bumps the on-chain counter — which Node A's HTTP
// /stats then reflects.
// Demonstrates that engagement aggregates are
// consistent across the mesh because they live on the chain, not in
// any single relay's memory.
func TestTwoNodeLikeCounterSharedAcrossNodes(t *testing.T) {
	h := newTwoNodeHarness(t)
	alice, _ := identity.Generate()
	bob, _ := identity.Generate()
	h.fundAB(alice, 10*blockchain.Token)
	h.fundAB(bob, 10*blockchain.Token)

	pub := h.publishOnA(alice, "content for engagement test")
	h.likeOnB(bob, pub.PostID)

	// A's HTTP stats (backed by its chain.LikeCount) should see the like.
	resp, err := http.Get(h.aServer.URL + "/feed/post/" + pub.PostID + "/stats")
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()
	var stats postStatsResponse
	if err := json.NewDecoder(resp.Body).Decode(&stats); err != nil {
		t.Fatal(err)
	}
	if stats.Likes != 1 {
		t.Errorf("A /stats: got %d likes, want 1", stats.Likes)
	}

	// Same for B.
	resp, err = http.Get(h.bServer.URL + "/feed/post/" + pub.PostID + "/stats")
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()
	if err := json.NewDecoder(resp.Body).Decode(&stats); err != nil {
		t.Fatal(err)
	}
	if stats.Likes != 1 {
		t.Errorf("B /stats: got %d likes, want 1", stats.Likes)
	}
}

// TestTwoNodeFollowGraphReplicates: FOLLOW tx on any node propagates to
// both chains; B's /feed/timeline returns A-hosted posts correctly.
func TestTwoNodeFollowGraphReplicates(t *testing.T) {
	h := newTwoNodeHarness(t)
	alice, _ := identity.Generate() // will follow bob
	bob, _ := identity.Generate() // author
	h.fundAB(alice, 10*blockchain.Token)
	h.fundAB(bob, 10*blockchain.Token)

	// Alice follows Bob (tx gossiped to both nodes).
	followTx := &blockchain.Transaction{
		ID: h.nextTxID(alice.PubKeyHex(), blockchain.EventFollow),
		Type: blockchain.EventFollow,
		From: alice.PubKeyHex(),
		To: bob.PubKeyHex(),
		Fee: blockchain.MinFee,
		Payload: []byte(`{}`),
		Timestamp: time.Now().UTC(),
	}
	h.gossipBlock(followTx)

	// Bob publishes on A. Alice queries timeline on B.
	bobPost := h.publishOnA(bob, "bob speaks")

	// Alice's timeline on Node B should include Bob's post: the post
	// metadata lives on chain and replicates, but /feed/timeline merges
	// chain records with LOCAL mailbox bodies — and B's feed_mailbox does
	// not hold the body (A is the hosting relay). The body fields of the
	// response are therefore empty on B. This e2e test verifies the
	// metadata only; the client is expected to resolve bodies separately
	// via the hosting_relay URL.
	resp, err := http.Get(h.bServer.URL + "/feed/timeline?follower=" + alice.PubKeyHex())
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()
	var tl struct {
		Count int `json:"count"`
		Posts []feedAuthorItem `json:"posts"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&tl); err != nil {
		t.Fatal(err)
	}
	if tl.Count != 1 {
		t.Fatalf("B timeline count: got %d, want 1", tl.Count)
	}
	if tl.Posts[0].PostID != bobPost.PostID {
		t.Errorf("B timeline[0]: got %s, want %s", tl.Posts[0].PostID, bobPost.PostID)
	}
	// Metadata must be correct even if body is empty on B.
	if tl.Posts[0].Author != bob.PubKeyHex() {
		t.Errorf("B timeline[0].author: got %s, want %s", tl.Posts[0].Author, bob.PubKeyHex())
	}
	if tl.Posts[0].HostingRelay != h.aHostPub {
		t.Errorf("B timeline[0].hosting_relay: got %s, want A (%s)", tl.Posts[0].HostingRelay, h.aHostPub)
	}
	// Body is intentionally empty on B (A hosts it). Verify.
+ if tl.Posts[0].Content != "" { + t.Errorf("B timeline[0].content: got %q, want empty (body lives on A)", tl.Posts[0].Content) + } +} From 5b64ef2560705d779ef59bb38d443a2c48d441ee Mon Sep 17 00:00:00 2001 From: vsecoder Date: Sat, 18 Apr 2026 19:43:55 +0300 Subject: [PATCH 10/27] feat(client): Twitter-style social feed UI (Phase C of v2.0.0) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ships the client side of the v2.0.0 feed feature. Folds client-app/ into the monorepo (was previously .gitignored as "tracked separately" but no separate repo ever existed — for v2.0.0 the client is first-class). Feed screens app/(app)/feed.tsx — Feed tab - Three-way tab strip: Подписки / Для вас / В тренде backed by /feed/timeline, /feed/foryou, /feed/trending respectively - Default landing tab is "Для вас" — surfaces discovery without requiring the user to follow anyone first - FlatList with pull-to-refresh + viewability-driven view counter bump (posts visible ≥ 60% for ≥ 1s trigger POST /feed/post/…/view) - Floating blue compose button → /compose - Per-post liked_by_me fetched in batches of 6 after list load app/(app)/compose.tsx — post composer modal - Fullscreen, Twitter-like header (✕ left, Опубликовать right) - Auto-focused multiline TextInput, 4000 char cap - Hashtag preview chips that auto-update as you type - expo-image-picker + expo-image-manipulator pipeline: resize to 1080px max-dim, JPEG Q=50 (client-side first-pass compression before the mandatory server-side scrub) - Live fee estimate + balance guard with a confirmation modal ("Опубликовать пост? 
Цена: 0.00X T · Размер: N KB") - Exif: false passed to ImagePicker as an extra privacy layer app/(app)/feed/[id].tsx — post detail - Full PostCard rendering + detailed info panel (views, likes, size, fee, hosting relay, hashtags as tappable chips) - Triggers bumpView on mount - 410 (on-chain soft-delete) routes back to the feed app/(app)/feed/tag/[tag].tsx — hashtag feed app/(app)/profile/[address].tsx — rebuilt - Twitter-ish profile: avatar, name, address short-form, post count - Posts | Инфо tab strip - Follow / Unfollow button for non-self profiles (optimistic UI) - Edit button on self profile → settings - Secondary actions (chat, copy address) when viewing a known contact Supporting library lib/feed.ts — HTTP wrappers + tx builders for every /feed/* endpoint: - publishPost (POST /feed/publish, signed) - publishAndCommit (publish → on-chain CREATE_POST) - fetchPost / fetchStats / bumpView - fetchAuthorPosts / fetchTimeline / fetchForYou / fetchTrending / fetchHashtag - buildCreatePostTx / buildDeletePostTx - buildFollowTx / buildUnfollowTx - buildLikePostTx / buildUnlikePostTx - likePost / unlikePost / followUser / unfollowUser / deletePost (high-level helpers that bundle build + submitTx) - formatFee, formatRelativeTime, formatCount — Twitter-like display helpers components/feed/PostCard.tsx — core card component - Memoised for performance (N-row re-render on every like elsewhere would cost a lot otherwise) - Optimistic like toggle with heart-bounce spring animation - Hashtag highlighting in body text (tappable → hashtag feed) - Long-press context menu (Delete, owner-only) - Views / likes / share-link / reply icons in footer row Navigation cleanup - NavBar: removed the SOON pill on the Feed tab (it's shipped now) - (app)/_layout: hide NavBar on /compose and /feed/* sub-routes - AnimatedSlot: treat /feed/, /feed/tag/, /compose as sub-routes so back-swipe-right closes them Channel removal (client side) - lib/types.ts: ContactKind stripped to 'direct' | 'group'; 
legacy 'channel' flag removed. `kind` field kept for backward compat with existing AsyncStorage records. - lib/devSeed.ts: dropped the 5 channel seed contacts. - components/ChatTile.tsx: removed channel kindIcon branch. Dependencies - expo-image-manipulator added for client-side image compression. - expo-file-system/legacy used for readAsStringAsync (SDK 54 moved that API to the legacy sub-path; the new streaming API isn't yet stable). Type check - npx tsc --noEmit — clean, 0 errors. Next (not in this commit) - Direct attachment-bytes endpoint on the server so post-detail can actually render the image (currently shows placeholder with URL) - Cross-relay body fetch via /api/relays + hosting_relay pubkey - Mentions (@username) with notifications - Full-text search Co-Authored-By: Claude Opus 4.7 (1M context) --- .gitignore | 5 +- client-app/.gitignore | 42 + client-app/README.md | 93 + client-app/app.json | 69 + client-app/app/(app)/_layout.tsx | 81 + client-app/app/(app)/chats/[id].tsx | 512 + client-app/app/(app)/chats/_layout.tsx | 28 + client-app/app/(app)/chats/index.tsx | 105 + client-app/app/(app)/compose.tsx | 390 + client-app/app/(app)/feed.tsx | 320 + client-app/app/(app)/feed/[id].tsx | 242 + client-app/app/(app)/feed/tag/[tag].tsx | 127 + client-app/app/(app)/new-contact.tsx | 288 + client-app/app/(app)/profile/[address].tsx | 441 + client-app/app/(app)/requests.tsx | 173 + client-app/app/(app)/settings.tsx | 595 + client-app/app/(app)/wallet.tsx | 652 + client-app/app/(auth)/create.tsx | 139 + client-app/app/(auth)/created.tsx | 196 + client-app/app/(auth)/import.tsx | 230 + client-app/app/_layout.tsx | 59 + client-app/app/index.tsx | 519 + client-app/babel.config.js | 12 + client-app/components/AnimatedSlot.tsx | 67 + client-app/components/Avatar.tsx | 76 + client-app/components/ChatTile.tsx | 174 + client-app/components/Composer.tsx | 329 + client-app/components/Header.tsx | 76 + client-app/components/IconButton.tsx | 61 + 
client-app/components/NavBar.tsx | 150 + client-app/components/SearchBar.tsx | 88 + client-app/components/TabHeader.tsx | 59 + client-app/components/chat/AttachmentMenu.tsx | 188 + .../components/chat/AttachmentPreview.tsx | 178 + client-app/components/chat/DaySeparator.tsx | 36 + client-app/components/chat/MessageBubble.tsx | 374 + client-app/components/chat/ReplyQuote.tsx | 70 + .../components/chat/VideoCirclePlayer.tsx | 158 + .../components/chat/VideoCircleRecorder.tsx | 217 + client-app/components/chat/VoicePlayer.tsx | 166 + client-app/components/chat/VoiceRecorder.tsx | 183 + client-app/components/chat/rows.ts | 79 + client-app/components/feed/PostCard.tsx | 370 + client-app/eas.json | 22 + client-app/global.css | 3 + client-app/hooks/useBalance.ts | 94 + client-app/hooks/useConnectionStatus.ts | 52 + client-app/hooks/useContacts.ts | 80 + client-app/hooks/useGlobalInbox.ts | 114 + client-app/hooks/useMessages.ts | 149 + client-app/hooks/useNotifications.ts | 144 + client-app/hooks/useWellKnownContracts.ts | 61 + client-app/lib/api.ts | 778 ++ client-app/lib/crypto.ts | 168 + client-app/lib/dates.ts | 67 + client-app/lib/devSeed.ts | 444 + client-app/lib/feed.ts | 487 + client-app/lib/storage.ts | 101 + client-app/lib/store.ts | 128 + client-app/lib/types.ts | 149 + client-app/lib/utils.ts | 35 + client-app/lib/ws.ts | 401 + client-app/metro.config.js | 6 + client-app/nativewind-env.d.ts | 1 + client-app/package-lock.json | 11482 ++++++++++++++++ client-app/package.json | 61 + client-app/tailwind.config.js | 35 + client-app/tsconfig.json | 9 + 68 files changed, 23487 insertions(+), 1 deletion(-) create mode 100644 client-app/.gitignore create mode 100644 client-app/README.md create mode 100644 client-app/app.json create mode 100644 client-app/app/(app)/_layout.tsx create mode 100644 client-app/app/(app)/chats/[id].tsx create mode 100644 client-app/app/(app)/chats/_layout.tsx create mode 100644 client-app/app/(app)/chats/index.tsx create mode 100644 
client-app/app/(app)/compose.tsx create mode 100644 client-app/app/(app)/feed.tsx create mode 100644 client-app/app/(app)/feed/[id].tsx create mode 100644 client-app/app/(app)/feed/tag/[tag].tsx create mode 100644 client-app/app/(app)/new-contact.tsx create mode 100644 client-app/app/(app)/profile/[address].tsx create mode 100644 client-app/app/(app)/requests.tsx create mode 100644 client-app/app/(app)/settings.tsx create mode 100644 client-app/app/(app)/wallet.tsx create mode 100644 client-app/app/(auth)/create.tsx create mode 100644 client-app/app/(auth)/created.tsx create mode 100644 client-app/app/(auth)/import.tsx create mode 100644 client-app/app/_layout.tsx create mode 100644 client-app/app/index.tsx create mode 100644 client-app/babel.config.js create mode 100644 client-app/components/AnimatedSlot.tsx create mode 100644 client-app/components/Avatar.tsx create mode 100644 client-app/components/ChatTile.tsx create mode 100644 client-app/components/Composer.tsx create mode 100644 client-app/components/Header.tsx create mode 100644 client-app/components/IconButton.tsx create mode 100644 client-app/components/NavBar.tsx create mode 100644 client-app/components/SearchBar.tsx create mode 100644 client-app/components/TabHeader.tsx create mode 100644 client-app/components/chat/AttachmentMenu.tsx create mode 100644 client-app/components/chat/AttachmentPreview.tsx create mode 100644 client-app/components/chat/DaySeparator.tsx create mode 100644 client-app/components/chat/MessageBubble.tsx create mode 100644 client-app/components/chat/ReplyQuote.tsx create mode 100644 client-app/components/chat/VideoCirclePlayer.tsx create mode 100644 client-app/components/chat/VideoCircleRecorder.tsx create mode 100644 client-app/components/chat/VoicePlayer.tsx create mode 100644 client-app/components/chat/VoiceRecorder.tsx create mode 100644 client-app/components/chat/rows.ts create mode 100644 client-app/components/feed/PostCard.tsx create mode 100644 client-app/eas.json create mode 
100644 client-app/global.css create mode 100644 client-app/hooks/useBalance.ts create mode 100644 client-app/hooks/useConnectionStatus.ts create mode 100644 client-app/hooks/useContacts.ts create mode 100644 client-app/hooks/useGlobalInbox.ts create mode 100644 client-app/hooks/useMessages.ts create mode 100644 client-app/hooks/useNotifications.ts create mode 100644 client-app/hooks/useWellKnownContracts.ts create mode 100644 client-app/lib/api.ts create mode 100644 client-app/lib/crypto.ts create mode 100644 client-app/lib/dates.ts create mode 100644 client-app/lib/devSeed.ts create mode 100644 client-app/lib/feed.ts create mode 100644 client-app/lib/storage.ts create mode 100644 client-app/lib/store.ts create mode 100644 client-app/lib/types.ts create mode 100644 client-app/lib/utils.ts create mode 100644 client-app/lib/ws.ts create mode 100644 client-app/metro.config.js create mode 100644 client-app/nativewind-env.d.ts create mode 100644 client-app/package-lock.json create mode 100644 client-app/package.json create mode 100644 client-app/tailwind.config.js create mode 100644 client-app/tsconfig.json diff --git a/.gitignore b/.gitignore index 9d0821e..4191450 100644 --- a/.gitignore +++ b/.gitignore @@ -60,4 +60,7 @@ Thumbs.db # Not part of the release bundle — tracked separately CONTEXT.md CHANGELOG.md -client-app/ + +# Client app sources are tracked from v2.0.0 onwards (feed feature made +# the client a first-class part of the release). Local state (node_modules, +# build artifacts, Expo cache) is ignored via client-app/.gitignore. 
diff --git a/client-app/.gitignore b/client-app/.gitignore new file mode 100644 index 0000000..2e0754d --- /dev/null +++ b/client-app/.gitignore @@ -0,0 +1,42 @@ +# ── Client-app local state ───────────────────────────────────────────── + +# Dependencies (install via npm ci) +node_modules/ + +# Expo / Metro caches +.expo/ +.expo-shared/ + +# Build outputs +dist/ +web-build/ +*.apk +*.aab +*.ipa + +# TypeScript incremental build +*.tsbuildinfo + +# Env files +.env +.env.local +.env.*.local + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# OS +.DS_Store +Thumbs.db + +# Editor +.vscode/ +.idea/ +*.swp + +# Native prebuild output (Expo managed) +/android +/ios diff --git a/client-app/README.md b/client-app/README.md new file mode 100644 index 0000000..856ba71 --- /dev/null +++ b/client-app/README.md @@ -0,0 +1,93 @@ +# DChain Messenger — React Native Client + +E2E-encrypted mobile/desktop messenger built on the DChain blockchain stack. + +**Stack:** React Native · Expo · NativeWind (Tailwind) · TweetNaCl · Zustand + +## Quick Start + +```bash +cd client-app +npm install +npx expo start # opens Expo Dev Tools +# Press 'i' for iOS simulator, 'a' for Android, 'w' for web +``` + +## Requirements + +- Node.js 18+ +- [Expo Go](https://expo.dev/client) on your phone (for Expo tunnel), or iOS/Android emulator +- A running DChain node (see root README for `docker compose up --build -d`) + +## Project Structure + +``` +client-app/ +├── app/ +│ ├── _layout.tsx # Root layout — loads keys, sets up nav +│ ├── index.tsx # Welcome / onboarding +│ ├── (auth)/ +│ │ ├── create.tsx # Generate new Ed25519 + X25519 keys +│ │ ├── created.tsx # Key created — export reminder +│ │ └── import.tsx # Import existing key.json +│ └── (app)/ +│ ├── _layout.tsx # Tab bar — Chats · Wallet · Settings +│ ├── chats/ +│ │ ├── index.tsx # Chat list with contacts +│ │ └── [id].tsx # Individual chat with E2E encryption +│ ├── requests.tsx # Incoming contact requests +│ ├── 
new-contact.tsx # Add contact by @username or address +│ ├── wallet.tsx # Balance + TX history + send +│ └── settings.tsx # Node URL, key export, profile +├── components/ui/ # shadcn-style components (Button, Card, Input…) +├── hooks/ +│ ├── useMessages.ts # Poll relay inbox, decrypt messages +│ ├── useBalance.ts # Poll token balance +│ └── useContacts.ts # Load contacts + poll contact requests +└── lib/ + ├── api.ts # REST client for all DChain endpoints + ├── crypto.ts # NaCl box encrypt/decrypt, Ed25519 sign + ├── storage.ts # SecureStore (keys) + AsyncStorage (data) + ├── store.ts # Zustand global state + ├── types.ts # TypeScript interfaces + └── utils.ts # cn(), formatAmount(), relativeTime() +``` + +## Cryptography + +| Operation | Algorithm | Library | +|-----------|-----------|---------| +| Transaction signing | Ed25519 | TweetNaCl `sign` | +| Key exchange | X25519 (Curve25519) | TweetNaCl `box` | +| Message encryption | NaCl box (XSalsa20-Poly1305) | TweetNaCl `box` | +| Key storage | Device secure enclave | expo-secure-store | + +Messages are encrypted as: +``` +Envelope { + sender_pub: // sender's public key + recipient_pub: // recipient's public key + nonce: <24-byte hex> // random per message + ciphertext: // NaCl box(plaintext, nonce, sender_priv, recipient_pub) +} +``` + +## Connect to your node + +1. Start the DChain node: `docker compose up --build -d` +2. Open the app → Settings → Node URL → `http://YOUR_IP:8081` +3. If using Expo Go on physical device: your PC and phone must be on the same network, or use `npx expo start --tunnel` + +## Key File Format + +The `key.json` exported/imported by the app: +```json +{ + "pub_key": "26018d40...", // Ed25519 public key (64 hex chars) + "priv_key": "...", // Ed25519 private key (128 hex chars) + "x25519_pub": "...", // X25519 public key (64 hex chars) + "x25519_priv": "..." // X25519 private key (64 hex chars) +} +``` + +This is the same format as the Go node's `--key` flag. 
diff --git a/client-app/app.json b/client-app/app.json new file mode 100644 index 0000000..7419e5f --- /dev/null +++ b/client-app/app.json @@ -0,0 +1,69 @@ +{ + "expo": { + "name": "DChain Messenger", + "slug": "dchain-messenger", + "version": "1.0.0", + "orientation": "portrait", + "userInterfaceStyle": "dark", + "backgroundColor": "#000000", + "ios": { + "supportsTablet": false, + "bundleIdentifier": "com.dchain.messenger", + "infoPlist": { + "NSMicrophoneUsageDescription": "Allow DChain to record voice messages and video.", + "NSCameraUsageDescription": "Allow DChain to record video messages and scan QR codes.", + "NSPhotoLibraryUsageDescription": "Allow DChain to attach photos and videos from your library." + } + }, + "android": { + "package": "com.dchain.messenger", + "softwareKeyboardLayoutMode": "pan", + "permissions": [ + "android.permission.RECORD_AUDIO", + "android.permission.CAMERA", + "android.permission.READ_EXTERNAL_STORAGE", + "android.permission.WRITE_EXTERNAL_STORAGE", + "android.permission.MODIFY_AUDIO_SETTINGS" + ] + }, + "web": { + "bundler": "metro", + "output": "static" + }, + "plugins": [ + "expo-router", + "expo-secure-store", + [ + "expo-camera", + { + "cameraPermission": "Allow DChain to record video messages and scan QR codes.", + "microphonePermission": "Allow DChain to record audio with video." + } + ], + [ + "expo-image-picker", + { + "photosPermission": "Allow DChain to attach photos and videos.", + "cameraPermission": "Allow DChain to take photos." + } + ], + [ + "expo-audio", + { + "microphonePermission": "Allow DChain to record voice messages." 
+ } + ], + "expo-video" + ], + "experiments": { + "typedRoutes": false + }, + "scheme": "dchain", + "extra": { + "router": {}, + "eas": { + "projectId": "28d7743e-6745-460f-8ce5-c971c5c297b6" + } + } + } +} diff --git a/client-app/app/(app)/_layout.tsx b/client-app/app/(app)/_layout.tsx new file mode 100644 index 0000000..5260729 --- /dev/null +++ b/client-app/app/(app)/_layout.tsx @@ -0,0 +1,81 @@ +/** + * Main app layout — кастомный `` + ``. + * + * AnimatedSlot — обёртка над Slot'ом, анимирующая переход при смене + * pathname'а. Направление анимации вычисляется по TAB_ORDER: если + * целевой tab "справа" — слайд из правой стороны, "слева" — из левой. + * + * Intra-tab навигация (chats/index → chats/[id]) обслуживается вложенным + * Stack'ом в chats/_layout.tsx — там остаётся нативная slide-from-right + * анимация, чтобы chat detail "выезжал" поверх списка. + * + * Side-effects (balance, contacts, WS auth, dev seed) — монтируются здесь + * один раз; переходы между tab'ами их не перезапускают. 
 */
import React, { useEffect } from 'react';
import { View } from 'react-native';
import { router, usePathname } from 'expo-router';
import { useSafeAreaInsets } from 'react-native-safe-area-context';
import { useStore } from '@/lib/store';
import { useBalance } from '@/hooks/useBalance';
import { useContacts } from '@/hooks/useContacts';
import { useWellKnownContracts } from '@/hooks/useWellKnownContracts';
import { useNotifications } from '@/hooks/useNotifications';
import { useGlobalInbox } from '@/hooks/useGlobalInbox';
import { getWSClient } from '@/lib/ws';
import { useDevSeed } from '@/lib/devSeed';
import { NavBar } from '@/components/NavBar';
import { AnimatedSlot } from '@/components/AnimatedSlot';

export default function AppLayout() {
  const keyFile = useStore(s => s.keyFile);
  const requests = useStore(s => s.requests);
  const insets = useSafeAreaInsets();
  const pathname = usePathname();

  // NavBar is hidden on full-screen routes:
  //  - chat detail
  //  - compose (new post modal)
  //  - feed sub-routes (post detail, hashtag search)
  const hideNav =
    /^\/chats\/[^/]+/.test(pathname) ||
    pathname === '/compose' ||
    /^\/feed\/.+/.test(pathname);

  // Global side-effect hooks — mounted once here so tab switches don't
  // restart them.
  useBalance();
  useContacts();
  useWellKnownContracts();
  useDevSeed();
  useNotifications(); // permission + tap-handler
  useGlobalInbox(); // global inbox listener → notifications on new peer msg

  // Keep the WS client's auth credentials in sync with the loaded key.
  useEffect(() => {
    const ws = getWSClient();
    if (keyFile) ws.setAuthCreds({ pubKey: keyFile.pub_key, privKey: keyFile.priv_key });
    else ws.setAuthCreds(null);
  }, [keyFile]);

  // If the key is still absent after a 300ms grace period, bounce back to
  // onboarding. The delay presumably avoids a redirect race while the key
  // is loading — TODO confirm.
  useEffect(() => {
    if (keyFile === null) {
      const t = setTimeout(() => {
        if (!useStore.getState().keyFile) router.replace('/');
      }, 300);
      return () => clearTimeout(t);
    }
  }, [keyFile]);

  // NOTE(review): the JSX element tags in this return block were lost in
  // extraction (angle-bracket markup stripped) — only the {!hideNav && …}
  // expression survived. Restore the markup from version control before
  // relying on this copy.
  return (



    {!hideNav && (

    )}

  );
}
diff --git a/client-app/app/(app)/chats/[id].tsx
new file mode 100644
index 0000000..e7d2aa0
--- /dev/null
+++ 
b/client-app/app/(app)/chats/[id].tsx @@ -0,0 +1,512 @@ +/** + * Chat detail screen — верстка по референсу (X-style Messages). + * + * Структура: + * [Header: back + avatar + name + typing-status | ⋯] + * [FlatList: MessageBubble + DaySeparator, group-aware] + * [Composer: floating, supports edit/reply banner] + * + * Весь presentational код вынесен в components/chat/*: + * - MessageBubble (own/peer rendering) + * - DaySeparator (day label между группами) + * - buildRows (чистая функция группировки) + * Date-форматирование — lib/dates.ts. + */ +import React, { useState, useRef, useEffect, useCallback, useMemo } from 'react'; +import { + View, Text, FlatList, KeyboardAvoidingView, Platform, Alert, Pressable, +} from 'react-native'; +import { router, useLocalSearchParams } from 'expo-router'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import * as Clipboard from 'expo-clipboard'; + +import { useStore } from '@/lib/store'; +import { useMessages } from '@/hooks/useMessages'; +import { encryptMessage } from '@/lib/crypto'; +import { sendEnvelope } from '@/lib/api'; +import { getWSClient } from '@/lib/ws'; +import { appendMessage, loadMessages } from '@/lib/storage'; +import { randomId } from '@/lib/utils'; +import type { Message } from '@/lib/types'; + +import { Avatar } from '@/components/Avatar'; +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; +import { Composer, ComposerMode } from '@/components/Composer'; +import { AttachmentMenu } from '@/components/chat/AttachmentMenu'; +import { VideoCircleRecorder } from '@/components/chat/VideoCircleRecorder'; +import { clearContactNotifications } from '@/hooks/useNotifications'; +import { MessageBubble } from '@/components/chat/MessageBubble'; +import { DaySeparator } from '@/components/chat/DaySeparator'; +import { buildRows, Row } from '@/components/chat/rows'; +import type { Attachment } from '@/lib/types'; + +function shortAddr(a: string, n = 
6) { + if (!a) return '—'; + return a.length <= n * 2 + 1 ? a : `${a.slice(0, n)}…${a.slice(-n)}`; +} + +export default function ChatScreen() { + const { id: contactAddress } = useLocalSearchParams<{ id: string }>(); + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const contacts = useStore(s => s.contacts); + const messages = useStore(s => s.messages); + const setMsgs = useStore(s => s.setMessages); + const appendMsg = useStore(s => s.appendMessage); + const clearUnread = useStore(s => s.clearUnread); + + // При открытии чата: сбрасываем unread-счётчик и dismiss'им банер. + useEffect(() => { + if (!contactAddress) return; + clearUnread(contactAddress); + clearContactNotifications(contactAddress); + }, [contactAddress, clearUnread]); + + const contact = contacts.find(c => c.address === contactAddress); + const chatMsgs = messages[contactAddress ?? ''] ?? []; + const listRef = useRef(null); + + const [text, setText] = useState(''); + const [sending, setSending] = useState(false); + const [peerTyping, setPeerTyping] = useState(false); + const [composeMode, setComposeMode] = useState({ kind: 'new' }); + const [pendingAttach, setPendingAttach] = useState(null); + const [attachMenuOpen, setAttachMenuOpen] = useState(false); + const [videoCircleOpen, setVideoCircleOpen] = useState(false); + /** + * ID сообщения, которое сейчас подсвечено (после jump-to-reply). На + * ~2 секунды backgroundColor bubble'а мерцает accent-цветом. + * `null` — ничего не подсвечено. + */ + const [highlightedId, setHighlightedId] = useState(null); + const highlightClearTimer = useRef | null>(null); + + // ── Selection mode ─────────────────────────────────────────────────── + // Активируется первым long-press'ом на bubble'е. Header меняется на + // toolbar с Forward/Delete/Cancel. Tap по bubble'у в selection mode + // toggle'ит принадлежность к выборке. Cancel сбрасывает всё. 
+ const [selectedIds, setSelectedIds] = useState>(new Set()); + const selectionMode = selectedIds.size > 0; + + useMessages(contact?.x25519Pub ?? ''); + + // ── Typing indicator от peer'а ───────────────────────────────────────── + useEffect(() => { + if (!keyFile?.x25519_pub) return; + const ws = getWSClient(); + let timer: ReturnType | null = null; + const off = ws.subscribe('typing:' + keyFile.x25519_pub, (frame) => { + if (frame.event !== 'typing') return; + const d = frame.data as { from?: string } | undefined; + if (!contact?.x25519Pub || d?.from !== contact.x25519Pub) return; + setPeerTyping(true); + if (timer) clearTimeout(timer); + timer = setTimeout(() => setPeerTyping(false), 3_000); + }); + return () => { off(); if (timer) clearTimeout(timer); }; + }, [keyFile?.x25519_pub, contact?.x25519Pub]); + + // Throttled типinginisi-ping собеседнику. + const lastTypingSent = useRef(0); + const onChange = useCallback((t: string) => { + setText(t); + if (!contact?.x25519Pub || !t.trim()) return; + const now = Date.now(); + if (now - lastTypingSent.current < 2_000) return; + lastTypingSent.current = now; + getWSClient().sendTyping(contact.x25519Pub); + }, [contact?.x25519Pub]); + + // Восстановить сообщения из persistent-storage при первом заходе в чат. + // + // Важно: НЕ перезаписываем store пустым массивом — это стёрло бы + // содержимое, которое уже лежит в zustand (например, из devSeed или + // только что полученные по WS сообщения пока монтировались). Если + // в кэше что-то есть — мержим: берём max(cached, in-store) по id. + useEffect(() => { + if (!contactAddress) return; + loadMessages(contactAddress).then(cached => { + if (!cached || cached.length === 0) return; // кэш пуст → оставляем store + const existing = useStore.getState().messages[contactAddress] ?? 
[]; + const byId = new Map(); + for (const m of cached as Message[]) byId.set(m.id, m); + for (const m of existing) byId.set(m.id, m); // store-версия свежее + const merged = Array.from(byId.values()).sort((a, b) => a.timestamp - b.timestamp); + setMsgs(contactAddress, merged); + }); + }, [contactAddress, setMsgs]); + + const name = contact?.username + ? `@${contact.username}` + : contact?.alias ?? shortAddr(contactAddress ?? ''); + + // ── Compose actions ──────────────────────────────────────────────────── + const cancelCompose = useCallback(() => { + setComposeMode({ kind: 'new' }); + setText(''); + setPendingAttach(null); + }, []); + + // buildRows выдаёт chronological [old → new]. FlatList работает + // inverted, поэтому reverse'им: newest = data[0] = снизу экрана. + // Определено тут (не позже) чтобы handlers типа onJumpToReply могли + // искать индексы по id без forward-declaration. + const rows = useMemo(() => { + const chrono = buildRows(chatMsgs); + return [...chrono].reverse(); + }, [chatMsgs]); + + /** + * Core send logic. Принимает явные text + attachment чтобы избегать + * race'а со state updates при моментальной отправке голоса/видео. + * Если передано null/undefined — берём из текущего state. + */ + const sendCore = useCallback(async ( + textArg: string | null = null, + attachArg: Attachment | null | undefined = undefined, + ) => { + if (!keyFile || !contact) return; + const actualText = textArg !== null ? textArg : text; + const actualAttach = attachArg !== undefined ? attachArg : pendingAttach; + const hasText = !!actualText.trim(); + const hasAttach = !!actualAttach; + if (!hasText && !hasAttach) return; + if (!contact.x25519Pub) { + Alert.alert('No encryption key yet', 'The contact has not published their key. 
Try later.'); + return; + } + + if (composeMode.kind === 'edit') { + const target = chatMsgs.find(m => m.text === composeMode.text && m.mine); + if (!target) { cancelCompose(); return; } + const updated: Message = { ...target, text: actualText.trim(), edited: true }; + setMsgs(contact.address, chatMsgs.map(m => m.id === target.id ? updated : m)); + cancelCompose(); + return; + } + + setSending(true); + try { + if (hasText) { + const { nonce, ciphertext } = encryptMessage( + actualText.trim(), keyFile.x25519_priv, contact.x25519Pub, + ); + await sendEnvelope({ + senderPub: keyFile.x25519_pub, + recipientPub: contact.x25519Pub, + senderEd25519Pub: keyFile.pub_key, + nonce, ciphertext, + }); + } + + const msg: Message = { + id: randomId(), + from: keyFile.x25519_pub, + text: actualText.trim(), + timestamp: Math.floor(Date.now() / 1000), + mine: true, + read: false, + edited: false, + attachment: actualAttach ?? undefined, + replyTo: composeMode.kind === 'reply' + ? { id: composeMode.msgId, text: composeMode.preview, author: composeMode.author } + : undefined, + }; + appendMsg(contact.address, msg); + await appendMessage(contact.address, msg); + setText(''); + setPendingAttach(null); + setComposeMode({ kind: 'new' }); + } catch (e: any) { + Alert.alert('Send failed', e?.message ?? 'Unknown error'); + } finally { + setSending(false); + } + }, [ + text, keyFile, contact, composeMode, chatMsgs, + setMsgs, cancelCompose, appendMsg, pendingAttach, + ]); + + // UI send button + const send = useCallback(() => sendCore(), [sendCore]); + + // ── Selection handlers ─────────────────────────────────────────────── + // Long-press — входим в selection mode и сразу отмечаем это сообщение. + const onMessageLongPress = useCallback((m: Message) => { + setSelectedIds(prev => { + const next = new Set(prev); + next.add(m.id); + return next; + }); + }, []); + + // Tap в selection mode — toggle принадлежности. 
+ const onMessageTap = useCallback((m: Message) => { + if (!selectionMode) return; + setSelectedIds(prev => { + const next = new Set(prev); + if (next.has(m.id)) next.delete(m.id); else next.add(m.id); + return next; + }); + }, [selectionMode]); + + const cancelSelection = useCallback(() => setSelectedIds(new Set()), []); + + // ── Swipe-to-reply ────────────────────────────────────────────────── + const onMessageReply = useCallback((m: Message) => { + if (selectionMode) return; + setComposeMode({ + kind: 'reply', + msgId: m.id, + author: m.mine ? 'You' : name, + preview: m.text || (m.attachment ? `(${m.attachment.kind})` : ''), + }); + }, [name, selectionMode]); + + // ── Profile navigation (tap на аватарке / имени peer'а) ────────────── + const onOpenPeerProfile = useCallback(() => { + if (!contactAddress) return; + router.push(`/(app)/profile/${contactAddress}` as never); + }, [contactAddress]); + + // ── Jump to reply: tap по quoted-блоку в bubble'е ──────────────────── + // Скроллим FlatList к оригинальному сообщению и зажигаем highlight + // на ~2 секунды (highlightedId state + useEffect-driven анимация в + // MessageBubble.highlightAnim). + const onJumpToReply = useCallback((originalId: string) => { + const idx = rows.findIndex(r => r.kind === 'msg' && r.msg.id === originalId); + if (idx < 0) { + // Сообщение не найдено (возможно удалено или ушло за пагинацию). + // Silently no-op. + return; + } + try { + listRef.current?.scrollToIndex({ + index: idx, + animated: true, + viewPosition: 0.3, // оригинал — чуть выше середины экрана, не прямо в центре + }); + } catch { + // scrollToIndex может throw'нуть если индекс за пределами рендера; + // fallback: scrollToOffset на приблизительную позицию. 
+ } + setHighlightedId(originalId); + if (highlightClearTimer.current) clearTimeout(highlightClearTimer.current); + highlightClearTimer.current = setTimeout(() => { + setHighlightedId(null); + highlightClearTimer.current = null; + }, 2000); + }, [rows]); + + useEffect(() => { + return () => { + if (highlightClearTimer.current) clearTimeout(highlightClearTimer.current); + }; + }, []); + + // ── Selection actions ──────────────────────────────────────────────── + const deleteSelected = useCallback(() => { + if (selectedIds.size === 0 || !contact) return; + Alert.alert( + `Delete ${selectedIds.size} message${selectedIds.size > 1 ? 's' : ''}?`, + 'This removes them from your device. Other participants keep their copies.', + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Delete', + style: 'destructive', + onPress: () => { + setMsgs(contact.address, chatMsgs.filter(m => !selectedIds.has(m.id))); + setSelectedIds(new Set()); + }, + }, + ], + ); + }, [selectedIds, contact, chatMsgs, setMsgs]); + + const forwardSelected = useCallback(() => { + // Forward UI ещё не реализован — показываем stub. Пример потока: + // 1. открыть "Forward to…" screen со списком контактов + // 2. для каждого выбранного контакта — sendEnvelope с оригинальным + // текстом, timestamp=now + Alert.alert( + `Forward ${selectedIds.size} message${selectedIds.size > 1 ? 's' : ''}`, + 'Contact-picker screen is coming in the next iteration. For now, copy the text and paste.', + [{ text: 'OK' }], + ); + }, [selectedIds]); + + // Copy доступен только когда выделено ровно одно сообщение. + const copySelected = useCallback(async () => { + if (selectedIds.size !== 1) return; + const id = [...selectedIds][0]; + const msg = chatMsgs.find(m => m.id === id); + if (!msg) return; + await Clipboard.setStringAsync(msg.text); + setSelectedIds(new Set()); + }, [selectedIds, chatMsgs]); + + // В group-чатах над peer-сообщениями рисуется имя отправителя и его + // аватар (group = несколько участников). 
В DM (direct) и каналах + // отправитель ровно один, поэтому имя/аватар не нужны — убираем. + const withSenderMeta = contact?.kind === 'group'; + + const renderRow = ({ item }: { item: Row }) => { + if (item.kind === 'sep') return ; + return ( + + ); + }; + + return ( + + {/* Header — использует общий компонент
, чтобы соблюдать + правила шапки приложения (left slot / centered title / right slot). */} + + {selectionMode ? ( +
} + title={`${selectedIds.size} selected`} + right={ + <> + {selectedIds.size === 1 && ( + + )} + + + + } + /> + ) : ( +
router.back()} />} + title={ + + + + + {name} + + {peerTyping && ( + + typing… + + )} + {!peerTyping && !contact?.x25519Pub && ( + + waiting for key + + )} + + + } + right={} + /> + )} + + + {/* Messages — inverted: data[0] рендерится снизу, последующее — + выше. Это стандартный chat-паттерн: FlatList сразу монтируется + с "scroll position at bottom" без ручного scrollToEnd, и новые + сообщения (добавляемые в начало reversed-массива) появляются + внизу естественно. Никаких jerk'ов при открытии. */} + r.kind === 'sep' ? r.id : r.msg.id} + renderItem={renderRow} + contentContainerStyle={{ paddingVertical: 10 }} + showsVerticalScrollIndicator={false} + ListEmptyComponent={() => ( + + + + Say hi to {name} + + + Your messages are end-to-end encrypted. + + + )} + /> + + {/* Composer — floating, прибит к низу. */} + + setAttachMenuOpen(true)} + attachment={pendingAttach} + onClearAttach={() => setPendingAttach(null)} + onFinishVoice={(att) => { + // Voice отправляется сразу — sendCore получает attachment + // явным аргументом, минуя state-задержку. + sendCore('', att); + }} + onStartVideoCircle={() => setVideoCircleOpen(true)} + /> + + + setAttachMenuOpen(false)} + onPick={(att) => setPendingAttach(att)} + /> + + setVideoCircleOpen(false)} + onFinish={(att) => { + // Video-circle тоже отправляется сразу. + sendCore('', att); + }} + /> + + ); +} diff --git a/client-app/app/(app)/chats/_layout.tsx b/client-app/app/(app)/chats/_layout.tsx new file mode 100644 index 0000000..b64a699 --- /dev/null +++ b/client-app/app/(app)/chats/_layout.tsx @@ -0,0 +1,28 @@ +/** + * chats/_layout — вложенный Stack для chats/index и chats/[id]. + * + * animation: 'none' — переходы между index и [id] анимирует родительский + * AnimatedSlot (140ms, Easing.out cubic), обеспечивая единую скорость и + * кривую между: + * - chat open/close (index ↔ [id]) + * - tab switches (chats ↔ wallet и т.д.) 
+ * - sub-route open/close (settings, profile) + * + * gestureEnabled: true оставлен на случай если пользователь использует + * нативный iOS edge-swipe — он вызовет router.back(), анимация пройдёт + * через AnimatedSlot. + */ +import { Stack } from 'expo-router'; + +export default function ChatsLayout() { + return ( + + ); +} diff --git a/client-app/app/(app)/chats/index.tsx b/client-app/app/(app)/chats/index.tsx new file mode 100644 index 0000000..272c5c0 --- /dev/null +++ b/client-app/app/(app)/chats/index.tsx @@ -0,0 +1,105 @@ +/** + * Messages screen — список чатов в стиле референса. + * + * ┌ safe-area top + * │ TabHeader (title зависит от connection state) + * │ ─ FlatList (chat tiles) ─ + * └ NavBar (external) + * + * Фильтры и search убраны — лист один поток; requests доступны через + * NavBar → notifications tab. FAB composer'а тоже убран (чат-лист + * просто отражает существующие беседы, создание новых — через tab + * "New chat" в NavBar'е). + */ +import React, { useMemo } from 'react'; +import { View, Text, FlatList } from 'react-native'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; + +import { useStore } from '@/lib/store'; +import { useConnectionStatus } from '@/hooks/useConnectionStatus'; +import type { Contact, Message } from '@/lib/types'; + +import { TabHeader } from '@/components/TabHeader'; +import { ChatTile } from '@/components/ChatTile'; + +export default function ChatsScreen() { + const insets = useSafeAreaInsets(); + const contacts = useStore(s => s.contacts); + const messages = useStore(s => s.messages); + + // Статус подключения: online / connecting / offline. + // Название шапки и цвет pip'а на аватаре зависят от него. + const connStatus = useConnectionStatus(); + + const headerTitle = + connStatus === 'online' ? 'Messages' : + connStatus === 'connecting' ? 
'Connecting…' : + 'Waiting for internet'; + + const dotColor = + connStatus === 'online' ? '#3ba55d' : // green + connStatus === 'connecting' ? '#f0b35a' : // amber + '#f4212e'; // red + + const lastOf = (c: Contact): Message | null => { + const msgs = messages[c.address]; + return msgs && msgs.length ? msgs[msgs.length - 1] : null; + }; + + // Сортировка по последней активности. + const sorted = useMemo(() => { + return [...contacts] + .map(c => ({ c, last: lastOf(c) })) + .sort((a, b) => { + const ka = a.last ? a.last.timestamp : a.c.addedAt / 1000; + const kb = b.last ? b.last.timestamp : b.c.addedAt / 1000; + return kb - ka; + }) + .map(x => x.c); + }, [contacts, messages]); + + return ( + + + + + c.address} + renderItem={({ item }) => ( + router.push(`/(app)/chats/${item.address}` as never)} + /> + )} + contentContainerStyle={{ paddingBottom: 40, flexGrow: 1 }} + showsVerticalScrollIndicator={false} + /> + + {sorted.length === 0 && ( + + + + No chats yet + + + Use the search tab in the navbar to add your first contact. + + + )} + + + ); +} diff --git a/client-app/app/(app)/compose.tsx b/client-app/app/(app)/compose.tsx new file mode 100644 index 0000000..a24b143 --- /dev/null +++ b/client-app/app/(app)/compose.tsx @@ -0,0 +1,390 @@ +/** + * Post composer — full-screen modal for writing a new post. + * + * Twitter-style layout: + * Header: [✕] (draft-ish) [Опубликовать button] + * Body: [avatar] [multiline TextInput autogrow] + * [hashtags preview chips] + * [attachment preview + remove button] + * Footer: [📷 attach] ··· [] [~fee estimate] + * + * The flow: + * 1. User types content; hashtags auto-parse for preview + * 2. (Optional) pick image — client-side compression (expo-image-manipulator) + * → resize to 1080px max, JPEG quality 50 + * 3. Tap "Опубликовать" → confirmation modal with fee + * 4. 
Confirm → publishAndCommit() → navigate to post detail + * + * Failure modes: + * - Size overflow (>256 KiB): blocked client-side with hint to compress + * further or drop attachment + * - Insufficient balance: show humanised error from submitTx + * - Network down: toast "нет связи, попробуйте снова" + */ +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { + View, Text, TextInput, Pressable, Alert, Image, KeyboardAvoidingView, + Platform, ActivityIndicator, ScrollView, Linking, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { router } from 'expo-router'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import * as ImagePicker from 'expo-image-picker'; +import * as ImageManipulator from 'expo-image-manipulator'; +import * as FileSystem from 'expo-file-system/legacy'; + +import { useStore } from '@/lib/store'; +import { Avatar } from '@/components/Avatar'; +import { publishAndCommit, formatFee } from '@/lib/feed'; +import { humanizeTxError, getBalance } from '@/lib/api'; + +const MAX_CONTENT_LENGTH = 4000; +const MAX_POST_BYTES = 256 * 1024; // must match server's MaxPostSize +const IMAGE_MAX_DIM = 1080; +const IMAGE_QUALITY = 0.5; // JPEG Q=50 — small, still readable + +interface Attachment { + uri: string; + mime: string; + size: number; + bytes: Uint8Array; + width?: number; + height?: number; +} + +export default function ComposeScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const username = useStore(s => s.username); + + const [content, setContent] = useState(''); + const [attach, setAttach] = useState(null); + const [busy, setBusy] = useState(false); + const [picking, setPicking] = useState(false); + const [balance, setBalance] = useState(null); + + // Fetch balance once so we can warn before publishing. 
+ useEffect(() => { + if (!keyFile) return; + getBalance(keyFile.pub_key).then(setBalance).catch(() => setBalance(null)); + }, [keyFile]); + + // Estimated fee mirrors server's formula exactly. Displayed to the user + // so they aren't surprised by a debit. + const estimatedFee = useMemo(() => { + const size = (new TextEncoder().encode(content)).length + (attach?.size ?? 0) + 128; + return 1000 + size; // base 1000 + 1 µT/byte (matches blockchain constants) + }, [content, attach]); + + const totalBytes = useMemo(() => { + return (new TextEncoder().encode(content)).length + (attach?.size ?? 0) + 128; + }, [content, attach]); + + const hashtags = useMemo(() => { + const matches = content.match(/#[A-Za-z0-9_\u0400-\u04FF]{1,40}/g) || []; + const seen = new Set(); + return matches + .map(m => m.slice(1).toLowerCase()) + .filter(t => !seen.has(t) && seen.add(t)); + }, [content]); + + const canPublish = !busy && (content.trim().length > 0 || attach !== null) + && totalBytes <= MAX_POST_BYTES; + + const onPickImage = async () => { + if (picking) return; + setPicking(true); + try { + const perm = await ImagePicker.requestMediaLibraryPermissionsAsync(); + if (!perm.granted) { + Alert.alert( + 'Нужен доступ к фото', + 'Откройте настройки и разрешите доступ к галерее.', + [ + { text: 'Отмена' }, + { text: 'Настройки', onPress: () => Linking.openSettings() }, + ], + ); + return; + } + const result = await ImagePicker.launchImageLibraryAsync({ + mediaTypes: ImagePicker.MediaTypeOptions.Images, + quality: 1, + exif: false, // privacy: ask picker not to return EXIF + }); + if (result.canceled || !result.assets[0]) return; + + const asset = result.assets[0]; + + // Client-side compression: resize + re-encode. This is the FIRST + // scrub pass — server will do another one (mandatory) before storing. 
+ const manipulated = await ImageManipulator.manipulateAsync( + asset.uri, + [{ resize: { width: IMAGE_MAX_DIM } }], + { compress: IMAGE_QUALITY, format: ImageManipulator.SaveFormat.JPEG }, + ); + + // Read the compressed bytes. + const b64 = await FileSystem.readAsStringAsync(manipulated.uri, { + encoding: FileSystem.EncodingType.Base64, + }); + const bytes = base64ToBytes(b64); + + if (bytes.length > MAX_POST_BYTES - 512) { + Alert.alert( + 'Слишком большое', + `Картинка ${Math.round(bytes.length / 1024)} KB — лимит ${MAX_POST_BYTES / 1024} KB. Попробуйте выбрать поменьше.`, + ); + return; + } + + setAttach({ + uri: manipulated.uri, + mime: 'image/jpeg', + size: bytes.length, + bytes, + width: manipulated.width, + height: manipulated.height, + }); + } catch (e: any) { + Alert.alert('Не удалось', String(e?.message ?? e)); + } finally { + setPicking(false); + } + }; + + const onPublish = async () => { + if (!keyFile || !canPublish) return; + + // Balance guard. + if (balance !== null && balance < estimatedFee) { + Alert.alert( + 'Недостаточно средств', + `Нужно ${formatFee(estimatedFee)}, на балансе ${formatFee(balance)}.`, + ); + return; + } + + Alert.alert( + 'Опубликовать пост?', + `Цена: ${formatFee(estimatedFee)}\nРазмер: ${Math.round(totalBytes / 1024 * 10) / 10} KB`, + [ + { text: 'Отмена', style: 'cancel' }, + { + text: 'Опубликовать', + onPress: async () => { + setBusy(true); + try { + const postID = await publishAndCommit({ + author: keyFile.pub_key, + privKey: keyFile.priv_key, + content: content.trim(), + attachment: attach?.bytes, + attachmentMIME: attach?.mime, + }); + // Close composer and open the new post. 
+ router.replace(`/(app)/feed/${postID}` as never); + } catch (e: any) { + Alert.alert('Не удалось опубликовать', humanizeTxError(e)); + } finally { + setBusy(false); + } + }, + }, + ], + ); + }; + + return ( + + {/* Header */} + + router.back()} hitSlop={8}> + + + + ({ + paddingHorizontal: 18, paddingVertical: 9, + borderRadius: 999, + backgroundColor: canPublish ? (pressed ? '#1a8cd8' : '#1d9bf0') : '#1f1f1f', + })} + > + {busy ? ( + + ) : ( + + Опубликовать + + )} + + + + + {/* Avatar + TextInput row */} + + + + + + {/* Hashtag preview */} + {hashtags.length > 0 && ( + + {hashtags.map(tag => ( + + + #{tag} + + + ))} + + )} + + {/* Attachment preview */} + {attach && ( + + + + setAttach(null)} + hitSlop={8} + style={({ pressed }) => ({ + position: 'absolute', + top: 8, right: 8, + width: 28, height: 28, borderRadius: 14, + backgroundColor: pressed ? 'rgba(0,0,0,0.9)' : 'rgba(0,0,0,0.75)', + alignItems: 'center', justifyContent: 'center', + })} + > + + + + + {Math.round(attach.size / 1024)} KB · метаданные удалят на сервере + + + )} + + + {/* Footer: attach / counter / fee */} + + ({ + opacity: pressed || picking || attach ? 0.5 : 1, + })} + > + {picking + ? + : } + + + MAX_POST_BYTES ? '#f4212e' + : totalBytes > MAX_POST_BYTES * 0.85 ? 
'#f0b35a' + : '#6a6a6a', + fontSize: 12, + fontWeight: '600', + }} + > + {Math.round(totalBytes / 1024 * 10) / 10} / {MAX_POST_BYTES / 1024} KB + + + + ≈ {formatFee(estimatedFee)} + + + + ); +} + +// ── Helpers ──────────────────────────────────────────────────────────── + +function base64ToBytes(b64: string): Uint8Array { + const binary = atob(b64.replace(/-/g, '+').replace(/_/g, '/')); + const out = new Uint8Array(binary.length); + for (let i = 0; i < binary.length; i++) out[i] = binary.charCodeAt(i); + return out; +} diff --git a/client-app/app/(app)/feed.tsx b/client-app/app/(app)/feed.tsx new file mode 100644 index 0000000..b935fd7 --- /dev/null +++ b/client-app/app/(app)/feed.tsx @@ -0,0 +1,320 @@ +/** + * Feed tab — Twitter-style timeline with three sources: + * + * Подписки → /feed/timeline?follower=me (posts from people I follow) + * Для вас → /feed/foryou?pub=me (recommendations) + * В тренде → /feed/trending?window=24 (most-engaged in last 24h) + * + * Floating compose button (bottom-right) → /(app)/compose modal. + * + * Uses a single FlatList per tab with pull-to-refresh + optimistic + * local updates. Stats (likes, likedByMe) are fetched once per refresh + * and piggy-backed onto each PostCard via props; the card does the + * optimistic toggle locally until the next refresh reconciles. 
+ */ +import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { + View, Text, FlatList, Pressable, RefreshControl, ActivityIndicator, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { router } from 'expo-router'; + +import { TabHeader } from '@/components/TabHeader'; +import { PostCard } from '@/components/feed/PostCard'; +import { useStore } from '@/lib/store'; +import { + fetchTimeline, fetchForYou, fetchTrending, fetchStats, bumpView, + type FeedPostItem, +} from '@/lib/feed'; + +type TabKey = 'following' | 'foryou' | 'trending'; + +const TAB_LABELS: Record = { + following: 'Подписки', + foryou: 'Для вас', + trending: 'В тренде', +}; + +export default function FeedScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + + const [tab, setTab] = useState('foryou'); // default: discovery + const [posts, setPosts] = useState([]); + const [likedSet, setLikedSet] = useState>(new Set()); + const [loading, setLoading] = useState(false); + const [refreshing, setRefreshing] = useState(false); + const [error, setError] = useState(null); + + // Guard against rapid tab switches overwriting each other's results. + const requestRef = useRef(0); + + const loadPosts = useCallback(async (isRefresh = false) => { + if (!keyFile) return; + if (isRefresh) setRefreshing(true); + else setLoading(true); + setError(null); + + const seq = ++requestRef.current; + try { + let items: FeedPostItem[] = []; + switch (tab) { + case 'following': + items = await fetchTimeline(keyFile.pub_key, 40); + break; + case 'foryou': + items = await fetchForYou(keyFile.pub_key, 40); + break; + case 'trending': + items = await fetchTrending(24, 40); + break; + } + if (seq !== requestRef.current) return; // stale response + setPosts(items); + + // Batch-fetch liked_by_me (bounded concurrency — 6 at a time). 
+ const liked = new Set(); + const chunks = chunk(items, 6); + for (const group of chunks) { + const results = await Promise.all( + group.map(p => fetchStats(p.post_id, keyFile.pub_key)), + ); + results.forEach((s, i) => { + if (s?.liked_by_me) liked.add(group[i].post_id); + }); + } + if (seq !== requestRef.current) return; + setLikedSet(liked); + } catch (e: any) { + if (seq !== requestRef.current) return; + const msg = String(e?.message ?? e); + // Silence benign network/404 — just show empty state. + if (/Network request failed|→\s*404/.test(msg)) { + setPosts([]); + } else { + setError(msg); + } + } finally { + if (seq !== requestRef.current) return; + setLoading(false); + setRefreshing(false); + } + }, [keyFile, tab]); + + useEffect(() => { loadPosts(false); }, [loadPosts]); + + const onStatsChanged = useCallback(async (postID: string) => { + if (!keyFile) return; + const stats = await fetchStats(postID, keyFile.pub_key); + if (!stats) return; + setPosts(ps => ps.map(p => p.post_id === postID + ? { ...p, likes: stats.likes, views: stats.views } + : p)); + setLikedSet(s => { + const next = new Set(s); + if (stats.liked_by_me) next.add(postID); + else next.delete(postID); + return next; + }); + }, [keyFile]); + + const onDeleted = useCallback((postID: string) => { + setPosts(ps => ps.filter(p => p.post_id !== postID)); + }, []); + + // View counter: fire bumpView once when a card scrolls into view. 
+ const viewedRef = useRef>(new Set()); + const onViewableItemsChanged = useRef(({ viewableItems }: { viewableItems: Array<{ item: FeedPostItem; isViewable: boolean }> }) => { + for (const { item, isViewable } of viewableItems) { + if (isViewable && !viewedRef.current.has(item.post_id)) { + viewedRef.current.add(item.post_id); + bumpView(item.post_id); + } + } + }).current; + + const viewabilityConfig = useRef({ itemVisiblePercentThreshold: 60, minimumViewTime: 1000 }).current; + + const emptyHint = useMemo(() => { + switch (tab) { + case 'following': return 'Подпишитесь на кого-нибудь, чтобы увидеть их посты здесь.'; + case 'foryou': return 'Пока нет рекомендаций — возвращайтесь позже.'; + case 'trending': return 'В этой ленте пока тихо.'; + } + }, [tab]); + + return ( + + + + {/* Tab strip */} + + {(Object.keys(TAB_LABELS) as TabKey[]).map(key => ( + setTab(key)} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', + paddingVertical: 14, + backgroundColor: pressed ? '#0a0a0a' : 'transparent', + })} + > + + {TAB_LABELS[key]} + + {tab === key && ( + + )} + + ))} + + + {/* Feed list */} + p.post_id} + renderItem={({ item }) => ( + + )} + refreshControl={ + loadPosts(true)} + tintColor="#1d9bf0" + /> + } + onViewableItemsChanged={onViewableItemsChanged} + viewabilityConfig={viewabilityConfig} + ListEmptyComponent={ + loading ? ( + + + + ) : error ? ( + loadPosts(false)} + /> + ) : ( + + ) + } + contentContainerStyle={posts.length === 0 ? { flexGrow: 1 } : undefined} + /> + + {/* Floating compose button */} + router.push('/(app)/compose' as never)} + style={({ pressed }) => ({ + position: 'absolute', + right: 18, + bottom: Math.max(insets.bottom, 12) + 70, // clear the NavBar + width: 56, height: 56, + borderRadius: 28, + backgroundColor: pressed ? 
'#1a8cd8' : '#1d9bf0', + alignItems: 'center', justifyContent: 'center', + shadowColor: '#1d9bf0', + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.4, + shadowRadius: 8, + elevation: 6, + })} + > + + + + ); +} + +// ── Empty state ───────────────────────────────────────────────────────── + +function EmptyState({ + icon, title, subtitle, onRetry, +}: { + icon: React.ComponentProps['name']; + title: string; + subtitle?: string; + onRetry?: () => void; +}) { + return ( + + + + + + {title} + + {subtitle && ( + + {subtitle} + + )} + {onRetry && ( + ({ + marginTop: 16, + paddingHorizontal: 20, paddingVertical: 10, + borderRadius: 999, + backgroundColor: pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + + Попробовать снова + + + )} + + ); +} + +function chunk(arr: T[], size: number): T[][] { + const out: T[][] = []; + for (let i = 0; i < arr.length; i += size) out.push(arr.slice(i, i + size)); + return out; +} diff --git a/client-app/app/(app)/feed/[id].tsx b/client-app/app/(app)/feed/[id].tsx new file mode 100644 index 0000000..090aa12 --- /dev/null +++ b/client-app/app/(app)/feed/[id].tsx @@ -0,0 +1,242 @@ +/** + * Post detail — full view of one post with stats, thread context, and a + * lazy-rendered image attachment. + * + * Why a dedicated screen? + * - PostCard in the timeline intentionally doesn't render attachments + * (would explode initial render time with N images). + * - Per-post stats (views, likes, liked_by_me) want a fresh refresh + * on open; timeline batches but not at the per-second cadence a + * reader expects when they just tapped in. 
+ * + * Layout: + * [← back · Пост] + * [PostCard (full — with attachment)] + * [stats bar: views · likes · fee] + * [— reply affordance below (future)] + */ +import React, { useCallback, useEffect, useState } from 'react'; +import { + View, Text, ScrollView, ActivityIndicator, Image, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { router, useLocalSearchParams } from 'expo-router'; + +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; +import { PostCard } from '@/components/feed/PostCard'; +import { useStore } from '@/lib/store'; +import { + fetchPost, fetchStats, bumpView, formatCount, formatFee, + type FeedPostItem, type PostStats, +} from '@/lib/feed'; + +export default function PostDetailScreen() { + const { id: postID } = useLocalSearchParams<{ id: string }>(); + const keyFile = useStore(s => s.keyFile); + + const [post, setPost] = useState(null); + const [stats, setStats] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + const load = useCallback(async () => { + if (!postID) return; + setLoading(true); + setError(null); + try { + const [p, s] = await Promise.all([ + fetchPost(postID), + fetchStats(postID, keyFile?.pub_key), + ]); + setPost(p); + setStats(s); + if (p) bumpView(postID); // fire-and-forget + } catch (e: any) { + setError(String(e?.message ?? e)); + } finally { + setLoading(false); + } + }, [postID, keyFile]); + + useEffect(() => { load(); }, [load]); + + const onStatsChanged = useCallback(async () => { + if (!postID) return; + const s = await fetchStats(postID, keyFile?.pub_key); + if (s) setStats(s); + }, [postID, keyFile]); + + const onDeleted = useCallback(() => { + // Go back to feed — the post is gone. + router.back(); + }, []); + + return ( + +
router.back()} />} + title="Пост" + /> + + {loading ? ( + + + + ) : error ? ( + + {error} + + ) : !post ? ( + + + + Пост удалён или больше недоступен + + + ) : ( + + + + {/* Attachment preview (if any). For MVP we try loading from the + CURRENT node — works when you're connected to the hosting + relay. Cross-relay discovery (look up hosting_relay URL via + /api/relays) is future work. */} + {post.has_attachment && ( + + )} + + {/* Detailed stats block */} + + + Информация о посте + + + + + + + + + {post.hashtags && post.hashtags.length > 0 && ( + <> + + + Хештеги + + + {post.hashtags.map(tag => ( + router.push(`/(app)/feed/tag/${encodeURIComponent(tag)}` as never)} + style={{ + color: '#1d9bf0', + fontSize: 13, + paddingHorizontal: 8, + paddingVertical: 3, + backgroundColor: '#081a2a', + borderRadius: 999, + }} + > + #{tag} + + ))} + + + )} + + + + + )} + + ); +} + +function DetailRow({ label, value, mono }: { label: string; value: string; mono?: boolean }) { + return ( + + {label} + + {value} + + + ); +} + +function AttachmentPreview({ postID }: { postID: string }) { + // For MVP we hit the local node URL; if the body is hosted elsewhere + // the image load will fail and the placeholder stays visible. + const { getNodeUrl } = require('@/lib/api'); + const url = `${getNodeUrl()}/feed/post/${postID}`; + // The body is a JSON object, not raw image bytes. For now we just + // show a placeholder — decoding base64 attachment → data-uri is a + // Phase D improvement once we add /feed/post/{id}/attachment raw bytes. + return ( + + + + Вложение: {url} + + + Прямой просмотр вложений — в следующем релизе + + + ); +} + +function shortAddr(a: string, n = 6): string { + if (!a) return '—'; + return a.length <= n * 2 + 1 ? a : `${a.slice(0, n)}…${a.slice(-n)}`; +} + +// Silence Image import when unused (reserved for future attachment preview). 
+void Image; diff --git a/client-app/app/(app)/feed/tag/[tag].tsx b/client-app/app/(app)/feed/tag/[tag].tsx new file mode 100644 index 0000000..e9c7d78 --- /dev/null +++ b/client-app/app/(app)/feed/tag/[tag].tsx @@ -0,0 +1,127 @@ +/** + * Hashtag feed — all posts tagged with #tag, newest first. + * + * Route: /(app)/feed/tag/[tag] + * Triggered by tapping a hashtag inside any PostCard's body. + */ +import React, { useCallback, useEffect, useRef, useState } from 'react'; +import { + View, Text, FlatList, RefreshControl, ActivityIndicator, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { router, useLocalSearchParams } from 'expo-router'; + +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; +import { PostCard } from '@/components/feed/PostCard'; +import { useStore } from '@/lib/store'; +import { fetchHashtag, fetchStats, type FeedPostItem } from '@/lib/feed'; + +export default function HashtagScreen() { + const { tag: rawTag } = useLocalSearchParams<{ tag: string }>(); + const tag = (rawTag ?? 
'').replace(/^#/, '').toLowerCase(); + const keyFile = useStore(s => s.keyFile); + + const [posts, setPosts] = useState([]); + const [likedSet, setLikedSet] = useState>(new Set()); + const [loading, setLoading] = useState(true); + const [refreshing, setRefreshing] = useState(false); + + const seq = useRef(0); + + const load = useCallback(async (isRefresh = false) => { + if (!tag) return; + if (isRefresh) setRefreshing(true); + else setLoading(true); + + const id = ++seq.current; + try { + const items = await fetchHashtag(tag, 60); + if (id !== seq.current) return; + setPosts(items); + + const liked = new Set(); + if (keyFile) { + for (const p of items) { + const s = await fetchStats(p.post_id, keyFile.pub_key); + if (s?.liked_by_me) liked.add(p.post_id); + } + } + if (id !== seq.current) return; + setLikedSet(liked); + } catch { + if (id !== seq.current) return; + setPosts([]); + } finally { + if (id !== seq.current) return; + setLoading(false); + setRefreshing(false); + } + }, [tag, keyFile]); + + useEffect(() => { load(false); }, [load]); + + const onStatsChanged = useCallback(async (postID: string) => { + if (!keyFile) return; + const s = await fetchStats(postID, keyFile.pub_key); + if (!s) return; + setPosts(ps => ps.map(p => p.post_id === postID + ? { ...p, likes: s.likes, views: s.views } : p)); + setLikedSet(set => { + const next = new Set(set); + if (s.liked_by_me) next.add(postID); else next.delete(postID); + return next; + }); + }, [keyFile]); + + return ( + +
router.back()} />} + title={`#${tag}`} + /> + + p.post_id} + renderItem={({ item }) => ( + + )} + refreshControl={ + load(true)} + tintColor="#1d9bf0" + /> + } + ListEmptyComponent={ + loading ? ( + + + + ) : ( + + + + Пока нет постов с этим тегом + + + Будьте первым — напишите пост с #{tag} + + + ) + } + contentContainerStyle={posts.length === 0 ? { flexGrow: 1 } : undefined} + /> + + ); +} diff --git a/client-app/app/(app)/new-contact.tsx b/client-app/app/(app)/new-contact.tsx new file mode 100644 index 0000000..e2636eb --- /dev/null +++ b/client-app/app/(app)/new-contact.tsx @@ -0,0 +1,288 @@ +/** + * Add new contact — dark minimalist, inspired by the reference. + * + * Flow: + * 1. Пользователь вводит @username или hex pubkey / DC-address. + * 2. Жмёт Search → resolveUsername → getIdentity. + * 3. Показываем preview (avatar + имя + address + наличие x25519). + * 4. Выбирает fee (chip-selector) + вводит intro. + * 5. Submit → CONTACT_REQUEST tx. + */ +import React, { useState } from 'react'; +import { + View, Text, ScrollView, Alert, Pressable, TextInput, ActivityIndicator, +} from 'react-native'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { useStore } from '@/lib/store'; +import { getIdentity, buildContactRequestTx, submitTx, resolveUsername, humanizeTxError } from '@/lib/api'; +import { shortAddr } from '@/lib/crypto'; +import { formatAmount } from '@/lib/utils'; + +import { Avatar } from '@/components/Avatar'; +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; +import { SearchBar } from '@/components/SearchBar'; + +const MIN_CONTACT_FEE = 5000; +const FEE_TIERS = [ + { value: 5_000, label: 'Min' }, + { value: 10_000, label: 'Standard' }, + { value: 50_000, label: 'Priority' }, +]; + +interface Resolved { + address: string; + nickname?: string; + x25519?: string; +} + +export default 
function NewContactScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const settings = useStore(s => s.settings); + const balance = useStore(s => s.balance); + + const [query, setQuery] = useState(''); + const [intro, setIntro] = useState(''); + const [fee, setFee] = useState(MIN_CONTACT_FEE); + const [resolved, setResolved] = useState(null); + const [searching, setSearching] = useState(false); + const [sending, setSending] = useState(false); + const [error, setError] = useState(null); + + async function search() { + const q = query.trim(); + if (!q) return; + setSearching(true); setResolved(null); setError(null); + try { + let address = q; + if (q.startsWith('@') || (!q.match(/^[0-9a-f]{64}$/i) && !q.startsWith('DC'))) { + const name = q.replace('@', ''); + const addr = await resolveUsername(settings.contractId, name); + if (!addr) { setError(`@${name} is not registered on this chain`); return; } + address = addr; + } + const identity = await getIdentity(address); + setResolved({ + address: identity?.pub_key ?? address, + nickname: identity?.nickname || undefined, + x25519: identity?.x25519_pub || undefined, + }); + } catch (e: any) { + setError(e?.message ?? 'Lookup failed'); + } finally { + setSearching(false); + } + } + + async function sendRequest() { + if (!resolved || !keyFile) return; + if (balance < fee + 1000) { + Alert.alert('Insufficient balance', `Need ${formatAmount(fee + 1000)} (fee + network).`); + return; + } + setSending(true); setError(null); + try { + const tx = buildContactRequestTx({ + from: keyFile.pub_key, + to: resolved.address, + contactFee: fee, + intro: intro.trim() || undefined, + privKey: keyFile.priv_key, + }); + await submitTx(tx); + Alert.alert( + 'Request sent', + `A contact request has been sent to ${resolved.nickname ? 
'@' + resolved.nickname : shortAddr(resolved.address)}.`, + [{ text: 'OK', onPress: () => router.back() }], + ); + } catch (e: any) { + setError(humanizeTxError(e)); + } finally { + setSending(false); + } + } + + const displayName = resolved + ? (resolved.nickname ? `@${resolved.nickname}` : shortAddr(resolved.address)) + : ''; + + return ( + +
router.back()} />} + /> + + + Enter a @username, a + hex pubkey or a DC… address. + + + + + ({ + flexDirection: 'row', alignItems: 'center', justifyContent: 'center', + paddingVertical: 11, borderRadius: 999, marginTop: 12, + backgroundColor: !query.trim() || searching ? '#1a1a1a' : pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + {searching ? ( + + ) : ( + Search + )} + + + {error && ( + + {error} + + )} + + {/* Resolved profile card */} + {resolved && ( + <> + + + + + + {displayName} + + + {shortAddr(resolved.address, 10)} + + + + + {resolved.x25519 ? 'E2E-ready' : 'Key not published yet'} + + + + + + + {/* Intro */} + + Intro (optional, plaintext on-chain) + + + + {intro.length}/140 + + + {/* Fee tier */} + + Anti-spam fee (goes to recipient) + + + {FEE_TIERS.map(t => { + const active = fee === t.value; + return ( + setFee(t.value)} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', + paddingVertical: 10, + borderRadius: 10, + backgroundColor: active ? '#ffffff' : pressed ? '#1a1a1a' : '#111111', + borderWidth: 1, borderColor: active ? '#ffffff' : '#1f1f1f', + })} + > + + {t.label} + + + {formatAmount(t.value)} + + + ); + })} + + + {/* Submit */} + ({ + flexDirection: 'row', alignItems: 'center', justifyContent: 'center', + paddingVertical: 13, borderRadius: 999, marginTop: 20, + backgroundColor: sending ? '#1a1a1a' : pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + {sending ? ( + + ) : ( + + Send request · {formatAmount(fee + 1000)} + + )} + + + )} + + + ); +} diff --git a/client-app/app/(app)/profile/[address].tsx b/client-app/app/(app)/profile/[address].tsx new file mode 100644 index 0000000..2688954 --- /dev/null +++ b/client-app/app/(app)/profile/[address].tsx @@ -0,0 +1,441 @@ +/** + * Profile screen — shows info about any address (yours or someone else's), + * plus their post feed, follow/unfollow button, and basic counters. 
+ * + * Routes: + * /(app)/profile/ + * + * Two states: + * - Known contact → open chat, show full info + * - Unknown address → Twitter-style "discovery" profile: shows just the + * address + posts + follow button. Useful when tapping an author from + * the feed of someone you don't chat with. + */ +import React, { useCallback, useEffect, useState } from 'react'; +import { + View, Text, ScrollView, Pressable, Alert, FlatList, + ActivityIndicator, RefreshControl, +} from 'react-native'; +import { router, useLocalSearchParams } from 'expo-router'; +import * as Clipboard from 'expo-clipboard'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; + +import { useStore } from '@/lib/store'; +import type { Contact } from '@/lib/types'; + +import { Avatar } from '@/components/Avatar'; +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; +import { PostCard } from '@/components/feed/PostCard'; +import { + fetchAuthorPosts, fetchStats, followUser, unfollowUser, + formatCount, type FeedPostItem, +} from '@/lib/feed'; +import { humanizeTxError } from '@/lib/api'; + +function shortAddr(a: string, n = 10): string { + if (!a) return '—'; + return a.length <= n * 2 + 1 ? a : `${a.slice(0, n)}…${a.slice(-n)}`; +} + +type Tab = 'posts' | 'info'; + +export default function ProfileScreen() { + const insets = useSafeAreaInsets(); + const { address } = useLocalSearchParams<{ address: string }>(); + const contacts = useStore(s => s.contacts); + const keyFile = useStore(s => s.keyFile); + const contact = contacts.find(c => c.address === address); + + const [tab, setTab] = useState('posts'); + const [posts, setPosts] = useState([]); + const [likedSet, setLikedSet] = useState>(new Set()); + const [loadingPosts, setLoadingPosts] = useState(true); + const [refreshing, setRefreshing] = useState(false); + + // Follow state is optimistic + reconciled via on-chain query. 
For MVP + // we keep a local-only flag that toggles immediately on tap; future: + // query chain.Following(me) once on mount to seed accurate initial state. + const [following, setFollowing] = useState(false); + const [followingBusy, setFollowingBusy] = useState(false); + const [copied, setCopied] = useState(null); + + const isMe = !!keyFile && keyFile.pub_key === address; + const displayName = contact?.username + ? `@${contact.username}` + : contact?.alias ?? (isMe ? 'Вы' : shortAddr(address ?? '')); + + const loadPosts = useCallback(async (isRefresh = false) => { + if (!address) return; + if (isRefresh) setRefreshing(true); else setLoadingPosts(true); + try { + const items = await fetchAuthorPosts(address, 40); + setPosts(items); + if (keyFile) { + const liked = new Set(); + for (const p of items) { + const s = await fetchStats(p.post_id, keyFile.pub_key); + if (s?.liked_by_me) liked.add(p.post_id); + } + setLikedSet(liked); + } + } catch { + setPosts([]); + } finally { + setLoadingPosts(false); + setRefreshing(false); + } + }, [address, keyFile]); + + useEffect(() => { + if (tab === 'posts') loadPosts(false); + }, [tab, loadPosts]); + + const copy = async (value: string, label: string) => { + await Clipboard.setStringAsync(value); + setCopied(label); + setTimeout(() => setCopied(null), 1800); + }; + + const openChat = () => { + if (!address) return; + router.replace(`/(app)/chats/${address}` as never); + }; + + const onToggleFollow = async () => { + if (!keyFile || !address || isMe || followingBusy) return; + setFollowingBusy(true); + const wasFollowing = following; + setFollowing(!wasFollowing); + try { + if (wasFollowing) { + await unfollowUser({ from: keyFile.pub_key, privKey: keyFile.priv_key, target: address }); + } else { + await followUser({ from: keyFile.pub_key, privKey: keyFile.priv_key, target: address }); + } + } catch (e: any) { + setFollowing(wasFollowing); + Alert.alert('Не удалось', humanizeTxError(e)); + } finally { + setFollowingBusy(false); + 
} + }; + + const onStatsChanged = useCallback(async (postID: string) => { + if (!keyFile) return; + const s = await fetchStats(postID, keyFile.pub_key); + if (!s) return; + setPosts(ps => ps.map(p => p.post_id === postID + ? { ...p, likes: s.likes, views: s.views } : p)); + setLikedSet(set => { + const next = new Set(set); + if (s.liked_by_me) next.add(postID); else next.delete(postID); + return next; + }); + }, [keyFile]); + + const onDeleted = useCallback((postID: string) => { + setPosts(ps => ps.filter(p => p.post_id !== postID)); + }, []); + + // ── Hero + follow button block ────────────────────────────────────── + + const Hero = ( + + + + + {!isMe ? ( + ({ + paddingHorizontal: 18, paddingVertical: 9, + borderRadius: 999, + backgroundColor: following + ? (pressed ? '#1a1a1a' : '#111111') + : (pressed ? '#e7e7e7' : '#ffffff'), + borderWidth: following ? 1 : 0, + borderColor: '#1f1f1f', + minWidth: 110, + alignItems: 'center', + })} + > + {followingBusy ? ( + + ) : ( + + {following ? 'Вы подписаны' : 'Подписаться'} + + )} + + ) : ( + router.push('/(app)/settings' as never)} + style={({ pressed }) => ({ + paddingHorizontal: 18, paddingVertical: 9, + borderRadius: 999, + backgroundColor: pressed ? '#1a1a1a' : '#111111', + borderWidth: 1, borderColor: '#1f1f1f', + })} + > + + Редактировать + + + )} + + + + + {displayName} + + {contact?.username && ( + + )} + + + {shortAddr(address ?? '')} + + + {/* Counters row — post count is derived from what we loaded; follower/ + following counters would require chain.Followers / chain.Following + HTTP exposure which isn't wired yet (Phase D). */} + + + {formatCount(posts.length)} + постов + + + + {/* Secondary actions: open chat + copy address */} + {!isMe && contact && ( + + ({ + flex: 1, + alignItems: 'center', justifyContent: 'center', + paddingVertical: 10, borderRadius: 999, + backgroundColor: pressed ? 
'#1a1a1a' : '#111111', + borderWidth: 1, borderColor: '#1f1f1f', + flexDirection: 'row', gap: 6, + })} + > + + + Чат + + + address && copy(address, 'address')} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', justifyContent: 'center', + paddingVertical: 10, borderRadius: 999, + backgroundColor: pressed ? '#1a1a1a' : '#111111', + borderWidth: 1, borderColor: '#1f1f1f', + })} + > + + {copied === 'address' ? 'Скопировано' : 'Копировать адрес'} + + + + )} + + ); + + // ── Tab strip ──────────────────────────────────────────────────────── + + const TabStrip = ( + + {(['posts', 'info'] as Tab[]).map(key => ( + setTab(key)} + style={{ + flex: 1, + alignItems: 'center', + paddingVertical: 12, + }} + > + + {key === 'posts' ? 'Посты' : 'Инфо'} + + {tab === key && ( + + )} + + ))} + + ); + + // ── Content per tab ───────────────────────────────────────────────── + + if (tab === 'posts') { + return ( + +
router.back()} />} + /> + p.post_id} + renderItem={({ item }) => ( + + )} + ListHeaderComponent={ + <> + {Hero} + {TabStrip} + + } + refreshControl={ + loadPosts(true)} + tintColor="#1d9bf0" + /> + } + ListEmptyComponent={ + loadingPosts ? ( + + + + ) : ( + + + + Пока нет постов + + + {isMe + ? 'Нажмите на синюю кнопку в ленте, чтобы написать первый.' + : 'Этот пользователь ещё ничего не публиковал.'} + + + ) + } + /> + + ); + } + + // Info tab + return ( + +
router.back()} />} + /> + + {Hero} + {TabStrip} + + + + + {contact && ( + <> + + + + )} + + + + + + ); +} + +function InfoRow({ + label, value, mono, accent, danger, +}: { + label: string; + value: string; + mono?: boolean; + accent?: boolean; + danger?: boolean; +}) { + const color = danger ? '#f0b35a' : accent ? '#1d9bf0' : '#ffffff'; + return ( + + {label} + + {value} + + + ); +} + +// Silence unused-import lint for Contact type used only in helpers. +const _contactType: Contact | null = null; void _contactType; diff --git a/client-app/app/(app)/requests.tsx b/client-app/app/(app)/requests.tsx new file mode 100644 index 0000000..b3f9f59 --- /dev/null +++ b/client-app/app/(app)/requests.tsx @@ -0,0 +1,173 @@ +/** + * Contact requests / notifications — dark minimalist. + * + * В референсе нижний таб «notifications» ведёт сюда. Пока это только + * incoming CONTACT_REQUEST'ы; позже сюда же придут другие системные + * уведомления (slash, ADD_VALIDATOR со-sig-ing, и т.д.). + */ +import React, { useState } from 'react'; +import { View, Text, FlatList, Alert, Pressable, ActivityIndicator } from 'react-native'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { useStore } from '@/lib/store'; +import { + buildAcceptContactTx, submitTx, getIdentity, humanizeTxError, +} from '@/lib/api'; +import { saveContact } from '@/lib/storage'; +import { shortAddr } from '@/lib/crypto'; +import { relativeTime } from '@/lib/utils'; +import type { ContactRequest } from '@/lib/types'; + +import { Avatar } from '@/components/Avatar'; +import { TabHeader } from '@/components/TabHeader'; +import { IconButton } from '@/components/IconButton'; + +export default function RequestsScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const requests = useStore(s => s.requests); + const setRequests = useStore(s => s.setRequests); + const 
upsertContact = useStore(s => s.upsertContact); + + const [accepting, setAccepting] = useState(null); + + async function accept(req: ContactRequest) { + if (!keyFile) return; + setAccepting(req.txHash); + try { + const identity = await getIdentity(req.from); + const x25519Pub = identity?.x25519_pub ?? ''; + + const tx = buildAcceptContactTx({ + from: keyFile.pub_key, to: req.from, privKey: keyFile.priv_key, + }); + await submitTx(tx); + + const contact = { address: req.from, x25519Pub, username: req.username, addedAt: Date.now() }; + upsertContact(contact); + await saveContact(contact); + + setRequests(requests.filter(r => r.txHash !== req.txHash)); + router.replace(`/(app)/chats/${req.from}` as never); + } catch (e: any) { + Alert.alert('Accept failed', humanizeTxError(e)); + } finally { + setAccepting(null); + } + } + + function decline(req: ContactRequest) { + Alert.alert( + 'Decline request', + `Decline request from ${req.username ? '@' + req.username : shortAddr(req.from)}?`, + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Decline', + style: 'destructive', + onPress: () => setRequests(requests.filter(r => r.txHash !== req.txHash)), + }, + ], + ); + } + + const renderItem = ({ item: req }: { item: ContactRequest }) => { + const name = req.username ? `@${req.username}` : shortAddr(req.from); + const isAccepting = accepting === req.txHash; + return ( + + + + + {name} + + + wants to message you · {relativeTime(req.timestamp)} + + {req.intro ? ( + + {req.intro} + + ) : null} + + + accept(req)} + disabled={isAccepting} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', justifyContent: 'center', + paddingVertical: 9, borderRadius: 999, + backgroundColor: isAccepting ? '#1a1a1a' : pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + {isAccepting ? 
( + + ) : ( + Accept + )} + + decline(req)} + disabled={isAccepting} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', justifyContent: 'center', + paddingVertical: 9, borderRadius: 999, + backgroundColor: pressed ? '#1a1a1a' : '#111111', + borderWidth: 1, borderColor: '#1f1f1f', + })} + > + Decline + + + + + ); + }; + + return ( + + + + {requests.length === 0 ? ( + + + + All caught up + + + Contact requests and network events will appear here. + + + ) : ( + r.txHash} + renderItem={renderItem} + contentContainerStyle={{ paddingBottom: 120 }} + showsVerticalScrollIndicator={false} + /> + )} + + ); +} diff --git a/client-app/app/(app)/settings.tsx b/client-app/app/(app)/settings.tsx new file mode 100644 index 0000000..854fb2d --- /dev/null +++ b/client-app/app/(app)/settings.tsx @@ -0,0 +1,595 @@ +/** + * Settings screen — sub-route, открывается по tap'у на profile-avatar в + * TabHeader. Использует обычный `
` с back-кнопкой. + * + * Секции: + * 1. Профиль — avatar, @username, short-address, Copy row. + * 2. Username — регистрация в native:username_registry (если не куплено). + * 3. Node — URL + contract ID + Save + Status. + * 4. Account — Export key, Delete account. + * + * Весь Pressable'овый layout живёт на ВНЕШНЕМ View с static style — + * Pressable handle-ит только background change (через вложенный View + * в ({pressed}) callback'е), никаких layout props в callback-style. + * Это лечит web-баг, где Pressable style-функция не применяет + * percentage/padding layout надёжно. + */ +import React, { useState, useEffect } from 'react'; +import { + View, Text, ScrollView, TextInput, Alert, Pressable, ActivityIndicator, Share, +} from 'react-native'; +import * as Clipboard from 'expo-clipboard'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; + +import { useStore } from '@/lib/store'; +import { saveSettings, deleteKeyFile } from '@/lib/storage'; +import { + setNodeUrl, getNetStats, resolveUsername, reverseResolve, + buildCallContractTx, submitTx, + USERNAME_REGISTRATION_FEE, MIN_USERNAME_LENGTH, MAX_USERNAME_LENGTH, + humanizeTxError, +} from '@/lib/api'; +import { shortAddr } from '@/lib/crypto'; +import { formatAmount } from '@/lib/utils'; + +import { Avatar } from '@/components/Avatar'; +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; + +type NodeStatus = 'idle' | 'checking' | 'ok' | 'error'; +type IoniconName = React.ComponentProps['name']; + +// ─── Shared layout primitives ───────────────────────────────────── + +function SectionLabel({ children }: { children: string }) { + return ( + + {children} + + ); +} + +function Card({ children }: { children: React.ReactNode }) { + return ( + + {children} + + ); +} + +/** + * Row — clickable / non-clickable list item внутри Card'а. 
+ * + * Layout живёт на ВНЕШНЕМ контейнере (View если read-only, Pressable + * если tappable). Для pressed-стейта используется вложенный `` + * с background-color, чтобы не полагаться на style-функцию Pressable'а + * (web-баг). + */ +function Row({ + icon, label, value, onPress, right, danger, first, +}: { + icon: IoniconName; + label: string; + value?: string; + onPress?: () => void; + right?: React.ReactNode; + danger?: boolean; + first?: boolean; +}) { + const body = (pressed: boolean) => ( + + + + + + + {label} + + {value !== undefined && ( + + {value} + + )} + + {right} + {onPress && !right && ( + + )} + + ); + + if (!onPress) return {body(false)}; + return ( + + {({ pressed }) => body(pressed)} + + ); +} + +// ─── Screen ─────────────────────────────────────────────────────── + +export default function SettingsScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const setKeyFile = useStore(s => s.setKeyFile); + const settings = useStore(s => s.settings); + const setSettings = useStore(s => s.setSettings); + const username = useStore(s => s.username); + const setUsername = useStore(s => s.setUsername); + const balance = useStore(s => s.balance); + + const [nodeUrl, setNodeUrlInput] = useState(settings.nodeUrl); + const [contractId, setContractId] = useState(settings.contractId); + const [nodeStatus, setNodeStatus] = useState('idle'); + const [peerCount, setPeerCount] = useState(null); + const [blockCount, setBlockCount] = useState(null); + const [copied, setCopied] = useState(false); + const [savingNode, setSavingNode] = useState(false); + + // Username registration state + const [nameInput, setNameInput] = useState(''); + const [nameError, setNameError] = useState(null); + const [registering, setRegistering] = useState(false); + + useEffect(() => { checkNode(); }, []); + useEffect(() => { setContractId(settings.contractId); }, [settings.contractId]); + useEffect(() => { + if (!settings.contractId || !keyFile) { 
setUsername(null); return; } + (async () => { + const name = await reverseResolve(settings.contractId, keyFile.pub_key); + setUsername(name); + })(); + }, [settings.contractId, keyFile, setUsername]); + + async function checkNode() { + setNodeStatus('checking'); + try { + const stats = await getNetStats(); + setNodeStatus('ok'); + setPeerCount(stats.peer_count); + setBlockCount(stats.total_blocks); + } catch { + setNodeStatus('error'); + } + } + + async function saveNode() { + setSavingNode(true); + const url = nodeUrl.trim().replace(/\/$/, ''); + setNodeUrl(url); + const next = { nodeUrl: url, contractId: contractId.trim() }; + setSettings(next); + await saveSettings(next); + await checkNode(); + setSavingNode(false); + Alert.alert('Saved', 'Node settings updated.'); + } + + async function copyAddress() { + if (!keyFile) return; + await Clipboard.setStringAsync(keyFile.pub_key); + setCopied(true); + setTimeout(() => setCopied(false), 1800); + } + + async function exportKey() { + if (!keyFile) return; + try { + await Share.share({ + message: JSON.stringify(keyFile, null, 2), + title: 'DChain key file', + }); + } catch (e: any) { + Alert.alert('Export failed', e?.message ?? 'Unknown error'); + } + } + + function logout() { + Alert.alert( + 'Delete account', + 'Your key will be removed from this device. 
Make sure you have a backup!', + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Delete', + style: 'destructive', + onPress: async () => { + await deleteKeyFile(); + setKeyFile(null); + router.replace('/'); + }, + }, + ], + ); + } + + const onNameChange = (v: string) => { + const cleaned = v.toLowerCase().replace(/[^a-z0-9_\-]/g, '').slice(0, MAX_USERNAME_LENGTH); + setNameInput(cleaned); + setNameError(null); + }; + const nameIsValid = nameInput.length >= MIN_USERNAME_LENGTH && /^[a-z]/.test(nameInput); + + async function registerUsername() { + if (!keyFile) return; + const name = nameInput.trim(); + if (!nameIsValid) { + setNameError(`Min ${MIN_USERNAME_LENGTH} chars, starts with a-z`); + return; + } + if (!settings.contractId) { + setNameError('No registry contract in node settings'); + return; + } + const total = USERNAME_REGISTRATION_FEE + 1000 + 2000; + if (balance < total) { + setNameError(`Need ${formatAmount(total)}, have ${formatAmount(balance)}`); + return; + } + try { + const existing = await resolveUsername(settings.contractId, name); + if (existing) { setNameError(`@${name} already taken`); return; } + } catch { /* ignore */ } + + Alert.alert( + `Buy @${name}?`, + `Cost: ${formatAmount(USERNAME_REGISTRATION_FEE)} + fee ${formatAmount(1000)}.\nBinds to your address until released.`, + [ + { text: 'Cancel', style: 'cancel' }, + { + text: 'Buy', + onPress: async () => { + setRegistering(true); + setNameError(null); + try { + const tx = buildCallContractTx({ + from: keyFile.pub_key, + contractId: settings.contractId, + method: 'register', + args: [name], + amount: USERNAME_REGISTRATION_FEE, + privKey: keyFile.priv_key, + }); + await submitTx(tx); + setNameInput(''); + Alert.alert('Submitted', 'Registration tx accepted. Name appears in a few seconds.'); + let attempts = 0; + const iv = setInterval(async () => { + attempts++; + const got = keyFile + ? 
await reverseResolve(settings.contractId, keyFile.pub_key) + : null; + if (got) { setUsername(got); clearInterval(iv); } + else if (attempts >= 10) clearInterval(iv); + }, 2000); + } catch (e: any) { + setNameError(humanizeTxError(e)); + } finally { + setRegistering(false); + } + }, + }, + ], + ); + } + + const statusColor = + nodeStatus === 'ok' ? '#3ba55d' : + nodeStatus === 'error' ? '#f4212e' : + '#f0b35a'; + const statusLabel = + nodeStatus === 'ok' ? 'Connected' : + nodeStatus === 'error' ? 'Unreachable' : + 'Checking…'; + + return ( + +
router.back()} />} + /> + + {/* ── Profile ── */} + Profile + + + + + {username ? ( + + + @{username} + + + + ) : ( + No username yet + )} + + {keyFile ? shortAddr(keyFile.pub_key, 10) : '—'} + + + + } + /> + + + {/* ── Username (только если ещё нет) ── */} + {!username && ( + <> + Username + + + + Buy a username + + + Flat {formatAmount(USERNAME_REGISTRATION_FEE)} fee + {formatAmount(1000)} network. + Only a-z, 0-9, _, -. Starts with a letter. + + + + @ + + + {nameError && ( + + {nameError} + + )} + + + + + + )} + + {/* ── Node ── */} + Node + + + + + + + + + } + /> + + + {/* ── Account ── */} + Account + + + + + + + ); +} + +// ─── Form primitives ────────────────────────────────────────────── + +function LabeledInput({ + label, value, onChangeText, placeholder, monospace, +}: { + label: string; + value: string; + onChangeText: (v: string) => void; + placeholder?: string; + monospace?: boolean; +}) { + return ( + + {label} + + + ); +} + +function PrimaryButton({ + label, onPress, disabled, loading, style, +}: { + label: string; + onPress: () => void; + disabled?: boolean; + loading?: boolean; + style?: object; +}) { + return ( + + {({ pressed }) => ( + + {loading ? ( + + ) : ( + + {label} + + )} + + )} + + ); +} diff --git a/client-app/app/(app)/wallet.tsx b/client-app/app/(app)/wallet.tsx new file mode 100644 index 0000000..33c5393 --- /dev/null +++ b/client-app/app/(app)/wallet.tsx @@ -0,0 +1,652 @@ +/** + * Wallet screen — dark minimalist. + * + * Сетка: + * [TabHeader: profile-avatar | Wallet | refresh] + * [Balance hero card — gradient-ish dark card, big number, address chip, action row] + * [SectionLabel: Recent transactions] + * [TX list card — tiles per tx, in/out coloring, relative time] + * [Send modal: slide-up sheet с полями recipient/amount/fee + total preview] + * + * Все кнопки и инпуты — те же плоские стили, что на других экранах. 
+ * Никаких style-функций у Pressable'ов с layout-пропсами (избегаем web + * layout-баги, которые мы уже ловили на ChatTile/MessageBubble). + */ +import React, { useState, useCallback, useEffect, useMemo } from 'react'; +import { + View, Text, ScrollView, Modal, Alert, RefreshControl, Pressable, TextInput, ActivityIndicator, +} from 'react-native'; +import * as Clipboard from 'expo-clipboard'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; + +import { useStore } from '@/lib/store'; +import { useBalance } from '@/hooks/useBalance'; +import { buildTransferTx, submitTx, getTxHistory, getBalance, humanizeTxError } from '@/lib/api'; +import { shortAddr } from '@/lib/crypto'; +import { formatAmount, relativeTime } from '@/lib/utils'; +import type { TxRecord } from '@/lib/types'; + +import { TabHeader } from '@/components/TabHeader'; +import { IconButton } from '@/components/IconButton'; + +// ─── TX meta (icon + label + tone) ───────────────────────────────── + +type IoniconName = React.ComponentProps['name']; + +interface TxMeta { + label: string; + icon: IoniconName; + tone: 'in' | 'out' | 'neutral'; +} + +const TX_META: Record = { + TRANSFER: { label: 'Transfer', icon: 'swap-horizontal-outline', tone: 'neutral' }, + CONTACT_REQUEST: { label: 'Contact request', icon: 'person-add-outline', tone: 'out' }, + ACCEPT_CONTACT: { label: 'Contact accepted', icon: 'person-outline', tone: 'in' }, + BLOCK_CONTACT: { label: 'Block', icon: 'ban-outline', tone: 'out' }, + DEPLOY_CONTRACT: { label: 'Deploy', icon: 'document-text-outline', tone: 'out' }, + CALL_CONTRACT: { label: 'Call contract', icon: 'flash-outline', tone: 'out' }, + STAKE: { label: 'Stake', icon: 'lock-closed-outline', tone: 'out' }, + UNSTAKE: { label: 'Unstake', icon: 'lock-open-outline', tone: 'in' }, + REGISTER_KEY: { label: 'Register key', icon: 'key-outline', tone: 'neutral' }, + BLOCK_REWARD: { label: 'Block reward', icon: 
'diamond-outline', tone: 'in' }, +}; + +function txMeta(type: string): TxMeta { + return TX_META[type] ?? { label: type.replace(/_/g, ' '), icon: 'ellipse-outline', tone: 'neutral' }; +} + +const toneColor = (tone: TxMeta['tone']): string => + tone === 'in' ? '#3ba55d' : tone === 'out' ? '#f4212e' : '#e7e7e7'; + +// ─── Main ────────────────────────────────────────────────────────── + +export default function WalletScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const balance = useStore(s => s.balance); + const setBalance = useStore(s => s.setBalance); + + useBalance(); + + const [txHistory, setTxHistory] = useState([]); + const [refreshing, setRefreshing] = useState(false); + const [copied, setCopied] = useState(false); + const [showSend, setShowSend] = useState(false); + + const load = useCallback(async () => { + if (!keyFile) return; + setRefreshing(true); + try { + const [hist, bal] = await Promise.all([ + getTxHistory(keyFile.pub_key), + getBalance(keyFile.pub_key), + ]); + setTxHistory(hist); + setBalance(bal); + } catch { /* ignore — WS/HTTP retries sample */ } + setRefreshing(false); + }, [keyFile, setBalance]); + + useEffect(() => { load(); }, [load]); + + const copyAddress = async () => { + if (!keyFile) return; + await Clipboard.setStringAsync(keyFile.pub_key); + setCopied(true); + setTimeout(() => setCopied(false), 1800); + }; + + const mine = keyFile?.pub_key ?? ''; + + return ( + + } + /> + + } + contentContainerStyle={{ paddingBottom: 120 }} + showsVerticalScrollIndicator={false} + > + setShowSend(true)} + /> + + Recent transactions + + {txHistory.length === 0 ? 
( + + ) : ( + + {txHistory.map((tx, i) => ( + + ))} + + )} + + + setShowSend(false)} + balance={balance} + keyFile={keyFile} + onSent={() => { + setShowSend(false); + setTimeout(load, 1200); + }} + /> + + ); +} + +// ─── Hero card ───────────────────────────────────────────────────── + +function BalanceHero({ + balance, address, copied, onCopy, onSend, +}: { + balance: number; + address: string; + copied: boolean; + onCopy: () => void; + onSend: () => void; +}) { + return ( + + + Balance + + + {formatAmount(balance)} + + + {/* Address chip */} + + + + + {copied ? 'Copied!' : shortAddr(address, 10)} + + + + + {/* Actions */} + + + + + + ); +} + +function HeroButton({ + icon, label, primary, onPress, +}: { + icon: IoniconName; + label: string; + primary?: boolean; + onPress: () => void; +}) { + const base = { + flex: 1, + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'center', + paddingVertical: 11, + borderRadius: 999, + gap: 6, + } as const; + return ( + + {({ pressed }) => ( + + + + {label} + + + )} + + ); +} + +// ─── Section label ──────────────────────────────────────────────── + +function SectionLabel({ children }: { children: string }) { + return ( + + {children} + + ); +} + +// ─── Empty state ────────────────────────────────────────────────── + +function EmptyTx() { + return ( + + + + No transactions yet + + + Pull to refresh + + + ); +} + +// ─── TX tile ────────────────────────────────────────────────────── +// +// Pressable с ВНЕШНИМ плоским style (background через static object), +// внутренняя View handles row-layout. Избегаем web-баг со style-функциями +// Pressable'а. + +function TxTile({ + tx, first, mine, +}: { + tx: TxRecord; + first: boolean; + mine: string; +}) { + const m = txMeta(tx.type); + const isMineTx = tx.from === mine; + const amt = tx.amount ?? 0; + const sign = m.tone === 'in' ? '+' : m.tone === 'out' ? 
'−' : ''; + const color = toneColor(m.tone); + + return ( + + + + + + + + {m.label} + + + {tx.type === 'TRANSFER' + ? (isMineTx ? `→ ${shortAddr(tx.to ?? '', 5)}` : `← ${shortAddr(tx.from, 5)}`) + : shortAddr(tx.hash, 8)} + {' · '} + {relativeTime(tx.timestamp)} + + + {amt > 0 && ( + + {sign}{formatAmount(amt)} + + )} + + + ); +} + +// ─── Send modal ─────────────────────────────────────────────────── + +function SendModal({ + visible, onClose, balance, keyFile, onSent, +}: { + visible: boolean; + onClose: () => void; + balance: number; + keyFile: { pub_key: string; priv_key: string } | null; + onSent: () => void; +}) { + const insets = useSafeAreaInsets(); + const [to, setTo] = useState(''); + const [amount, setAmount] = useState(''); + const [fee, setFee] = useState('1000'); + const [sending, setSending] = useState(false); + + useEffect(() => { + if (!visible) { + // reset при закрытии + setTo(''); setAmount(''); setFee('1000'); setSending(false); + } + }, [visible]); + + const amt = parseInt(amount || '0', 10) || 0; + const f = parseInt(fee || '0', 10) || 0; + const total = amt + f; + const ok = !!to.trim() && amt > 0 && total <= balance; + + const send = async () => { + if (!keyFile) return; + if (!ok) { + Alert.alert('Check inputs', total > balance + ? 
`Need ${formatAmount(total)}, have ${formatAmount(balance)}.` + : 'Recipient and amount are required.'); + return; + } + setSending(true); + try { + const tx = buildTransferTx({ + from: keyFile.pub_key, + to: to.trim(), + amount: amt, + fee: f, + privKey: keyFile.priv_key, + }); + await submitTx(tx); + onSent(); + } catch (e: any) { + Alert.alert('Send failed', humanizeTxError(e)); + } finally { + setSending(false); + } + }; + + return ( + + + { /* block bubble-close */ }} + style={{ + backgroundColor: '#0a0a0a', + borderTopLeftRadius: 20, + borderTopRightRadius: 20, + paddingTop: 10, + paddingBottom: Math.max(insets.bottom, 14) + 12, + paddingHorizontal: 14, + borderTopWidth: 1, + borderColor: '#1f1f1f', + }} + > + + + Send tokens + + + + + + + + + + + + + + + + + + + + {/* Summary */} + + + + + balance ? '#f4212e' : '#ffffff'} + /> + + + + + {({ pressed }) => ( + + + Cancel + + + )} + + + {({ pressed }) => ( + + {sending ? ( + + ) : ( + + Send + + )} + + )} + + + + + + ); +} + +function Field({ label, children }: { label: string; children: React.ReactNode }) { + return ( + + {label} + + {children} + + + ); +} + +function SummaryRow({ + label, value, muted, accent, +}: { + label: string; + value: string; + muted?: boolean; + accent?: string; +}) { + return ( + + {label} + + {value} + + + ); +} diff --git a/client-app/app/(auth)/create.tsx b/client-app/app/(auth)/create.tsx new file mode 100644 index 0000000..21c9d5e --- /dev/null +++ b/client-app/app/(auth)/create.tsx @@ -0,0 +1,139 @@ +/** + * Create Account — dark minimalist. + * Генерирует Ed25519 + X25519 keypair локально, сохраняет в SecureStore. 
+ */ +import React, { useState } from 'react'; +import { View, Text, ScrollView, Alert, Pressable, ActivityIndicator } from 'react-native'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { generateKeyFile } from '@/lib/crypto'; +import { saveKeyFile } from '@/lib/storage'; +import { useStore } from '@/lib/store'; + +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; + +export default function CreateAccountScreen() { + const insets = useSafeAreaInsets(); + const setKeyFile = useStore(s => s.setKeyFile); + const [loading, setLoading] = useState(false); + + async function handleCreate() { + setLoading(true); + try { + const kf = generateKeyFile(); + await saveKeyFile(kf); + setKeyFile(kf); + router.replace('/(auth)/created' as never); + } catch (e: any) { + Alert.alert('Error', e?.message ?? 'Unknown error'); + } finally { + setLoading(false); + } + } + + return ( + +
router.back()} />} + /> + + + A new identity is created locally + + + Your private key never leaves this device. The app encrypts it in the + platform secure store. + + + + + + + + + + + + Important + + + Export and backup your key file right after creation. If you lose + it there is no recovery — blockchain has no password reset. + + + + ({ + alignItems: 'center', justifyContent: 'center', + paddingVertical: 13, borderRadius: 999, marginTop: 20, + backgroundColor: loading ? '#1a1a1a' : pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + {loading ? ( + + ) : ( + + Generate keys & continue + + )} + + + + ); +} + +function InfoRow({ + icon, label, desc, first, +}: { + icon: React.ComponentProps['name']; + label: string; + desc: string; + first?: boolean; +}) { + return ( + + + + + + {label} + {desc} + + + ); +} diff --git a/client-app/app/(auth)/created.tsx b/client-app/app/(auth)/created.tsx new file mode 100644 index 0000000..b01a6bf --- /dev/null +++ b/client-app/app/(auth)/created.tsx @@ -0,0 +1,196 @@ +/** + * Account Created confirmation screen — dark minimalist. + * Показывает адрес + x25519, кнопки copy и export (share) key.json. 
+ */ +import React, { useState } from 'react'; +import { View, Text, ScrollView, Alert, Pressable, Share } from 'react-native'; +import { router } from 'expo-router'; +import * as Clipboard from 'expo-clipboard'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { useStore } from '@/lib/store'; + +import { Header } from '@/components/Header'; + +export default function AccountCreatedScreen() { + const insets = useSafeAreaInsets(); + const keyFile = useStore(s => s.keyFile); + const [copied, setCopied] = useState(null); + + if (!keyFile) { + router.replace('/'); + return null; + } + + async function copy(value: string, label: string) { + await Clipboard.setStringAsync(value); + setCopied(label); + setTimeout(() => setCopied(null), 1800); + } + + async function exportKey() { + try { + const json = JSON.stringify(keyFile, null, 2); + // Используем плоский Share API — без записи во временный файл. + // Получатель (mail, notes, etc.) получит текст целиком; юзер сам + // сохраняет как .json если нужно. + await Share.share({ + message: json, + title: 'DChain key file', + }); + } catch (e: any) { + Alert.alert('Export failed', e?.message ?? 'Unknown error'); + } + } + + return ( + +
+ + {/* Success badge */} + + + + + + Welcome aboard + + + Keys have been generated and stored securely. + + + + {/* Address */} + copy(keyFile.pub_key, 'address')} + /> + + {/* X25519 */} + + copy(keyFile.x25519_pub, 'x25519')} + /> + + {/* Backup */} + + + + + Backup your key file + + + + Export it now and store somewhere safe — password managers, cold + storage, printed paper. If you lose it, you lose the account. + + ({ + alignItems: 'center', justifyContent: 'center', + paddingVertical: 10, borderRadius: 999, + backgroundColor: pressed ? '#2a1f0f' : '#1a1409', + borderWidth: 1, borderColor: 'rgba(240,179,90,0.35)', + })} + > + + Export key.json + + + + + {/* Continue */} + router.replace('/(app)/chats' as never)} + style={({ pressed }) => ({ + alignItems: 'center', justifyContent: 'center', + paddingVertical: 14, borderRadius: 999, marginTop: 20, + backgroundColor: pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + + Open messenger + + + + + ); +} + +function KeyCard({ + title, value, copied, onCopy, +}: { + title: string; + value: string; + copied: boolean; + onCopy: () => void; +}) { + return ( + + + {title} + + + {value} + + ({ + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'center', + paddingVertical: 9, borderRadius: 999, + marginTop: 10, + backgroundColor: pressed ? '#1a1a1a' : '#111111', + borderWidth: 1, borderColor: '#1f1f1f', + })} + > + + + {copied ? 'Copied' : 'Copy'} + + + + ); +} diff --git a/client-app/app/(auth)/import.tsx b/client-app/app/(auth)/import.tsx new file mode 100644 index 0000000..b2208eb --- /dev/null +++ b/client-app/app/(auth)/import.tsx @@ -0,0 +1,230 @@ +/** + * Import existing key — dark minimalist. + * Два пути: + * 1. Paste JSON напрямую в textarea. + * 2. Pick файл .json через DocumentPicker. 
+ */ +import React, { useState } from 'react'; +import { + View, Text, ScrollView, TextInput, Alert, Pressable, ActivityIndicator, +} from 'react-native'; +import { router } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import * as DocumentPicker from 'expo-document-picker'; +import * as Clipboard from 'expo-clipboard'; +import { saveKeyFile } from '@/lib/storage'; +import { useStore } from '@/lib/store'; +import type { KeyFile } from '@/lib/types'; + +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; + +type Tab = 'paste' | 'file'; + +const REQUIRED_FIELDS: (keyof KeyFile)[] = ['pub_key', 'priv_key', 'x25519_pub', 'x25519_priv']; + +function validateKeyFile(raw: string): KeyFile { + let parsed: any; + try { parsed = JSON.parse(raw.trim()); } + catch { throw new Error('Invalid JSON — check that you copied the full key file.'); } + for (const field of REQUIRED_FIELDS) { + if (!parsed[field] || typeof parsed[field] !== 'string') { + throw new Error(`Missing or invalid field: "${field}"`); + } + if (!/^[0-9a-f]+$/i.test(parsed[field])) { + throw new Error(`Field "${field}" must be a hex string.`); + } + } + return parsed as KeyFile; +} + +export default function ImportKeyScreen() { + const insets = useSafeAreaInsets(); + const setKeyFile = useStore(s => s.setKeyFile); + + const [tab, setTab] = useState('paste'); + const [jsonText, setJsonText] = useState(''); + const [fileName, setFileName] = useState(null); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + async function applyKey(kf: KeyFile) { + setLoading(true); setError(null); + try { + await saveKeyFile(kf); + setKeyFile(kf); + router.replace('/(app)/chats' as never); + } catch (e: any) { + setError(e?.message ?? 
'Import failed'); + } finally { + setLoading(false); + } + } + + async function handlePasteImport() { + setError(null); + const text = jsonText.trim(); + if (!text) { + const clip = await Clipboard.getStringAsync(); + if (clip) setJsonText(clip); + return; + } + try { await applyKey(validateKeyFile(text)); } + catch (e: any) { setError(e?.message ?? 'Import failed'); } + } + + async function pickFile() { + setError(null); + try { + const result = await DocumentPicker.getDocumentAsync({ + type: ['application/json', 'text/plain', '*/*'], + copyToCacheDirectory: true, + }); + if (result.canceled) return; + const asset = result.assets[0]; + setFileName(asset.name); + const response = await fetch(asset.uri); + const raw = await response.text(); + await applyKey(validateKeyFile(raw)); + } catch (e: any) { + setError(e?.message ?? 'Import failed'); + } + } + + return ( + +
router.back()} />} + /> + + + Restore your account from a previously exported{' '} + dchain_key.json. + + + {/* Tabs */} + + {(['paste', 'file'] as Tab[]).map(t => ( + setTab(t)} + style={{ + flex: 1, + alignItems: 'center', + paddingVertical: 8, + borderRadius: 999, + backgroundColor: tab === t ? '#1d9bf0' : 'transparent', + }} + > + + {t === 'paste' ? 'Paste JSON' : 'Pick file'} + + + ))} + + + {tab === 'paste' ? ( + <> + + ({ + flexDirection: 'row', alignItems: 'center', justifyContent: 'center', + paddingVertical: 12, borderRadius: 999, marginTop: 12, + backgroundColor: loading ? '#1a1a1a' : pressed ? '#1a8cd8' : '#1d9bf0', + })} + > + {loading ? ( + + ) : ( + + {jsonText.trim() ? 'Import key' : 'Paste from clipboard'} + + )} + + + ) : ( + <> + ({ + alignItems: 'center', justifyContent: 'center', + paddingVertical: 40, borderRadius: 14, + backgroundColor: pressed ? '#111111' : '#0a0a0a', + borderWidth: 1, borderStyle: 'dashed', borderColor: '#1f1f1f', + })} + > + + + {fileName ?? 'Tap to pick key.json'} + + + Will auto-import on selection + + + {loading && ( + + + + )} + + )} + + {error && ( + + {error} + + )} + + + ); +} diff --git a/client-app/app/_layout.tsx b/client-app/app/_layout.tsx new file mode 100644 index 0000000..39a8728 --- /dev/null +++ b/client-app/app/_layout.tsx @@ -0,0 +1,59 @@ +import '../global.css'; + +import React, { useEffect } from 'react'; +import { Stack } from 'expo-router'; +import { StatusBar } from 'expo-status-bar'; +import { View } from 'react-native'; +import { SafeAreaProvider } from 'react-native-safe-area-context'; +// GestureHandlerRootView обязателен для работы gesture-handler'а +// на всех страницах: Pan/LongPress/Tap жестах внутри чатов. 
+import { GestureHandlerRootView } from 'react-native-gesture-handler'; +import { loadKeyFile, loadSettings } from '@/lib/storage'; +import { setNodeUrl } from '@/lib/api'; +import { useStore } from '@/lib/store'; + +export default function RootLayout() { + const setKeyFile = useStore(s => s.setKeyFile); + const setSettings = useStore(s => s.setSettings); + const booted = useStore(s => s.booted); + const setBooted = useStore(s => s.setBooted); + + // Bootstrap: load key + settings from storage синхронно до первого + // render'а экранов. Пока `booted=false` мы рендерим чёрный экран — + // это убирает "мелькание" welcome'а при старте, когда ключи уже есть + // в AsyncStorage, но ещё не успели загрузиться в store. + useEffect(() => { + (async () => { + try { + const [kf, settings] = await Promise.all([loadKeyFile(), loadSettings()]); + if (kf) setKeyFile(kf); + setSettings(settings); + setNodeUrl(settings.nodeUrl); + } finally { + setBooted(true); + } + })(); + }, []); + + return ( + + + + + {booted ? ( + + ) : ( + // Пустой чёрный экран пока bootstrap идёт — без flicker'а. + + )} + + + + ); +} diff --git a/client-app/app/index.tsx b/client-app/app/index.tsx new file mode 100644 index 0000000..263e58a --- /dev/null +++ b/client-app/app/index.tsx @@ -0,0 +1,519 @@ +/** + * Onboarding — 3-слайдовый pager перед auth-экранами. + * + * Slide 1 — "Why DChain": value-proposition, 3 пункта с иконками. + * Slide 2 — "How it works": выбор релей-ноды (public paid vs свой node), + * ссылка на Gitea, + node URL input с live ping. + * Slide 3 — "Your keys": кнопки Create / Import. + * + * Если `keyFile` в store уже есть (bootstrap из RootLayout загрузил) — + * делаем в (app), чтобы пользователь не видел вообще никакого + * мелькания onboarding'а. До загрузки `booted === false` root показывает + * чёрный экран. 
+ */ +import React, { useEffect, useState, useCallback, useRef } from 'react'; +import { + View, Text, TextInput, Pressable, ScrollView, + Alert, ActivityIndicator, Linking, Dimensions, + useWindowDimensions, +} from 'react-native'; +import { router, Redirect } from 'expo-router'; +import { Ionicons } from '@expo/vector-icons'; +import { CameraView, useCameraPermissions } from 'expo-camera'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import { useStore } from '@/lib/store'; +import { saveSettings } from '@/lib/storage'; +import { setNodeUrl, getNetStats } from '@/lib/api'; + +const { width: SCREEN_W } = Dimensions.get('window'); +const GITEA_URL = 'https://git.vsecoder.vodka/vsecoder/dchain'; + +export default function WelcomeScreen() { + const insets = useSafeAreaInsets(); + const { height: SCREEN_H } = useWindowDimensions(); + const keyFile = useStore(s => s.keyFile); + const booted = useStore(s => s.booted); + const settings = useStore(s => s.settings); + const setSettings = useStore(s => s.setSettings); + + const scrollRef = useRef(null); + const [page, setPage] = useState(0); + const [nodeInput, setNodeInput] = useState(''); + const [scanning, setScanning] = useState(false); + const [checking, setChecking] = useState(false); + const [nodeOk, setNodeOk] = useState(null); + + const [permission, requestPermission] = useCameraPermissions(); + + useEffect(() => { setNodeInput(settings.nodeUrl); }, [settings.nodeUrl]); + + // ВСЕ hooks должны быть объявлены ДО любого early-return, иначе + // React на следующем render'е посчитает разное число hooks и выкинет + // "Rendered fewer hooks than expected". useCallback ниже — тоже hook. 
+ const applyNode = useCallback(async (url: string) => { + const clean = url.trim().replace(/\/$/, ''); + if (!clean) return; + setChecking(true); + setNodeOk(null); + setNodeUrl(clean); + try { + await getNetStats(); + setNodeOk(true); + const next = { ...settings, nodeUrl: clean }; + setSettings(next); + await saveSettings(next); + } catch { + setNodeOk(false); + } finally { + setChecking(false); + } + }, [settings, setSettings]); + + const onQrScanned = useCallback(({ data }: { data: string }) => { + setScanning(false); + let url = data.trim(); + try { const p = JSON.parse(url); if (p.nodeUrl) url = p.nodeUrl; } catch {} + setNodeInput(url); + applyNode(url); + }, [applyNode]); + + // Bootstrap ещё не закончился — ничего не рендерим, RootLayout покажет + // чёрный экран (single source of truth для splash-state'а). + if (!booted) return null; + + // Ключи уже загружены — сразу в main app, без мелькания onboarding'а. + if (keyFile) return ; + + const openScanner = async () => { + if (!permission?.granted) { + const { granted } = await requestPermission(); + if (!granted) { + Alert.alert('Camera permission required', 'Allow camera access to scan QR codes.'); + return; + } + } + setScanning(true); + }; + + const goToPage = (p: number) => { + scrollRef.current?.scrollTo({ x: p * SCREEN_W, animated: true }); + setPage(p); + }; + + if (scanning) { + return ( + + + + + + Point at a DChain node QR code + + + setScanning(false)} + style={{ + position: 'absolute', top: 56, left: 16, + backgroundColor: 'rgba(0,0,0,0.6)', borderRadius: 20, + paddingHorizontal: 16, paddingVertical: 8, + }} + > + ✕ Cancel + + + ); + } + + const statusColor = nodeOk === true ? '#3ba55d' : nodeOk === false ? '#f4212e' : '#8b8b8b'; + + // Высота footer'а (dots + inset) — резервируем под неё снизу каждого + // слайда, чтобы CTA-кнопки оказывались прямо над индикатором страниц, + // а не залезали под него. 
+ const FOOTER_H = Math.max(insets.bottom, 20) + 8 + 12 + 7; // = padBottom + padTop + dot + const PAGE_H = SCREEN_H - FOOTER_H; + + return ( + + { + const p = Math.round(e.nativeEvent.contentOffset.x / SCREEN_W); + setPage(p); + }} + style={{ flex: 1 }} + keyboardShouldPersistTaps="handled" + > + {/* ───────── Slide 1: Why DChain ───────── */} + + + + + + + + DChain + + + A messenger that belongs to you. + + + + + + + + + {/* CTA — прижата к правому нижнему краю. */} + + goToPage(1)} /> + + + + {/* ───────── Slide 2: How it works ───────── */} + + + + Как это работает + + + Сообщения проходят через релей-ноду в зашифрованном виде. + Выбери публичную или подключи свою. + + + + + + + Node URL + + + + + { setNodeInput(t); setNodeOk(null); }} + onEndEditing={() => applyNode(nodeInput)} + onSubmitEditing={() => applyNode(nodeInput)} + placeholder="http://192.168.1.10:8080" + placeholderTextColor="#5a5a5a" + autoCapitalize="none" + autoCorrect={false} + keyboardType="url" + returnKeyType="done" + style={{ flex: 1, color: '#ffffff', fontSize: 14, paddingVertical: 12 }} + /> + {checking + ? + : nodeOk === true + ? + : nodeOk === false + ? + : null} + + ({ + width: 48, alignItems: 'center', justifyContent: 'center', + backgroundColor: pressed ? '#1a1a1a' : '#0a0a0a', + borderWidth: 1, borderColor: '#1f1f1f', + borderRadius: 12, + })} + > + + + + {nodeOk === false && ( + + Cannot reach node — check URL and that the node is running + + )} + + + {/* CTA — прижата к правому нижнему краю. */} + + Linking.openURL(GITEA_URL).catch(() => {})} + /> + goToPage(2)} /> + + + + {/* ───────── Slide 3: Your keys ───────── */} + + + + + + + + Твой аккаунт + + + Создай новую пару ключей или импортируй существующую. + Ключи хранятся только на этом устройстве. + + + + + {/* CTA — прижата к правому нижнему краю. */} + + router.push('/(auth)/import' as never)} + /> + router.push('/(auth)/create' as never)} + /> + + + + + {/* Footer: dots-only pager indicator. 
CTA-кнопки теперь inline + на каждом слайде, чтобы выглядели как полноценные кнопки, а не + мелкий "Далее" в углу. */} + + {[0, 1, 2].map(i => ( + goToPage(i)} + hitSlop={8} + style={{ + width: page === i ? 22 : 7, + height: 7, + borderRadius: 3.5, + backgroundColor: page === i ? '#1d9bf0' : '#2a2a2a', + }} + /> + ))} + + + ); +} + +// ───────── helper components ───────── + +/** + * Primary CTA button — синий pill. Натуральная ширина (hugs content), + * `numberOfLines={1}` на лейбле чтобы текст не переносился. Фон + * применяется через inner View, а не напрямую на Pressable — это + * обходит редкие RN-баги, когда backgroundColor на Pressable не + * рендерится пока кнопка не нажата. + */ +function CTAPrimary({ label, onPress }: { label: string; onPress: () => void }) { + return ( + ({ opacity: pressed ? 0.85 : 1 })}> + + + {label} + + + + ); +} + +/** Secondary CTA — тёмный pill с border'ом, optional icon слева. */ +function CTASecondary({ + label, icon, onPress, +}: { + label: string; + icon?: React.ComponentProps['name']; + onPress: () => void; +}) { + return ( + ({ opacity: pressed ? 0.6 : 1 })}> + + {icon && } + + {label} + + + + ); +} + +function FeatureRow({ + icon, title, text, +}: { icon: React.ComponentProps['name']; title: string; text: string }) { + return ( + + + + + + + {title} + + + {text} + + + + ); +} + +function OptionCard({ + icon, title, text, actionLabel, onAction, +}: { + icon: React.ComponentProps['name']; + title: string; + text: string; + actionLabel?: string; + onAction?: () => void; +}) { + return ( + + + + + {title} + + + + {text} + + {actionLabel && onAction && ( + ({ opacity: pressed ? 
0.6 : 1, marginTop: 8 })}> + + {actionLabel} + + + )} + + ); +} diff --git a/client-app/babel.config.js b/client-app/babel.config.js new file mode 100644 index 0000000..d08d04a --- /dev/null +++ b/client-app/babel.config.js @@ -0,0 +1,12 @@ +module.exports = function (api) { + api.cache(true); + return { + presets: [ + ['babel-preset-expo', { jsxImportSource: 'nativewind' }], + 'nativewind/babel', + ], + plugins: [ + 'react-native-reanimated/plugin', // must be last + ], + }; +}; diff --git a/client-app/components/AnimatedSlot.tsx b/client-app/components/AnimatedSlot.tsx new file mode 100644 index 0000000..6673fd8 --- /dev/null +++ b/client-app/components/AnimatedSlot.tsx @@ -0,0 +1,67 @@ +/** + * AnimatedSlot — обёртка над ``. Исторически тут была slide- + * анимация при смене pathname'а + tab-swipe pan. Обе фичи вызывали + * баги: + * - tab-swipe конфликтовал с vertical FlatList scroll (чаты пропадали + * при flick'е) + * - translateX застревал на ±width когда анимация прерывалась + * re-render-cascade'ом от useGlobalInbox → UI уезжал за экран + * + * Решение: убрали обе. Навигация между tab'ами — только через NavBar, + * переходы — без slide. Sub-route back-swipe остаётся (он не конфликтует + * с FlatList'ом, т.к. на chat detail FlatList inverted и смотрит вверх). + */ +import React, { useMemo } from 'react'; +import { PanResponder, View } from 'react-native'; +import { Slot, usePathname, router } from 'expo-router'; +import { useWindowDimensions } from 'react-native'; + +function topSegment(p: string): string { + const m = p.match(/^\/[^/]+/); + return m ? m[0] : ''; +} + +/** Это sub-route — внутри какого-либо tab'а, но глубже первого сегмента. 
*/ +function isSubRoute(path: string): boolean { + const seg = topSegment(path); + if (seg === '/chats') return path !== '/chats' && path !== '/chats/'; + if (seg === '/feed') return path !== '/feed' && path !== '/feed/'; + if (seg === '/profile') return true; + if (seg === '/settings') return true; + if (seg === '/compose') return true; + return false; +} + +export function AnimatedSlot() { + const pathname = usePathname(); + const { width } = useWindowDimensions(); + + // Pan-gesture только на sub-route'ах: swipe-right → back. На tab'ах + // gesture полностью отключён — исключает конфликт с vertical scroll. + const panResponder = useMemo(() => { + const sub = isSubRoute(pathname); + + return PanResponder.create({ + onMoveShouldSetPanResponder: (_e, g) => { + if (!sub) return false; + if (Math.abs(g.dx) < 40) return false; + if (Math.abs(g.dy) > 15) return false; + if (g.dx <= 0) return false; // только правое направление + return Math.abs(g.dx) > Math.abs(g.dy) * 3; + }, + onMoveShouldSetPanResponderCapture: () => false, + + onPanResponderRelease: (_e, g) => { + if (!sub) return; + if (Math.abs(g.dy) > 30) return; + if (g.dx > width * 0.30) router.back(); + }, + }); + }, [pathname, width]); + + return ( + + + + ); +} diff --git a/client-app/components/Avatar.tsx b/client-app/components/Avatar.tsx new file mode 100644 index 0000000..9186203 --- /dev/null +++ b/client-app/components/Avatar.tsx @@ -0,0 +1,76 @@ +/** + * Avatar — круглая заглушка с инициалом, опционально online-пип. + * Нет зависимостей от асинхронных источников (картинок) — для messenger-тайла + * важнее мгновенный рендер, чем фотография. Если в будущем будут фото, + * расширяем здесь. + */ +import React from 'react'; +import { View, Text } from 'react-native'; + +export interface AvatarProps { + /** Имя / @username — берём первый символ для placeholder. */ + name?: string; + /** Адрес (hex pubkey) — fallback для тех у кого нет имени. */ + address?: string; + /** Общий размер в px. 
По умолчанию 48 (tile size). */ + size?: number; + /** Цвет пипа справа-снизу. undefined = без пипа. */ + dotColor?: string; + /** Класс для обёртки (position: relative кадр). */ + className?: string; +} + +/** Простое хэширование имени → один из 6 оттенков серого для разнообразия. */ +function pickBg(seed: string): string { + const shades = ['#1a1a1a', '#222222', '#2a2a2a', '#151515', '#1c1c1c', '#1f1f1f']; + let h = 0; + for (let i = 0; i < seed.length; i++) h = (h * 31 + seed.charCodeAt(i)) & 0xffff; + return shades[h % shades.length]; +} + +export function Avatar({ name, address, size = 48, dotColor, className }: AvatarProps) { + const seed = (name ?? address ?? '?').replace(/^@/, ''); + const initial = seed.charAt(0).toUpperCase() || '?'; + const bg = pickBg(seed); + + return ( + + + + {initial} + + + {dotColor && ( + + )} + + ); +} diff --git a/client-app/components/ChatTile.tsx b/client-app/components/ChatTile.tsx new file mode 100644 index 0000000..eb74137 --- /dev/null +++ b/client-app/components/ChatTile.tsx @@ -0,0 +1,174 @@ +/** + * ChatTile — одна строка в списке чатов на главной (Messages screen). + * + * Layout: + * [avatar 44] [name (+verified) (+kind-icon)] [time] + * [last-msg preview] [unread pill] + * + * Kind-icon — мегафон для channel, 👥 для group, ничего для direct. + * Verified checkmark — если у контакта есть @username. + * Online-dot на аватарке — только для direct-чатов с x25519 ключом. + */ +import React from 'react'; +import { View, Text, Pressable } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +import type { Contact, Message } from '@/lib/types'; +import { Avatar } from '@/components/Avatar'; +import { formatWhen } from '@/lib/dates'; +import { useStore } from '@/lib/store'; + +function previewText(s: string, max = 50): string { + return s.length <= max ? s : s.slice(0, max).trimEnd() + '…'; +} + +/** + * Текстовое превью последнего сообщения. 
Если у сообщения нет текста + * (только вложение) — возвращаем маркер с иконкой названием типа: + * "🖼 Photo" / "🎬 Video" / "🎙 Voice" / "📎 File" + * Если текст есть — он используется; если есть и то и другое, префикс + * добавляется перед текстом. + */ +function lastPreview(m: Message): string { + const emojiByKind = { + image: '🖼', video: '🎬', voice: '🎙', file: '📎', + } as const; + const labelByKind = { + image: 'Photo', video: 'Video', voice: 'Voice message', file: 'File', + } as const; + const text = m.text.trim(); + if (m.attachment) { + const prefix = `${emojiByKind[m.attachment.kind]} ${labelByKind[m.attachment.kind]}`; + return text ? `${prefix} ${previewText(text, 40)}` : prefix; + } + return previewText(text); +} + +function shortAddr(a: string, n = 5): string { + if (!a) return '—'; + return a.length <= n * 2 + 1 ? a : `${a.slice(0, n)}…${a.slice(-n)}`; +} + +function displayName(c: Contact): string { + return c.username ? `@${c.username}` : c.alias ?? shortAddr(c.address); +} + +export interface ChatTileProps { + contact: Contact; + lastMessage: Message | null; + onPress: () => void; +} + +export function ChatTile({ contact: c, lastMessage, onPress }: ChatTileProps) { + const name = displayName(c); + const last = lastMessage; + + // Визуальный маркер типа чата. + const kindIcon: React.ComponentProps['name'] | null = + c.kind === 'group' ? 'people' : null; + + // Unread берётся из runtime-store'а (инкрементится в useGlobalInbox, + // обнуляется при открытии чата). Fallback на c.unread для legacy seed. + const storeUnread = useStore(s => s.unreadByContact[c.address] ?? 0); + const unreadCount = storeUnread || (c.unread ?? 0); + const unread = unreadCount > 0 ? unreadCount : null; + + return ( + ({ + backgroundColor: pressed ? 
'#0a0a0a' : 'transparent', + })} + > + + + + + {/* Первая строка: [kind-icon] name [verified] ··· time */} + + {kindIcon && ( + + )} + + {name} + + {c.username && ( + + )} + {last && ( + + {formatWhen(last.timestamp)} + + )} + + + {/* Вторая строка: [✓✓ mine-seen] preview ··· [unread] */} + + {last?.mine && ( + + )} + + {last + ? lastPreview(last) + : c.x25519Pub + ? 'Tap to start encrypted chat' + : 'Waiting for identity…'} + + + {unread !== null && ( + + + {unread > 99 ? '99+' : unread} + + + )} + + + + + ); +} diff --git a/client-app/components/Composer.tsx b/client-app/components/Composer.tsx new file mode 100644 index 0000000..ebc2e3d --- /dev/null +++ b/client-app/components/Composer.tsx @@ -0,0 +1,329 @@ +/** + * Composer — плавающий блок ввода сообщения, прибит к низу. + * + * Композиция: + * 1. Опциональный баннер (edit / reply) сверху. + * 2. Опциональная pending-attachment preview. + * 3. Либо: + * - обычный input-bubble с `[+] [textarea] [↑/🎤/⭕]` + * - inline VoiceRecorder когда идёт запись голосового + * + * Send-action зависит от состояния: + * - есть текст/attachment → ↑ (send) + * - пусто → показываем две иконки: 🎤 (start voice) + ⭕ (open video circle) + * + * API: + * mode, onCancelMode + * text, onChangeText + * onSend, sending + * onAttach — tap на + (AttachmentMenu) + * attachment, onClearAttach + * onFinishVoice — готовая voice-attachment (из VoiceRecorder) + * onStartVideoCircle — tap на ⭕, родитель открывает VideoCircleRecorder + * placeholder + */ +import React, { useRef, useState } from 'react'; +import { View, Text, TextInput, Pressable, ActivityIndicator, Image } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +import type { Attachment } from '@/lib/types'; +import { VoiceRecorder } from '@/components/chat/VoiceRecorder'; + +export type ComposerMode = + | { kind: 'new' } + | { kind: 'edit'; text: string } + | { kind: 'reply'; msgId: string; author: string; preview: string }; + +export interface ComposerProps { + 
mode: ComposerMode; + onCancelMode?: () => void; + + text: string; + onChangeText: (t: string) => void; + + onSend: () => void; + sending?: boolean; + + onAttach?: () => void; + + attachment?: Attachment | null; + onClearAttach?: () => void; + + /** Voice recording завершена и отправляем сразу (мгновенный flow). */ + onFinishVoice?: (att: Attachment) => void; + /** Tap на "⭕" — родитель открывает VideoCircleRecorder. */ + onStartVideoCircle?: () => void; + + placeholder?: string; +} + +const INPUT_MIN_HEIGHT = 24; +const INPUT_MAX_HEIGHT = 72; + +export function Composer(props: ComposerProps) { + const { + mode, onCancelMode, text, onChangeText, onSend, sending, onAttach, + attachment, onClearAttach, + onFinishVoice, onStartVideoCircle, + placeholder, + } = props; + + const inputRef = useRef(null); + const [recordingVoice, setRecordingVoice] = useState(false); + + const hasContent = !!text.trim() || !!attachment; + const canSend = hasContent && !sending; + const inEdit = mode.kind === 'edit'; + const inReply = mode.kind === 'reply'; + + const focusInput = () => inputRef.current?.focus(); + + return ( + + {/* ── Banner: edit / reply ── */} + {(inEdit || inReply) && !recordingVoice && ( + + + + {inEdit && ( + + Edit message + + )} + {inReply && ( + <> + + Reply to {(mode as { author: string }).author} + + + {(mode as { preview: string }).preview} + + + )} + + ({ opacity: pressed ? 0.5 : 1 })} + > + + + + )} + + {/* ── Pending attachment preview ── */} + {attachment && !recordingVoice && ( + + )} + + {/* ── Voice recording (inline) ИЛИ обычный input ── */} + {recordingVoice ? ( + { + setRecordingVoice(false); + onFinishVoice?.(att); + }} + onCancel={() => setRecordingVoice(false)} + /> + ) : ( + + + {/* + attach — всегда, кроме edit */} + {onAttach && !inEdit && ( + { e.stopPropagation?.(); onAttach(); }} + hitSlop={6} + style={({ pressed }) => ({ + width: 32, height: 32, borderRadius: 16, + alignItems: 'center', justifyContent: 'center', + opacity: pressed ? 
0.6 : 1, + })} + > + + + )} + + + + {/* Правая часть: send ИЛИ [mic + video-circle] */} + {canSend ? ( + { e.stopPropagation?.(); onSend(); }} + style={({ pressed }) => ({ + width: 32, height: 32, borderRadius: 16, + backgroundColor: pressed ? '#1a8cd8' : '#1d9bf0', + alignItems: 'center', justifyContent: 'center', + })} + > + {sending ? ( + + ) : ( + + )} + + ) : !inEdit && (onFinishVoice || onStartVideoCircle) ? ( + + {onStartVideoCircle && ( + { e.stopPropagation?.(); onStartVideoCircle(); }} + hitSlop={6} + style={({ pressed }) => ({ + width: 32, height: 32, borderRadius: 16, + alignItems: 'center', justifyContent: 'center', + opacity: pressed ? 0.6 : 1, + })} + > + + + )} + {onFinishVoice && ( + { e.stopPropagation?.(); setRecordingVoice(true); }} + hitSlop={6} + style={({ pressed }) => ({ + width: 32, height: 32, borderRadius: 16, + alignItems: 'center', justifyContent: 'center', + opacity: pressed ? 0.6 : 1, + })} + > + + + )} + + ) : null} + + + )} + + ); +} + +// ─── Attachment chip — preview текущего pending attachment'а ──────── + +function AttachmentChip({ + attachment, onClear, +}: { + attachment: Attachment; + onClear?: () => void; +}) { + const icon: React.ComponentProps['name'] = + attachment.kind === 'image' ? 'image-outline' : + attachment.kind === 'video' ? 'videocam-outline' : + attachment.kind === 'voice' ? 'mic-outline' : + 'document-outline'; + + return ( + + {attachment.kind === 'image' || attachment.kind === 'video' ? ( + + ) : ( + + + + )} + + + + {attachment.name ?? attachmentLabel(attachment)} + + + {attachment.kind.toUpperCase()} + {attachment.circle ? ' · circle' : ''} + {attachment.size ? ` · ${(attachment.size / 1024).toFixed(0)} KB` : ''} + {attachment.duration ? ` · ${attachment.duration}s` : ''} + + + + ({ opacity: pressed ? 0.5 : 1, padding: 4 })} + > + + + + ); +} + +function attachmentLabel(a: Attachment): string { + switch (a.kind) { + case 'image': return 'Photo'; + case 'video': return a.circle ? 
'Video message' : 'Video'; + case 'voice': return 'Voice message'; + case 'file': return 'File'; + } +} diff --git a/client-app/components/Header.tsx b/client-app/components/Header.tsx new file mode 100644 index 0000000..10f8d96 --- /dev/null +++ b/client-app/components/Header.tsx @@ -0,0 +1,76 @@ +/** + * Header — единая шапка экрана: [left slot] [title centered] [right slot]. + * + * Правила выравнивания: + * - left/right принимают натуральную ширину контента (обычно 1-2 + * IconButton'а 36px, или pressable-avatar 32px). + * - title (ReactNode, принимает как string, так и compound — аватар + + * имя вместе) всегда центрирован через flex:1 + alignItems:center. + * Абсолютно не позиционируется, т.к. при слишком широком title'е + * лучше ужать его, чем наложить на кнопки. + * + * `title` может быть строкой (тогда рендерится как Text 17px semibold) + * либо произвольным node'ом — используется в chat detail для + * [avatar][name + typing-subtitle] compound-блока. + * + * `divider` (default true) — тонкая 1px линия снизу; в tab-страницах + * обычно выключена (TabHeader всегда ставит divider=false). + */ +import React, { ReactNode } from 'react'; +import { View, Text } from 'react-native'; + +export interface HeaderProps { + title?: ReactNode; + left?: ReactNode; + right?: ReactNode; + /** Показывать нижнюю тонкую линию-разделитель. По умолчанию true. */ + divider?: boolean; +} + +export function Header({ title, left, right, divider = true }: HeaderProps) { + return ( + + + {/* Left slot — натуральная ширина, минимум 44 чтобы title + визуально центрировался для одно-icon-left + одно-icon-right. */} + {left} + + {/* Title centered */} + + {typeof title === 'string' ? ( + + {title} + + ) : title ?? null} + + + {/* Right slot — row, натуральная ширина, минимум 44. gap=4 + чтобы несколько IconButton'ов не слипались в selection-mode. 
*/} + + {right} + + + + ); +} diff --git a/client-app/components/IconButton.tsx b/client-app/components/IconButton.tsx new file mode 100644 index 0000000..df62c53 --- /dev/null +++ b/client-app/components/IconButton.tsx @@ -0,0 +1,61 @@ +/** + * IconButton — круглая touch-target кнопка под Ionicon. + * + * Три варианта: + * - 'ghost' — прозрачная, используется в хедере (шестерёнка, back). + * - 'solid' — акцентный заливной круг, например composer FAB. + * - 'tile' — квадратная заливка 36×36 для небольших action-chip'ов. + * + * Размер управляется props.size (диаметр). Touch-target никогда меньше 40px + * (accessibility), поэтому для size<40 внутренний иконопад растёт. + */ +import React from 'react'; +import { Pressable, View } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +type IoniconName = React.ComponentProps['name']; + +export interface IconButtonProps { + icon: IoniconName; + onPress?: () => void; + variant?: 'ghost' | 'solid' | 'tile'; + size?: number; // visual diameter; hit slop ensures accessibility + color?: string; // override icon color + disabled?: boolean; + className?: string; +} + +export function IconButton({ + icon, onPress, variant = 'ghost', size = 40, color, disabled, className, +}: IconButtonProps) { + const iconSize = Math.round(size * 0.5); + const bg = + variant === 'solid' ? '#1d9bf0' : + variant === 'tile' ? '#1a1a1a' : + 'transparent'; + const tint = + color ?? + (variant === 'solid' ? '#ffffff' : + disabled ? '#3a3a3a' : + '#e7e7e7'); + + const radius = variant === 'tile' ? 10 : size / 2; + + return ( + ({ + width: size, + height: size, + borderRadius: radius, + backgroundColor: pressed && !disabled ? (variant === 'solid' ? 
'#1a8cd8' : '#1a1a1a') : bg, + alignItems: 'center', + justifyContent: 'center', + })} + > + + + ); +} diff --git a/client-app/components/NavBar.tsx b/client-app/components/NavBar.tsx new file mode 100644 index 0000000..0e54566 --- /dev/null +++ b/client-app/components/NavBar.tsx @@ -0,0 +1,150 @@ +/** + * NavBar — нижний бар на 5 иконок без подписей. + * + * Активный таб: + * - иконка заполненная (Ionicons variant без `-outline`) + * - вокруг иконки subtle highlight-блок (чуть светлее bg), радиус 14 + * - текст/бейдж остаются как у inactive + * + * Inactive: + * - outline-иконка, цвет #6b6b6b + * - soon-таб дополнительно dimmed и показывает чип SOON + * + * Роутинг через expo-router `router.replace` — без стекa, каждый tab это + * полная страница без "back" концепции. + */ +import React from 'react'; +import { View, Pressable, Text } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { useRouter, usePathname } from 'expo-router'; + +type IoniconName = React.ComponentProps['name']; + +interface Item { + key: string; + href: string; + icon: IoniconName; + iconActive: IoniconName; + badge?: number; + soon?: boolean; +} + +export interface NavBarProps { + bottomInset?: number; + requestCount?: number; + notifCount?: number; +} + +export function NavBar({ bottomInset = 0, requestCount = 0, notifCount = 0 }: NavBarProps) { + const router = useRouter(); + const pathname = usePathname(); + + const items: Item[] = [ + { key: 'home', href: '/(app)/chats', icon: 'home-outline', iconActive: 'home', badge: requestCount }, + { key: 'add', href: '/(app)/new-contact', icon: 'search-outline', iconActive: 'search' }, + { key: 'feed', href: '/(app)/feed', icon: 'newspaper-outline', iconActive: 'newspaper' }, + { key: 'notif', href: '/(app)/requests', icon: 'notifications-outline', iconActive: 'notifications', badge: notifCount }, + { key: 'wallet', href: '/(app)/wallet', icon: 'wallet-outline', iconActive: 'wallet' }, + ]; + + // NavBar active-matching: 
путь может начинаться с "/chats" ИЛИ с href + // напрямую. Вариант `/chats/xyz` тоже считается active для home. + const isActive = (href: string) => { + // Нормализуем /(app)/chats → /chats + const norm = href.replace(/^\/\(app\)/, ''); + return pathname === norm || pathname.startsWith(norm + '/'); + }; + + return ( + + {items.map((it) => { + const active = isActive(it.href); + return ( + { + if (it.soon) return; + router.replace(it.href as never); + }} + hitSlop={6} + style={({ pressed }) => ({ + flex: 1, + alignItems: 'center', + justifyContent: 'center', + paddingVertical: 4, + opacity: pressed ? 0.65 : 1, + })} + > + + + {it.badge && it.badge > 0 ? ( + + + {it.badge > 99 ? '99+' : it.badge} + + + ) : null} + {it.soon && ( + + + SOON + + + )} + + + ); + })} + + ); +} diff --git a/client-app/components/SearchBar.tsx b/client-app/components/SearchBar.tsx new file mode 100644 index 0000000..0c63e53 --- /dev/null +++ b/client-app/components/SearchBar.tsx @@ -0,0 +1,88 @@ +/** + * SearchBar — серый блок, в состоянии idle текст с иконкой 🔍 отцентрированы. + * + * Когда пользователь тапает/фокусирует — поле становится input-friendly, но + * визуально рестайл не нужен: при наличии текста placeholder скрыт и + * пользовательский ввод выравнивается влево автоматически (multiline off). + */ +import React, { useState } from 'react'; +import { View, TextInput, Text } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +export interface SearchBarProps { + value: string; + onChangeText: (v: string) => void; + placeholder?: string; + autoFocus?: boolean; + onSubmitEditing?: () => void; +} + +export function SearchBar({ + value, onChangeText, placeholder = 'Search', autoFocus, onSubmitEditing, +}: SearchBarProps) { + const [focused, setFocused] = useState(false); + + // Placeholder центрируется пока нет фокуса И нет значения. + // Как только юзер фокусируется или начинает печатать — иконка+текст + // прыгают к левому краю, чтобы не мешать вводу. 
+ const centered = !focused && !value; + + return ( + + {centered ? ( + // ── Idle state — только текст+icon, отцентрированы. + // Невидимый TextInput поверх ловит tap, чтобы не дергать focus вручную. + + + {placeholder} + setFocused(true)} + onSubmitEditing={onSubmitEditing} + returnKeyType="search" + style={{ + position: 'absolute', left: 0, right: 0, top: 0, bottom: 0, + color: 'transparent', + // Скрываем cursor в idle-режиме; при focus компонент перерисуется. + }} + /> + + ) : ( + + + setFocused(true)} + onBlur={() => setFocused(false)} + onSubmitEditing={onSubmitEditing} + returnKeyType="search" + style={{ + flex: 1, + color: '#ffffff', + fontSize: 14, + padding: 0, + includeFontPadding: false, + }} + /> + + )} + + ); +} diff --git a/client-app/components/TabHeader.tsx b/client-app/components/TabHeader.tsx new file mode 100644 index 0000000..2022535 --- /dev/null +++ b/client-app/components/TabHeader.tsx @@ -0,0 +1,59 @@ +/** + * TabHeader — общая шапка для всех tab-страниц (home/feed/notifications/wallet). + * + * Структура строго как в референсе Messages-экрана: + * [avatar 32 → /settings] [title] [right slot] + * + * Без нижнего разделителя (divider=false) — тот же уровень, что и фон экрана. + * + * Right-slot по умолчанию — шестерёнка → /settings. Но экраны могут передать + * свой (например, refresh в wallet). Левый avatar — всегда клик-навигация в + * settings, как в референсе. + */ +import React from 'react'; +import { Pressable } from 'react-native'; +import { useRouter } from 'expo-router'; +import { useStore } from '@/lib/store'; +import { Avatar } from '@/components/Avatar'; +import { Header } from '@/components/Header'; +import { IconButton } from '@/components/IconButton'; + +export interface TabHeaderProps { + title: string; + /** Right-slot. Если не передан — выставляется IconButton с settings-outline. */ + right?: React.ReactNode; + /** Dot-color на profile-avatar'е (например, WS live/polling indicator). 
*/ + profileDotColor?: string; +} + +export function TabHeader({ title, right, profileDotColor }: TabHeaderProps) { + const router = useRouter(); + const username = useStore(s => s.username); + const keyFile = useStore(s => s.keyFile); + + return ( +
router.push('/(app)/settings' as never)} hitSlop={8}> + + + } + right={ + right ?? ( + router.push('/(app)/settings' as never)} + /> + ) + } + /> + ); +} diff --git a/client-app/components/chat/AttachmentMenu.tsx b/client-app/components/chat/AttachmentMenu.tsx new file mode 100644 index 0000000..234ebd2 --- /dev/null +++ b/client-app/components/chat/AttachmentMenu.tsx @@ -0,0 +1,188 @@ +/** + * AttachmentMenu — bottom-sheet с вариантами прикрепления. + * + * Выводится при нажатии на `+` в composer'е. Опции: + * - 📷 Photo / video из галереи (expo-image-picker) + * - 📸 Take photo (камера) + * - 📎 File (expo-document-picker) + * - 🎙️ Voice message — stub (запись через expo-av потребует + * permissions runtime + recording UI; сейчас добавляет мок- + * голосовое с duration 4s) + * + * Всё визуально — тёмный overlay + sheet снизу. Закрытие по tap'у на + * overlay или на Cancel. + */ +import React from 'react'; +import { View, Text, Pressable, Alert, Modal } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { useSafeAreaInsets } from 'react-native-safe-area-context'; +import * as ImagePicker from 'expo-image-picker'; +import * as DocumentPicker from 'expo-document-picker'; + +import type { Attachment } from '@/lib/types'; + +export interface AttachmentMenuProps { + visible: boolean; + onClose: () => void; + /** Вызывается когда attachment готов для отправки. 
*/ + onPick: (att: Attachment) => void; +} + +export function AttachmentMenu({ visible, onClose, onPick }: AttachmentMenuProps) { + const insets = useSafeAreaInsets(); + + const pickImageOrVideo = async () => { + try { + const perm = await ImagePicker.requestMediaLibraryPermissionsAsync(); + if (!perm.granted) { + Alert.alert('Permission needed', 'Grant photos access to attach media.'); + return; + } + const result = await ImagePicker.launchImageLibraryAsync({ + mediaTypes: ImagePicker.MediaTypeOptions.All, + quality: 0.85, + allowsEditing: false, + }); + if (result.canceled) return; + const asset = result.assets[0]; + onPick({ + kind: asset.type === 'video' ? 'video' : 'image', + uri: asset.uri, + mime: asset.mimeType, + width: asset.width, + height: asset.height, + duration: asset.duration ? Math.round(asset.duration / 1000) : undefined, + }); + onClose(); + } catch (e: any) { + Alert.alert('Pick failed', e?.message ?? 'Unknown error'); + } + }; + + const takePhoto = async () => { + try { + const perm = await ImagePicker.requestCameraPermissionsAsync(); + if (!perm.granted) { + Alert.alert('Permission needed', 'Grant camera access to take a photo.'); + return; + } + const result = await ImagePicker.launchCameraAsync({ quality: 0.85 }); + if (result.canceled) return; + const asset = result.assets[0]; + onPick({ + kind: asset.type === 'video' ? 'video' : 'image', + uri: asset.uri, + mime: asset.mimeType, + width: asset.width, + height: asset.height, + }); + onClose(); + } catch (e: any) { + Alert.alert('Camera failed', e?.message ?? 'Unknown error'); + } + }; + + const pickFile = async () => { + try { + const res = await DocumentPicker.getDocumentAsync({ + type: '*/*', + copyToCacheDirectory: true, + }); + if (res.canceled) return; + const asset = res.assets[0]; + onPick({ + kind: 'file', + uri: asset.uri, + name: asset.name, + mime: asset.mimeType ?? 
undefined, + size: asset.size, + }); + onClose(); + } catch (e: any) { + Alert.alert('File pick failed', e?.message ?? 'Unknown error'); + } + }; + + // Voice recorder больше не stub — см. inline-кнопку 🎤 в composer'е, + // которая разворачивает VoiceRecorder (expo-av Audio.Recording). Опция + // Voice в этом меню убрана, т.к. дублировала бы UX. + + return ( + + + + {}} + style={{ + backgroundColor: '#111111', + borderTopLeftRadius: 20, + borderTopRightRadius: 20, + paddingTop: 8, + paddingBottom: Math.max(insets.bottom, 12) + 10, + paddingHorizontal: 10, + borderTopWidth: 1, borderColor: '#1f1f1f', + }} + > + {/* Drag handle */} + + + Attach + + + + + + + + + ); +} + +function Row({ + icon, label, onPress, +}: { + icon: React.ComponentProps['name']; + label: string; + onPress: () => void; +}) { + return ( + ({ + flexDirection: 'row', + alignItems: 'center', + gap: 14, + paddingHorizontal: 14, + paddingVertical: 14, + borderRadius: 14, + backgroundColor: pressed ? '#1a1a1a' : 'transparent', + })} + > + + + + {label} + + ); +} diff --git a/client-app/components/chat/AttachmentPreview.tsx b/client-app/components/chat/AttachmentPreview.tsx new file mode 100644 index 0000000..468012a --- /dev/null +++ b/client-app/components/chat/AttachmentPreview.tsx @@ -0,0 +1,178 @@ +/** + * AttachmentPreview — рендер `Message.attachment` внутри bubble'а. + * + * Четыре формы: + * - image → Image с object-fit cover, aspect-ratio из width/height + * - video → то же + play-overlay в центре, duration внизу-справа + * - voice → row [play-icon] [waveform stub] [duration] + * - file → row [file-icon] [name + size] + * + * Вложения размещаются ВНУТРИ того же bubble'а что и текст, чуть ниже + * footer'а нет и ширина bubble'а снимает maxWidth-ограничение ради + * изображений (отдельный media-first-bubble case). 
+ */ +import React from 'react'; +import { View, Text, Image } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +import type { Attachment } from '@/lib/types'; +import { VoicePlayer } from '@/components/chat/VoicePlayer'; +import { VideoCirclePlayer } from '@/components/chat/VideoCirclePlayer'; + +export interface AttachmentPreviewProps { + attachment: Attachment; + /** Используется для тонирования footer-элементов. */ + own?: boolean; +} + +function formatSize(bytes: number): string { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(0)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)} MB`; + return `${(bytes / 1024 / 1024 / 1024).toFixed(2)} GB`; +} + +function formatDuration(seconds: number): string { + const m = Math.floor(seconds / 60); + const s = Math.floor(seconds % 60); + return `${m}:${String(s).padStart(2, '0')}`; +} + +export function AttachmentPreview({ attachment, own }: AttachmentPreviewProps) { + switch (attachment.kind) { + case 'image': + return ; + case 'video': + // circle=true — круглое видео-сообщение (Telegram-стиль). + return attachment.circle + ? + : ; + case 'voice': + return ; + case 'file': + return ; + } +} + +// ─── Image ────────────────────────────────────────────────────────── + +function ImageAttachment({ att }: { att: Attachment }) { + // Aspect-ratio из реальных width/height; fallback 4:3. + const aspect = att.width && att.height ? att.width / att.height : 4 / 3; + return ( + + ); +} + +// ─── Video ────────────────────────────────────────────────────────── + +function VideoAttachment({ att }: { att: Attachment }) { + const aspect = att.width && att.height ? 
att.width / att.height : 16 / 9; + return ( + + + {/* Play overlay по центру */} + + + + {att.duration !== undefined && ( + + + {formatDuration(att.duration)} + + + )} + + ); +} + +// ─── Voice ────────────────────────────────────────────────────────── +// Реальный плеер — см. components/chat/VoicePlayer.tsx (expo-av Sound). + +// ─── File ─────────────────────────────────────────────────────────── + +function FileAttachment({ att, own }: { att: Attachment; own?: boolean }) { + return ( + + + + + + + {att.name ?? 'file'} + + + {att.size !== undefined ? formatSize(att.size) : ''} + {att.size !== undefined && att.mime ? ' · ' : ''} + {att.mime ?? ''} + + + + ); +} diff --git a/client-app/components/chat/DaySeparator.tsx b/client-app/components/chat/DaySeparator.tsx new file mode 100644 index 0000000..f5b18cb --- /dev/null +++ b/client-app/components/chat/DaySeparator.tsx @@ -0,0 +1,36 @@ +/** + * DaySeparator — центральный лейбл "Сегодня" / "Вчера" / "17 июня 2025" + * между группами сообщений. + * + * Стиль: тонкий шрифт серого цвета, маленький размер. В референсе этот + * лейбл не должен перетягивать на себя внимание — он визуальный якорь, + * не заголовок. + */ +import React from 'react'; +import { View, Text, Platform } from 'react-native'; + +export interface DaySeparatorProps { + label: string; +} + +export function DaySeparator({ label }: DaySeparatorProps) { + return ( + + + {label} + + + ); +} diff --git a/client-app/components/chat/MessageBubble.tsx b/client-app/components/chat/MessageBubble.tsx new file mode 100644 index 0000000..d8985ca --- /dev/null +++ b/client-app/components/chat/MessageBubble.tsx @@ -0,0 +1,374 @@ +/** + * MessageBubble — рендер одного сообщения с gesture interactions. + * + * Гестуры — разведены по двум примитивам во избежание конфликта со + * скроллом FlatList'а: + * + * 1. Swipe-left (reply): PanResponder на Animated.View обёртке + * bubble'а. 
`onMoveShouldSetPanResponder` клеймит responder ТОЛЬКО + * когда пользователь сдвинул палец > 6px влево и горизонталь + * преобладает над вертикалью. Для вертикального скролла + * `onMoveShouldSet` возвращает false — FlatList получает gesture. + * Touchdown ничего не клеймит (onStartShouldSetPanResponder + * отсутствует). + * + * 2. Long-press / tap: через View.onTouchStart/End. Primitive touch + * events bubble'ятся независимо от responder'а. Long-press запускаем + * timer'ом на 550ms, cancel при `onTouchMove` с достаточной + * амплитудой. Tap — короткое касание без move в selection mode. + * + * 3. `selectionMode=true` — PanResponder disabled (в selection режиме + * свайпы не работают). + * + * 4. ReplyQuote — отдельный Pressable над bubble-текстом; tap прыгает + * к оригиналу через onJumpToReply. + * + * 5. highlight prop — bubble-row мерцает accent-blue фоном, использует + * Animated.Value; управляется из ChatScreen после scrollToIndex. + */ +import React, { useRef, useEffect } from 'react'; +import { + View, Text, Pressable, ViewStyle, Animated, PanResponder, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +import type { Message } from '@/lib/types'; +import { relTime } from '@/lib/dates'; +import { Avatar } from '@/components/Avatar'; +import { AttachmentPreview } from '@/components/chat/AttachmentPreview'; +import { ReplyQuote } from '@/components/chat/ReplyQuote'; + +export const PEER_AVATAR_SLOT = 34; +const SWIPE_THRESHOLD = 60; +const LONG_PRESS_MS = 550; +const TAP_MAX_MOVEMENT = 8; +const TAP_MAX_ELAPSED = 300; + +export interface MessageBubbleProps { + msg: Message; + peerName: string; + peerAddress?: string; + withSenderMeta?: boolean; + showName: boolean; + showAvatar: boolean; + + onReply?: (m: Message) => void; + onLongPress?: (m: Message) => void; + onTap?: (m: Message) => void; + onOpenProfile?: () => void; + onJumpToReply?: (originalId: string) => void; + + selectionMode?: boolean; + selected?: boolean; + /** 
Mgnt-управляемый highlight: row мерцает accent-фоном ~1-2 секунды. */ + highlighted?: boolean; +} + +// ─── Bubble styles ────────────────────────────────────────────────── + +const bubbleBase: ViewStyle = { + borderRadius: 18, + paddingHorizontal: 14, + paddingTop: 8, + paddingBottom: 6, +}; + +const peerBubble: ViewStyle = { + ...bubbleBase, + backgroundColor: '#1a1a1a', + borderBottomLeftRadius: 6, +}; + +const ownBubble: ViewStyle = { + ...bubbleBase, + backgroundColor: '#1d9bf0', + borderBottomRightRadius: 6, +}; + +const bubbleText = { color: '#ffffff', fontSize: 15, lineHeight: 20 } as const; + +// ─── Main ─────────────────────────────────────────────────────────── + +export function MessageBubble(props: MessageBubbleProps) { + if (props.msg.mine) return ; + if (!props.withSenderMeta) return ; + return ; +} + +type Variant = 'own' | 'peer-compact' | 'group-peer'; + +function RowShell({ + msg, peerName, peerAddress, showName, showAvatar, + onReply, onLongPress, onTap, onOpenProfile, onJumpToReply, + selectionMode, selected, highlighted, variant, +}: MessageBubbleProps & { variant: Variant }) { + const translateX = useRef(new Animated.Value(0)).current; + const startTs = useRef(0); + const moved = useRef(false); + const lpTimer = useRef | null>(null); + + const clearLp = () => { + if (lpTimer.current) { clearTimeout(lpTimer.current); lpTimer.current = null; } + }; + + // Touch start — запускаем long-press timer (НЕ клеймим responder). + const onTouchStart = () => { + startTs.current = Date.now(); + moved.current = false; + clearLp(); + if (onLongPress) { + lpTimer.current = setTimeout(() => { + if (!moved.current) onLongPress(msg); + lpTimer.current = null; + }, LONG_PRESS_MS); + } + }; + + const onTouchMove = (e: { nativeEvent: { pageX: number; pageY: number } }) => { + // Если пользователь двигает палец — отменяем long-press timer. + // Малые движения (< TAP_MAX_MOVEMENT) игнорируем — устраняют + // fale-cancel от дрожания пальца. 
+ // Здесь нет точного dx/dy от gesture-системы, используем primitive + // touch coords отсчитываемые по абсолютным координатам. Проще — + // всегда отменяем на first move (PanResponder ниже отнимет + // responder если leftward). + moved.current = true; + clearLp(); + }; + + const onTouchEnd = () => { + const elapsed = Date.now() - startTs.current; + clearLp(); + // Короткий tap без движения → в selection mode toggle. + if (!moved.current && elapsed < TAP_MAX_ELAPSED && selectionMode) { + onTap?.(msg); + } + }; + + // Swipe-to-reply: PanResponder клеймит ТОЛЬКО leftward-dominant move. + // Для vertical scroll / rightward swipe / start-touch возвращает false, + // FlatList / AnimatedSlot получают gesture. + const panResponder = useRef( + PanResponder.create({ + onMoveShouldSetPanResponder: (_e, g) => { + if (selectionMode) return false; + // Leftward > 6px и горизонталь преобладает. + return g.dx < -6 && Math.abs(g.dx) > Math.abs(g.dy) * 1.5; + }, + onPanResponderGrant: () => { + // Как только мы заклеймили gesture, отменяем long-press + // (пользователь явно свайпает, не удерживает). + clearLp(); + moved.current = true; + }, + onPanResponderMove: (_e, g) => { + translateX.setValue(Math.min(0, g.dx)); + }, + onPanResponderRelease: (_e, g) => { + if (g.dx <= -SWIPE_THRESHOLD) onReply?.(msg); + Animated.spring(translateX, { + toValue: 0, friction: 6, tension: 80, useNativeDriver: true, + }).start(); + }, + onPanResponderTerminate: () => { + Animated.spring(translateX, { + toValue: 0, friction: 6, tension: 80, useNativeDriver: true, + }).start(); + }, + }), + ).current; + + // Highlight fade: при переключении highlighted=true крутим короткую + // анимацию "flash + fade out" через Animated.Value (0→1→0 за ~1.8s). 
+ const highlightAnim = useRef(new Animated.Value(0)).current; + useEffect(() => { + if (!highlighted) return; + highlightAnim.setValue(0); + Animated.sequence([ + Animated.timing(highlightAnim, { toValue: 1, duration: 150, useNativeDriver: false }), + Animated.delay(1400), + Animated.timing(highlightAnim, { toValue: 0, duration: 450, useNativeDriver: false }), + ]).start(); + }, [highlighted, highlightAnim]); + + const highlightBg = highlightAnim.interpolate({ + inputRange: [0, 1], + outputRange: ['rgba(29,155,240,0)', 'rgba(29,155,240,0.22)'], + }); + + const isMine = variant === 'own'; + const hasAttachment = !!msg.attachment; + const hasReply = !!msg.replyTo; + const attachmentOnly = hasAttachment && !msg.text.trim(); + const bubbleStyle = attachmentOnly + ? { ...(isMine ? ownBubble : peerBubble), padding: 4 } + : (isMine ? ownBubble : peerBubble); + + const bubbleNode = ( + + + {msg.replyTo && ( + onJumpToReply?.(msg.replyTo!.id)} + /> + )} + {msg.attachment && ( + + )} + {msg.text.trim() ? ( + {msg.text} + ) : null} + + + + ); + + const contentRow = + variant === 'own' ? ( + + {bubbleNode} + + ) : variant === 'peer-compact' ? ( + + {bubbleNode} + + ) : ( + + {showName && ( + + + {peerName} + + + )} + + + {showAvatar ? ( + + + + ) : null} + + {bubbleNode} + + + ); + + return ( + { clearLp(); moved.current = true; }} + style={{ + paddingHorizontal: 8, + marginBottom: 6, + // Selection & highlight накладываются: highlight flash побеждает + // когда анимация > 0, иначе статичный selection-tint. + backgroundColor: selected ? 
'rgba(29,155,240,0.12)' : highlightBg, + position: 'relative', + }} + > + {contentRow} + {selectionMode && ( + onTap?.(msg)} + /> + )} + + ); +} + +// ─── Clickable check-dot ──────────────────────────────────────────── + +function CheckDot({ selected, onPress }: { selected: boolean; onPress: () => void }) { + return ( + + + {selected && } + + + ); +} + +// ─── Footer ───────────────────────────────────────────────────────── + +interface FooterProps { + edited: boolean; + time: string; + own?: boolean; + read?: boolean; +} + +function BubbleFooter({ edited, time, own, read }: FooterProps) { + const textColor = own ? 'rgba(255,255,255,0.78)' : '#8b8b8b'; + const dotColor = own ? 'rgba(255,255,255,0.55)' : '#5a5a5a'; + return ( + + {edited && ( + <> + Edited + · + + )} + {time} + {own && ( + + )} + + ); +} diff --git a/client-app/components/chat/ReplyQuote.tsx b/client-app/components/chat/ReplyQuote.tsx new file mode 100644 index 0000000..a1bba49 --- /dev/null +++ b/client-app/components/chat/ReplyQuote.tsx @@ -0,0 +1,70 @@ +/** + * ReplyQuote — блок "цитаты" внутри bubble'а сообщения-ответа. + * + * Визуал: slim-row с синим бордером слева (accent-bar), author в синем, + * preview text — серым, в одну строку. + * + * Tap на quoted-блок → onJump → ChatScreen скроллит к оригиналу и + * подсвечивает его на пару секунд. Если оригинал не найден в текущем + * списке (удалён / ушёл за пределы пагинации) — onJump может просто + * no-op'нуть. + * + * Цвета зависят от того в чьём bubble'е мы находимся: + * - own (синий bubble) → quote border = белый, текст белый/85% + * - peer (серый bubble) → quote border = accent blue, текст white + */ +import React from 'react'; +import { View, Text, Pressable } from 'react-native'; + +export interface ReplyQuoteProps { + author: string; + preview: string; + own?: boolean; + onJump?: () => void; +} + +export function ReplyQuote({ author, preview, own, onJump }: ReplyQuoteProps) { + const barColor = own ? 
'rgba(255,255,255,0.85)' : '#1d9bf0'; + const authorColor = own ? '#ffffff' : '#1d9bf0'; + const previewColor = own ? 'rgba(255,255,255,0.85)' : '#c0c0c0'; + + return ( + ({ + flexDirection: 'row', + backgroundColor: own ? 'rgba(255,255,255,0.10)' : 'rgba(29,155,240,0.10)', + borderRadius: 10, + overflow: 'hidden', + marginBottom: 5, + opacity: pressed ? 0.7 : 1, + })} + > + {/* Accent bar слева */} + + + + {author} + + + {preview || 'attachment'} + + + + ); +} diff --git a/client-app/components/chat/VideoCirclePlayer.tsx b/client-app/components/chat/VideoCirclePlayer.tsx new file mode 100644 index 0000000..9d01cf6 --- /dev/null +++ b/client-app/components/chat/VideoCirclePlayer.tsx @@ -0,0 +1,158 @@ +/** + * VideoCirclePlayer — telegram-style круглое видео-сообщение. + * + * Мигрировано с expo-av `