feat(feed/chat): lazy-render + pagination for long scrolls
Server pagination
- blockchain.PostsByAuthor signature extended with beforeTs int64;
passing 0 keeps the previous "everything, newest first" behaviour,
non-zero skips posts with CreatedAt >= beforeTs so clients can
paginate older results.
- node.FeedConfig.PostsByAuthor callback type updated; the two
/feed endpoints that use it (timeline + author) now accept
`?before=<unix_seconds>` and forward it through. Both endpoints'
default limit dropped from 50 to 30 to match the client library's
default page size.
- node/api_common.go: new queryInt64 helper for parsing the cursor
param safely (matches the queryInt pattern already used).
Client infinite scroll (Feed tab)
- lib/feed.ts: fetchTimeline / fetchAuthorPosts accept
`{limit?, before?}` options. Old signatures still work for other
callers (fetchForYou / fetchTrending / fetchHashtag) — those are
ranked feeds that don't have a stable cursor so they stay
single-shot.
- feed/index.tsx: tracks loadingMore / exhausted state. onEndReached
(threshold 0.6) fires loadMore() which fetches the next 20 posts
using the oldest currently-loaded post's created_at as `before`.
Deduplicates on post_id before appending. Stops when the server
returns < PAGE_SIZE items. ListFooterComponent shows a small
spinner during paginated fetches.
- FlatList lazy-render tuning on all feed lists (index + hashtag):
initialNumToRender:10, maxToRenderPerBatch:8, windowSize:7,
removeClippedSubviews — first paint stays quick even with 100+
posts loaded.
Chat lazy render
- chats/[id].tsx FlatList: initialNumToRender:25 (~1.5 screens),
maxToRenderPerBatch:12, windowSize:10, removeClippedSubviews.
Keeps initial chat open snappy on conversations with thousands
of messages; RN re-renders a small window around the viewport
and drops the rest.
Tests
- chain_test.go updated for new PostsByAuthor signature.
- All 7 Go packages green.
- tsc --noEmit clean.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -570,7 +570,11 @@ func (c *Chain) Post(postID string) (*PostRecord, error) {
|
|||||||
// PostsByAuthor returns the last `limit` posts by the given author, newest
|
// PostsByAuthor returns the last `limit` posts by the given author, newest
|
||||||
// first. Iterates `postbyauthor:<author>:...` in reverse order. If limit
|
// first. Iterates `postbyauthor:<author>:...` in reverse order. If limit
|
||||||
// ≤ 0, defaults to 50; capped at 200.
|
// ≤ 0, defaults to 50; capped at 200.
|
||||||
func (c *Chain) PostsByAuthor(authorPub string, limit int) ([]*PostRecord, error) {
|
//
|
||||||
|
// If beforeTs > 0, skip posts with CreatedAt >= beforeTs — used by the
|
||||||
|
// timeline/author endpoints to paginate older results. Pass 0 for the
|
||||||
|
// first page (everything, newest first).
|
||||||
|
func (c *Chain) PostsByAuthor(authorPub string, beforeTs int64, limit int) ([]*PostRecord, error) {
|
||||||
if limit <= 0 {
|
if limit <= 0 {
|
||||||
limit = 50
|
limit = 50
|
||||||
}
|
}
|
||||||
@@ -606,6 +610,9 @@ func (c *Chain) PostsByAuthor(authorPub string, limit int) ([]*PostRecord, error
|
|||||||
if rec.Deleted {
|
if rec.Deleted {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if beforeTs > 0 && rec.CreatedAt >= beforeTs {
|
||||||
|
continue
|
||||||
|
}
|
||||||
out = append(out, rec)
|
out = append(out, rec)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -854,7 +854,7 @@ func TestFeedCreatePost(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// PostsByAuthor should list it.
|
// PostsByAuthor should list it.
|
||||||
posts, err := c.PostsByAuthor(alice.PubKeyHex(), 10)
|
posts, err := c.PostsByAuthor(alice.PubKeyHex(), 0, 10)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("PostsByAuthor: %v", err)
|
t.Fatalf("PostsByAuthor: %v", err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -455,6 +455,13 @@ export default function ChatScreen() {
|
|||||||
renderItem={renderRow}
|
renderItem={renderRow}
|
||||||
contentContainerStyle={{ paddingVertical: 10 }}
|
contentContainerStyle={{ paddingVertical: 10 }}
|
||||||
showsVerticalScrollIndicator={false}
|
showsVerticalScrollIndicator={false}
|
||||||
|
// Lazy render: only mount ~1.5 screens of bubbles initially,
|
||||||
|
// render further batches as the user scrolls older. Keeps
|
||||||
|
// initial paint fast on chats with thousands of messages.
|
||||||
|
initialNumToRender={25}
|
||||||
|
maxToRenderPerBatch={12}
|
||||||
|
windowSize={10}
|
||||||
|
removeClippedSubviews
|
||||||
ListEmptyComponent={() => (
|
ListEmptyComponent={() => (
|
||||||
<View style={{
|
<View style={{
|
||||||
flex: 1, alignItems: 'center', justifyContent: 'center',
|
flex: 1, alignItems: 'center', justifyContent: 'center',
|
||||||
|
|||||||
@@ -46,8 +46,12 @@ export default function FeedScreen() {
|
|||||||
const [likedSet, setLikedSet] = useState<Set<string>>(new Set());
|
const [likedSet, setLikedSet] = useState<Set<string>>(new Set());
|
||||||
const [loading, setLoading] = useState(false);
|
const [loading, setLoading] = useState(false);
|
||||||
const [refreshing, setRefreshing] = useState(false);
|
const [refreshing, setRefreshing] = useState(false);
|
||||||
|
const [loadingMore, setLoadingMore] = useState(false);
|
||||||
|
const [exhausted, setExhausted] = useState(false);
|
||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
const PAGE_SIZE = 20;
|
||||||
|
|
||||||
// Guard against rapid tab switches overwriting each other's results.
|
// Guard against rapid tab switches overwriting each other's results.
|
||||||
const requestRef = useRef(0);
|
const requestRef = useRef(0);
|
||||||
|
|
||||||
@@ -56,19 +60,20 @@ export default function FeedScreen() {
|
|||||||
if (isRefresh) setRefreshing(true);
|
if (isRefresh) setRefreshing(true);
|
||||||
else setLoading(true);
|
else setLoading(true);
|
||||||
setError(null);
|
setError(null);
|
||||||
|
setExhausted(false);
|
||||||
|
|
||||||
const seq = ++requestRef.current;
|
const seq = ++requestRef.current;
|
||||||
try {
|
try {
|
||||||
let items: FeedPostItem[] = [];
|
let items: FeedPostItem[] = [];
|
||||||
switch (tab) {
|
switch (tab) {
|
||||||
case 'following':
|
case 'following':
|
||||||
items = await fetchTimeline(keyFile.pub_key, 40);
|
items = await fetchTimeline(keyFile.pub_key, { limit: PAGE_SIZE });
|
||||||
break;
|
break;
|
||||||
case 'foryou':
|
case 'foryou':
|
||||||
items = await fetchForYou(keyFile.pub_key, 40);
|
items = await fetchForYou(keyFile.pub_key, PAGE_SIZE);
|
||||||
break;
|
break;
|
||||||
case 'trending':
|
case 'trending':
|
||||||
items = await fetchTrending(24, 40);
|
items = await fetchTrending(24, PAGE_SIZE);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (seq !== requestRef.current) return; // stale response
|
if (seq !== requestRef.current) return; // stale response
|
||||||
@@ -80,6 +85,9 @@ export default function FeedScreen() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
setPosts(items);
|
setPosts(items);
|
||||||
|
// If the server returned fewer than PAGE_SIZE, we already have
|
||||||
|
// everything — disable further paginated fetches.
|
||||||
|
if (items.length < PAGE_SIZE) setExhausted(true);
|
||||||
|
|
||||||
// Batch-fetch liked_by_me (bounded concurrency — 6 at a time).
|
// Batch-fetch liked_by_me (bounded concurrency — 6 at a time).
|
||||||
const liked = new Set<string>();
|
const liked = new Set<string>();
|
||||||
@@ -102,6 +110,7 @@ export default function FeedScreen() {
|
|||||||
// testable; in production this path shows the empty state.
|
// testable; in production this path shows the empty state.
|
||||||
if (/Network request failed|→\s*404/.test(msg)) {
|
if (/Network request failed|→\s*404/.test(msg)) {
|
||||||
setPosts(getDevSeedFeed());
|
setPosts(getDevSeedFeed());
|
||||||
|
setExhausted(true);
|
||||||
} else {
|
} else {
|
||||||
setError(msg);
|
setError(msg);
|
||||||
}
|
}
|
||||||
@@ -112,6 +121,57 @@ export default function FeedScreen() {
|
|||||||
}
|
}
|
||||||
}, [keyFile, tab]);
|
}, [keyFile, tab]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* loadMore — paginate older posts when the user scrolls to the end
|
||||||
|
* of the list. Only the "following" and "foryou"/trending-less-useful
|
||||||
|
* paths actually support server-side pagination via the `before`
|
||||||
|
* cursor; foryou/trending return their ranked top-N which is by
|
||||||
|
* design not paginated (users very rarely scroll past 20 hot posts).
|
||||||
|
*
|
||||||
|
* We key the next page off the oldest post currently in state. If
|
||||||
|
* the server returns less than PAGE_SIZE items, we mark the list as
|
||||||
|
* exhausted to stop further fetches.
|
||||||
|
*/
|
||||||
|
const loadMore = useCallback(async () => {
|
||||||
|
if (!keyFile || loadingMore || exhausted || refreshing || loading) return;
|
||||||
|
if (posts.length === 0) return;
|
||||||
|
// foryou / trending are ranked, not ordered — no stable cursor to
|
||||||
|
// paginate against in v2.0.0. Skip.
|
||||||
|
if (tab === 'foryou' || tab === 'trending') return;
|
||||||
|
|
||||||
|
const oldest = posts[posts.length - 1];
|
||||||
|
const before = oldest?.created_at;
|
||||||
|
if (!before) return;
|
||||||
|
|
||||||
|
setLoadingMore(true);
|
||||||
|
const seq = requestRef.current; // don't bump — this is additive
|
||||||
|
try {
|
||||||
|
const next = await fetchTimeline(keyFile.pub_key, {
|
||||||
|
limit: PAGE_SIZE, before,
|
||||||
|
});
|
||||||
|
if (seq !== requestRef.current) return;
|
||||||
|
if (next.length === 0) {
|
||||||
|
setExhausted(true);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Dedup by post_id (could overlap on the boundary ts).
|
||||||
|
setPosts(prev => {
|
||||||
|
const have = new Set(prev.map(p => p.post_id));
|
||||||
|
const merged = [...prev];
|
||||||
|
for (const p of next) {
|
||||||
|
if (!have.has(p.post_id)) merged.push(p);
|
||||||
|
}
|
||||||
|
return merged;
|
||||||
|
});
|
||||||
|
if (next.length < PAGE_SIZE) setExhausted(true);
|
||||||
|
} catch {
|
||||||
|
// Don't escalate to error UI for pagination failures — just stop.
|
||||||
|
setExhausted(true);
|
||||||
|
} finally {
|
||||||
|
setLoadingMore(false);
|
||||||
|
}
|
||||||
|
}, [keyFile, loadingMore, exhausted, refreshing, loading, posts, tab]);
|
||||||
|
|
||||||
useEffect(() => { loadPosts(false); }, [loadPosts]);
|
useEffect(() => { loadPosts(false); }, [loadPosts]);
|
||||||
|
|
||||||
const onStatsChanged = useCallback(async (postID: string) => {
|
const onStatsChanged = useCallback(async (postID: string) => {
|
||||||
@@ -219,6 +279,22 @@ export default function FeedScreen() {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
ItemSeparatorComponent={PostSeparator}
|
ItemSeparatorComponent={PostSeparator}
|
||||||
|
onEndReached={loadMore}
|
||||||
|
onEndReachedThreshold={0.6}
|
||||||
|
ListFooterComponent={
|
||||||
|
loadingMore ? (
|
||||||
|
<View style={{ paddingVertical: 20, alignItems: 'center' }}>
|
||||||
|
<ActivityIndicator color="#1d9bf0" size="small" />
|
||||||
|
</View>
|
||||||
|
) : null
|
||||||
|
}
|
||||||
|
// Lazy-render tuning: start with one viewport's worth of posts,
|
||||||
|
// keep a small window around the visible area. Works together
|
||||||
|
// with onEndReached pagination for smooth long-feed scroll.
|
||||||
|
initialNumToRender={10}
|
||||||
|
maxToRenderPerBatch={8}
|
||||||
|
windowSize={7}
|
||||||
|
removeClippedSubviews
|
||||||
refreshControl={
|
refreshControl={
|
||||||
<RefreshControl
|
<RefreshControl
|
||||||
refreshing={refreshing}
|
refreshing={refreshing}
|
||||||
|
|||||||
@@ -95,6 +95,10 @@ export default function HashtagScreen() {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
ItemSeparatorComponent={PostSeparator}
|
ItemSeparatorComponent={PostSeparator}
|
||||||
|
initialNumToRender={10}
|
||||||
|
maxToRenderPerBatch={8}
|
||||||
|
windowSize={7}
|
||||||
|
removeClippedSubviews
|
||||||
refreshControl={
|
refreshControl={
|
||||||
<RefreshControl
|
<RefreshControl
|
||||||
refreshing={refreshing}
|
refreshing={refreshing}
|
||||||
|
|||||||
@@ -226,13 +226,24 @@ export async function bumpView(postID: string): Promise<void> {
|
|||||||
} catch { /* ignore */ }
|
} catch { /* ignore */ }
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetchAuthorPosts(pub: string, limit = 30): Promise<FeedPostItem[]> {
|
export async function fetchAuthorPosts(
|
||||||
const resp = await getJSON<TimelineResponse>(`/feed/author/${pub}?limit=${limit}`);
|
pub: string, opts: { limit?: number; before?: number } = {},
|
||||||
|
): Promise<FeedPostItem[]> {
|
||||||
|
const limit = opts.limit ?? 30;
|
||||||
|
const qs = opts.before
|
||||||
|
? `?limit=${limit}&before=${opts.before}`
|
||||||
|
: `?limit=${limit}`;
|
||||||
|
const resp = await getJSON<TimelineResponse>(`/feed/author/${pub}${qs}`);
|
||||||
return resp.posts ?? [];
|
return resp.posts ?? [];
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetchTimeline(followerPub: string, limit = 30): Promise<FeedPostItem[]> {
|
export async function fetchTimeline(
|
||||||
const resp = await getJSON<TimelineResponse>(`/feed/timeline?follower=${followerPub}&limit=${limit}`);
|
followerPub: string, opts: { limit?: number; before?: number } = {},
|
||||||
|
): Promise<FeedPostItem[]> {
|
||||||
|
const limit = opts.limit ?? 30;
|
||||||
|
let qs = `?follower=${followerPub}&limit=${limit}`;
|
||||||
|
if (opts.before) qs += `&before=${opts.before}`;
|
||||||
|
const resp = await getJSON<TimelineResponse>(`/feed/timeline${qs}`);
|
||||||
return resp.posts ?? [];
|
return resp.posts ?? [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -38,6 +38,20 @@ func queryInt(r *http.Request, key string, def int) int {
|
|||||||
return n
|
return n
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// queryInt64 reads a non-negative int64 query param — typically a unix
|
||||||
|
// timestamp cursor for pagination. Returns def when missing or invalid.
|
||||||
|
func queryInt64(r *http.Request, key string, def int64) int64 {
|
||||||
|
s := r.URL.Query().Get(key)
|
||||||
|
if s == "" {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
n, err := strconv.ParseInt(s, 10, 64)
|
||||||
|
if err != nil || n < 0 {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
// queryIntMin0 parses a query param as a non-negative integer; returns 0 if absent or invalid.
|
// queryIntMin0 parses a query param as a non-negative integer; returns 0 if absent or invalid.
|
||||||
func queryIntMin0(r *http.Request, key string) int {
|
func queryIntMin0(r *http.Request, key string) int {
|
||||||
s := r.URL.Query().Get(key)
|
s := r.URL.Query().Get(key)
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ type FeedConfig struct {
|
|||||||
GetPost func(postID string) (*blockchain.PostRecord, error)
|
GetPost func(postID string) (*blockchain.PostRecord, error)
|
||||||
LikeCount func(postID string) (uint64, error)
|
LikeCount func(postID string) (uint64, error)
|
||||||
HasLiked func(postID, likerPub string) (bool, error)
|
HasLiked func(postID, likerPub string) (bool, error)
|
||||||
PostsByAuthor func(authorPub string, limit int) ([]*blockchain.PostRecord, error)
|
PostsByAuthor func(authorPub string, beforeTs int64, limit int) ([]*blockchain.PostRecord, error)
|
||||||
Following func(followerPub string) ([]string, error)
|
Following func(followerPub string) ([]string, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -440,13 +440,14 @@ func feedAuthor(cfg FeedConfig) http.HandlerFunc {
|
|||||||
jsonErr(w, fmt.Errorf("author pub required"), 400)
|
jsonErr(w, fmt.Errorf("author pub required"), 400)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
limit := queryInt(r, "limit", 50)
|
limit := queryInt(r, "limit", 30)
|
||||||
|
beforeTs := queryInt64(r, "before", 0) // pagination cursor (unix seconds)
|
||||||
|
|
||||||
// Prefer chain-authoritative list (includes soft-deleted flag) so
|
// Prefer chain-authoritative list (includes soft-deleted flag) so
|
||||||
// clients can't be fooled by a stale relay that has an already-
|
// clients can't be fooled by a stale relay that has an already-
|
||||||
// deleted post. If chain isn't wired, fall back to relay index.
|
// deleted post. If chain isn't wired, fall back to relay index.
|
||||||
if cfg.PostsByAuthor != nil {
|
if cfg.PostsByAuthor != nil {
|
||||||
records, err := cfg.PostsByAuthor(pub, limit)
|
records, err := cfg.PostsByAuthor(pub, beforeTs, limit)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
jsonErr(w, err, 500)
|
jsonErr(w, err, 500)
|
||||||
return
|
return
|
||||||
@@ -461,6 +462,8 @@ func feedAuthor(cfg FeedConfig) http.HandlerFunc {
|
|||||||
jsonOK(w, map[string]any{"author": pub, "count": len(out), "posts": out})
|
jsonOK(w, map[string]any{"author": pub, "count": len(out), "posts": out})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
// Fallback: relay index (no chain). Doesn't support `before` yet;
|
||||||
|
// the chain-authoritative path above is what production serves.
|
||||||
ids, err := cfg.Mailbox.PostsByAuthor(pub, limit)
|
ids, err := cfg.Mailbox.PostsByAuthor(pub, limit)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
jsonErr(w, err, 500)
|
jsonErr(w, err, 500)
|
||||||
@@ -556,7 +559,8 @@ func feedTimeline(cfg FeedConfig) http.HandlerFunc {
|
|||||||
jsonErr(w, fmt.Errorf("timeline requires chain queries"), 503)
|
jsonErr(w, fmt.Errorf("timeline requires chain queries"), 503)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
limit := queryInt(r, "limit", 50)
|
limit := queryInt(r, "limit", 30)
|
||||||
|
beforeTs := queryInt64(r, "before", 0) // pagination cursor
|
||||||
perAuthor := limit
|
perAuthor := limit
|
||||||
if perAuthor > 30 {
|
if perAuthor > 30 {
|
||||||
perAuthor = 30
|
perAuthor = 30
|
||||||
@@ -569,7 +573,7 @@ func feedTimeline(cfg FeedConfig) http.HandlerFunc {
|
|||||||
}
|
}
|
||||||
var merged []*blockchain.PostRecord
|
var merged []*blockchain.PostRecord
|
||||||
for _, target := range following {
|
for _, target := range following {
|
||||||
posts, err := cfg.PostsByAuthor(target, perAuthor)
|
posts, err := cfg.PostsByAuthor(target, beforeTs, perAuthor)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user