Browse Source

more performance updates

aggressive caching
turn off debug logs
master
Silberengel 1 month ago
parent
commit
83e61d192d
  1. 34
      package-lock.json
  2. 4
      package.json
  3. 4
      public/healthz.json
  4. 65
      src/lib/modules/comments/CommentThread.svelte
  5. 6
      src/lib/modules/discussions/DiscussionList.svelte
  6. 33
      src/lib/modules/discussions/DiscussionView.svelte
  7. 43
      src/lib/modules/feed/FeedPage.svelte
  8. 129
      src/lib/modules/profiles/ProfilePage.svelte
  9. 4
      src/lib/services/nostr/nostr-client.ts
  10. 31
      vite.config.ts

34
package-lock.json generated

@@ -49,8 +49,10 @@
"prettier": "^3.2.5",
"prettier-plugin-svelte": "^3.2.2",
"tailwindcss": "^3.4.1",
"terser": "^5.46.0",
"typescript": "^5.3.3",
"vite": "^5.4.21"
"vite": "^5.4.21",
"vite-plugin-compression": "^0.5.1"
}
},
"node_modules/@alloc/quick-lru": {
@@ -9158,6 +9160,36 @@
}
}
},
"node_modules/vite-plugin-compression": {
"version": "0.5.1",
"resolved": "https://registry.npmjs.org/vite-plugin-compression/-/vite-plugin-compression-0.5.1.tgz",
"integrity": "sha512-5QJKBDc+gNYVqL/skgFAP81Yuzo9R+EAf19d+EtsMF/i8kFUpNi3J/H01QD3Oo8zBQn+NzoCIFkpPLynoOzaJg==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2",
"debug": "^4.3.3",
"fs-extra": "^10.0.0"
},
"peerDependencies": {
"vite": ">=2.0.0"
}
},
"node_modules/vite-plugin-compression/node_modules/fs-extra": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz",
"integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/vite-plugin-pwa": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/vite-plugin-pwa/-/vite-plugin-pwa-1.2.0.tgz",

4
package.json

@@ -63,8 +63,10 @@
"prettier": "^3.2.5",
"prettier-plugin-svelte": "^3.2.2",
"tailwindcss": "^3.4.1",
"terser": "^5.46.0",
"typescript": "^5.3.3",
"vite": "^5.4.21"
"vite": "^5.4.21",
"vite-plugin-compression": "^0.5.1"
},
"overrides": {
"@sveltejs/vite-plugin-svelte": "^4.0.0-next.6",

4
public/healthz.json

@@ -2,7 +2,7 @@
"status": "ok",
"service": "aitherboard",
"version": "0.2.0",
"buildTime": "2026-02-07T06:35:08.691Z",
"buildTime": "2026-02-07T06:54:31.135Z",
"gitCommit": "unknown",
"timestamp": 1770446108691
"timestamp": 1770447271135
}

65
src/lib/modules/comments/CommentThread.svelte

@@ -299,6 +299,34 @@
return;
}
// Load from cache first (fast - instant display)
try {
const { getRecentCachedEvents } = await import('../../services/cache/event-cache.js');
const cachedComments = await getRecentCachedEvents([KIND.COMMENT], 60 * 60 * 1000, config.feedLimit); // 1 hour cache
const cachedKind1 = await getRecentCachedEvents([KIND.SHORT_TEXT_NOTE], 60 * 60 * 1000, config.feedLimit);
const cachedYakBacks = await getRecentCachedEvents([KIND.VOICE_REPLY], 60 * 60 * 1000, config.feedLimit);
const cachedZaps = await getRecentCachedEvents([KIND.ZAP_RECEIPT], 60 * 60 * 1000, config.feedLimit);
// Filter cached events to only those that reference this thread
const cachedReplies = [
...cachedComments.filter(r => referencesRoot(r)),
...cachedKind1.filter(r => referencesRoot(r)),
...cachedYakBacks.filter(r => referencesRoot(r)),
...cachedZaps.filter(r => referencesRoot(r))
];
if (cachedReplies.length > 0 && isMounted) {
// Process cached replies immediately
handleReplyUpdate(cachedReplies);
loading = false; // Show cached content immediately
} else {
loading = true; // Only show loading if no cache
}
} catch (error) {
console.debug('Error loading cached comments:', error);
loading = true; // Show loading if cache check fails
}
const allRelays = relayManager.getProfileReadRelays();
const replyFilters: any[] = [
{ kinds: [KIND.COMMENT], '#e': [threadId], limit: config.feedLimit },
@@ -310,44 +338,23 @@
{ kinds: [KIND.ZAP_RECEIPT], '#e': [threadId], limit: config.feedLimit }
];
// fetchEvents with useCache:true returns cached data immediately if available,
// then fetches fresh data in background. Only show loading if no cache.
// Stream fresh data from relays (progressive enhancement)
try {
// Quick cache check - if we have cache, don't show loading
if (!isMounted) return;
const fetchPromise1 = nostrClient.fetchEvents(
replyFilters,
allRelays,
{ useCache: true, cacheResults: false, timeout: config.shortTimeout }
);
activeFetchPromises.add(fetchPromise1);
const quickCacheCheck = await fetchPromise1;
activeFetchPromises.delete(fetchPromise1);
if (!isMounted) return;
if (quickCacheCheck.length === 0) {
loading = true; // Only show loading if no cache
}
// Now fetch with full options - returns relay results immediately, then enhances with cache
// onUpdate callback will be called as events arrive from relays, allowing immediate rendering
// Use high priority to ensure comments load before background fetches (reactions, profiles, etc.)
const fetchPromise2 = nostrClient.fetchEvents(
// Use cache-first strategy - already shown cache above, now stream updates
const fetchPromise = nostrClient.fetchEvents(
replyFilters,
allRelays,
{
useCache: true,
useCache: 'cache-first', // Already shown cache above, now stream updates
cacheResults: true,
timeout: config.longTimeout,
onUpdate: handleReplyUpdate,
onUpdate: handleReplyUpdate, // Stream events as they arrive
priority: 'high'
}
);
activeFetchPromises.add(fetchPromise2);
const allReplies = await fetchPromise2;
activeFetchPromises.delete(fetchPromise2);
activeFetchPromises.add(fetchPromise);
const allReplies = await fetchPromise;
activeFetchPromises.delete(fetchPromise);
if (!isMounted) return; // Don't process if unmounted

6
src/lib/modules/discussions/DiscussionList.svelte

@@ -136,7 +136,7 @@
try {
// Always load all cached threads (limit 100), filtering happens in the view
const cachedThreads = await getRecentCachedEvents([KIND.DISCUSSION_THREAD], 15 * 60 * 1000, 100);
const cachedThreads = await getRecentCachedEvents([KIND.DISCUSSION_THREAD], 60 * 60 * 1000, 100); // 1 hour cache (optimized for slow connections)
if (cachedThreads.length > 0 && isMounted) {
// Build threads map from cached results (no time filtering here)
@@ -176,13 +176,13 @@
const zapRelays = relayManager.getZapReceiptReadRelays();
const commentRelays = relayManager.getCommentReadRelays();
// Query relays first with 3-second timeout, then fill from cache if needed
// Load from cache first, then stream updates from relays (optimized for slow connections)
// Always fetch limit 100, no time filtering
const fetchPromise = nostrClient.fetchEvents(
[{ kinds: [KIND.DISCUSSION_THREAD], limit: 100 }],
threadRelays,
{
useCache: 'relay-first', // Query relays first with timeout, fill from cache if relay query returns nothing
useCache: 'cache-first', // Load from cache first, then stream updates from relays
cacheResults: true, // Cache the results
timeout: config.standardTimeout,
onUpdate: async (updatedEvents) => {

33
src/lib/modules/discussions/DiscussionView.svelte

@@ -15,7 +15,18 @@
let rootEvent = $state<NostrEvent | null>(null);
let loading = $state(true);
// Cleanup tracking
let isMounted = $state(true);
// Cleanup on unmount
$effect(() => {
return () => {
isMounted = false;
};
});
onMount(async () => {
isMounted = true;
await nostrClient.initialize();
loadRootEvent();
});
@@ -82,15 +93,31 @@
}
async function loadRootEvent() {
loading = true;
// Load from cache first (fast - instant display)
try {
const { getEvent } = await import('../../services/cache/event-cache.js');
const cachedEvent = await getEvent(threadId);
if (cachedEvent && isMounted) {
// Find the root OP by traversing up the chain
rootEvent = await findRootEvent(cachedEvent);
loading = false; // Show cached content immediately
} else {
loading = true; // Only show loading if no cache
}
} catch (error) {
console.debug('Error loading cached event:', error);
loading = true; // Show loading if cache check fails
}
// Fetch fresh data from relays (progressive enhancement)
try {
const threadRelays = relayManager.getThreadReadRelays();
const feedRelays = relayManager.getFeedReadRelays();
const allRelays = [...new Set([...threadRelays, ...feedRelays])];
// Load the event by ID
// Load the event by ID with cache-first strategy
const event = await nostrClient.getEventById(threadId, allRelays);
if (event) {
if (event && isMounted) {
// Find the root OP by traversing up the chain
rootEvent = await findRootEvent(event);
}

43
src/lib/modules/feed/FeedPage.svelte

@@ -227,10 +227,10 @@
relayError = null;
try {
// Load from cache first (fast)
// Load from cache first (fast - instant display)
if (!singleRelay) {
const feedKinds = getFeedKinds().filter(k => k !== KIND.DISCUSSION_THREAD);
const cached = await getRecentFeedEvents(feedKinds, 15 * 60 * 1000, config.feedLimit);
const cached = await getRecentFeedEvents(feedKinds, 60 * 60 * 1000, config.feedLimit); // 1 hour cache
const filtered = cached.filter(e =>
e.kind !== KIND.DISCUSSION_THREAD &&
getKindInfo(e.kind).showInFeed === true
@@ -259,6 +259,8 @@
const feedKinds = getFeedKinds().filter(k => k !== KIND.DISCUSSION_THREAD);
const filters = feedKinds.map(k => ({ kinds: [k], limit: config.feedLimit }));
// Stream events as they arrive from relays (progressive enhancement)
// Don't wait for all relays - update UI as each relay responds
const fetched = await nostrClient.fetchEvents(
filters,
relays,
@@ -267,19 +269,47 @@
cacheResults: false,
timeout: config.singleRelayTimeout
} : {
useCache: 'relay-first',
useCache: 'cache-first', // Already shown cache above
cacheResults: true,
timeout: config.standardTimeout
timeout: config.standardTimeout,
// Stream events as they arrive from each relay
onUpdate: (newEvents) => {
if (!isMounted) return;
const filtered = newEvents.filter(e =>
e.kind !== KIND.DISCUSSION_THREAD &&
getKindInfo(e.kind).showInFeed === true
);
if (filtered.length === 0) return;
// Merge with existing events (deduplicate by ID)
const eventMap = new Map(allEvents.map((e: NostrEvent) => [e.id, e]));
for (const event of filtered) {
eventMap.set(event.id, event);
}
// Update UI immediately with new events
const sorted = Array.from(eventMap.values())
.sort((a: NostrEvent, b: NostrEvent) => b.created_at - a.created_at);
allEvents = sorted;
if (sorted.length > 0) {
oldestTimestamp = Math.min(...sorted.map((e: NostrEvent) => e.created_at));
}
}
}
);
if (!isMounted) return;
// Final merge of any remaining events (for single relay mode or fallback)
const filtered = fetched.filter(e =>
e.kind !== KIND.DISCUSSION_THREAD &&
getKindInfo(e.kind).showInFeed === true
);
if (filtered.length > 0) {
const eventMap = new Map(allEvents.map((e: NostrEvent) => [e.id, e]));
for (const event of filtered) {
eventMap.set(event.id, event);
@@ -291,9 +321,12 @@
if (sorted.length > 0) {
oldestTimestamp = Math.min(...sorted.map((e: NostrEvent) => e.created_at));
}
}
// Batch fetch referenced events (e, a, q tags) after main events are loaded
await batchFetchReferencedEvents(sorted);
if (allEvents.length > 0) {
await batchFetchReferencedEvents(allEvents);
}
} catch (error) {
console.error('Error loading feed:', error);
if (!events.length) {

129
src/lib/modules/profiles/ProfilePage.svelte

@@ -112,7 +112,7 @@
const events = await nostrClient.fetchEvents(
[{ kinds: [KIND.METADATA], authors: [pubkey], limit: 1 }],
relays,
{ useCache: true, cacheResults: true }
{ useCache: 'cache-first', cacheResults: true }
);
if (events.length > 0 && isMounted) {
@@ -128,7 +128,30 @@
async function loadWallComments(profileEventId: string) {
if (!isMounted || !profileEventId) return;
loadingWall = true;
// Load from cache first (fast - instant display)
try {
const { getRecentCachedEvents } = await import('../../services/cache/event-cache.js');
const cachedComments = await getRecentCachedEvents([KIND.COMMENT], 60 * 60 * 1000, 100); // 1 hour cache
// Filter to only comments that reference this profile event as root
const filtered = cachedComments.filter(comment => {
const kTag = comment.tags.find(t => t[0] === 'K' && t[1] === '0');
const eTag = comment.tags.find(t => t[0] === 'E' && t[1] === profileEventId);
return kTag && eTag;
});
if (filtered.length > 0 && isMounted) {
wallComments = filtered.sort((a, b) => b.created_at - a.created_at);
loadingWall = false; // Show cached content immediately
} else {
loadingWall = true; // Only show loading if no cache
}
} catch (error) {
console.debug('Error loading cached wall comments:', error);
loadingWall = true; // Show loading if cache check fails
}
// Stream fresh data from relays (progressive enhancement)
try {
// Fetch kind 1111 comments that reference this kind 0 event
// NIP-22 format: K="0", E=profileEventId
@@ -143,7 +166,25 @@
}
],
relays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{
useCache: 'cache-first', // Already shown cache above, now stream updates
cacheResults: true,
timeout: config.mediumTimeout,
onUpdate: (newComments) => {
if (!isMounted) return;
// Merge with existing comments
const commentMap = new Map(wallComments.map(c => [c.id, c]));
for (const comment of newComments) {
const kTag = comment.tags.find(t => t[0] === 'K' && t[1] === '0');
const eTag = comment.tags.find(t => t[0] === 'E' && t[1] === profileEventId);
if (kTag && eTag) {
commentMap.set(comment.id, comment);
}
}
wallComments = Array.from(commentMap.values()).sort((a, b) => b.created_at - a.created_at);
loadingWall = false;
}
}
);
if (!isMounted) return;
@@ -177,7 +218,7 @@
const pinLists = await nostrClient.fetchEvents(
[{ kinds: [KIND.PIN_LIST], authors: [pubkey], limit: 1 }],
profileRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{ useCache: 'cache-first', cacheResults: true, timeout: config.mediumTimeout }
);
if (!isMounted || pinLists.length === 0) {
@@ -198,11 +239,24 @@
return;
}
// Fetch the actual pinned events
// Fetch the actual pinned events with cache-first and streaming
const fetchPromise = nostrClient.fetchEvents(
[{ ids: Array.from(pinnedIds), limit: config.feedLimit }],
profileRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{
useCache: 'cache-first', // Load from cache first
cacheResults: true,
timeout: config.mediumTimeout,
onUpdate: (newPins) => {
if (!isMounted) return;
// Merge with existing pins
const pinMap = new Map(pins.map(p => [p.id, p]));
for (const pin of newPins) {
pinMap.set(pin.id, pin);
}
pins = Array.from(pinMap.values()).sort((a, b) => b.created_at - a.created_at);
}
}
);
activeFetchPromises.add(fetchPromise);
const pinnedEvents = await fetchPromise;
@@ -227,7 +281,7 @@
const bookmarkLists = await nostrClient.fetchEvents(
[{ kinds: [KIND.BOOKMARKS], authors: [pubkey], limit: 400 }],
profileRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{ useCache: 'cache-first', cacheResults: true, timeout: config.mediumTimeout }
);
if (!isMounted || bookmarkLists.length === 0) {
@@ -256,6 +310,26 @@
return;
}
// Load from cache first (fast - instant display)
try {
const { getEvent } = await import('../../services/cache/event-cache.js');
const cachedBookmarks: NostrEvent[] = [];
for (const id of bookmarkedIds) {
const cached = await getEvent(id);
if (cached) {
cachedBookmarks.push(cached);
}
}
if (cachedBookmarks.length > 0 && isMounted) {
bookmarks = cachedBookmarks.sort((a, b) => b.created_at - a.created_at);
loadingBookmarks = false; // Show cached content immediately
}
} catch (error) {
console.debug('Error loading cached bookmarks:', error);
}
// Stream fresh data from relays (progressive enhancement)
// Fetch the actual bookmarked events in batches
const batchSize = 100;
const allBookmarkedEvents: NostrEvent[] = [];
@@ -266,7 +340,21 @@
const fetchPromise = nostrClient.fetchEvents(
[{ ids: batch, limit: batch.length }],
profileRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{
useCache: 'cache-first', // Already shown cache above, now stream updates
cacheResults: true,
timeout: config.mediumTimeout,
onUpdate: (newBookmarks) => {
if (!isMounted) return;
// Merge with existing bookmarks
const bookmarkMap = new Map(bookmarks.map(b => [b.id, b]));
for (const bookmark of newBookmarks) {
bookmarkMap.set(bookmark.id, bookmark);
}
bookmarks = Array.from(bookmarkMap.values()).sort((a, b) => b.created_at - a.created_at);
loadingBookmarks = false;
}
}
);
activeFetchPromises.add(fetchPromise);
const batchEvents = await fetchPromise;
@@ -301,14 +389,14 @@
const userPosts = await nostrClient.fetchEvents(
[{ kinds: [KIND.SHORT_TEXT_NOTE], authors: [pubkey], limit: 100 }],
notificationRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{ useCache: 'cache-first', cacheResults: true, timeout: config.mediumTimeout }
);
if (!isMounted) return;
const userPostIds = new Set(userPosts.map(p => p.id));
// Fetch notifications: replies, mentions, reactions, zaps
// Fetch notifications: replies, mentions, reactions, zaps with cache-first and streaming
const notificationEvents = await nostrClient.fetchEvents(
[
{ kinds: [KIND.SHORT_TEXT_NOTE], '#e': Array.from(userPostIds).slice(0, 50), limit: 100 }, // Replies to user's posts
@@ -317,7 +405,20 @@
{ kinds: [KIND.ZAP_RECEIPT], '#p': [pubkey], limit: 100 } // Zaps
],
notificationRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{
useCache: 'cache-first', // Load from cache first
cacheResults: true,
timeout: config.mediumTimeout,
onUpdate: (newNotifications) => {
if (!isMounted) return;
// Merge with existing notifications
const notificationMap = new Map(notifications.map(n => [n.id, n]));
for (const notification of newNotifications) {
notificationMap.set(notification.id, notification);
}
notifications = Array.from(notificationMap.values()).sort((a, b) => b.created_at - a.created_at);
}
}
);
if (!isMounted) return;
@@ -352,7 +453,7 @@
const fetchPromise1 = nostrClient.fetchEvents(
[{ kinds: [KIND.SHORT_TEXT_NOTE], authors: [currentUserPubkey], limit: config.mediumBatchLimit }],
interactionRelays,
{ useCache: true, cacheResults: true, timeout: config.shortTimeout } // Short timeout for cache
{ useCache: 'cache-first', cacheResults: true, timeout: config.shortTimeout } // Short timeout for cache
);
activeFetchPromises.add(fetchPromise1);
const currentUserPosts = await fetchPromise1;
@@ -368,14 +469,14 @@
return;
}
// Fetch interactions with timeout to prevent blocking
// Fetch interactions with cache-first and streaming
const fetchPromise2 = nostrClient.fetchEvents(
[
{ kinds: [KIND.SHORT_TEXT_NOTE], authors: [profilePubkey], '#e': Array.from(currentUserPostIds).slice(0, config.smallBatchLimit), limit: config.smallBatchLimit }, // Limit IDs to avoid huge queries
{ kinds: [KIND.SHORT_TEXT_NOTE], authors: [profilePubkey], '#p': [currentUserPubkey], limit: config.smallBatchLimit }
],
interactionRelays,
{ useCache: true, cacheResults: true, timeout: config.mediumTimeout }
{ useCache: 'cache-first', cacheResults: true, timeout: config.mediumTimeout }
);
activeFetchPromises.add(fetchPromise2);
const interactionEvents = await Promise.race([

4
src/lib/services/nostr/nostr-client.ts

@@ -61,7 +61,7 @@ class NostrClient {
// Cache NIP-11 metadata to avoid repeated HTTP requests
private nip11MetadataCache: Map<string, { requiresAuth: boolean; cachedAt: number }> = new Map();
private readonly NIP11_CACHE_TTL = 300000; // 5 minutes
private readonly NIP11_CACHE_TTL = 60 * 60 * 24 * 1000; // 24 hours (optimized for slow connections)
// Track fetch patterns to identify repeated fetches
private fetchPatterns: Map<string, { count: number; lastFetch: number; totalEvents: number }> = new Map();
@@ -69,7 +69,7 @@ class NostrClient {
// Cache empty results to prevent repeated fetches of non-existent data
// Also track pending fetches to prevent concurrent duplicate fetches
private emptyResultCache: Map<string, { cachedAt: number; pending?: boolean }> = new Map();
private readonly EMPTY_RESULT_CACHE_TTL = 300000; // 5 minutes - cache empty results longer to prevent repeated fetches
private readonly EMPTY_RESULT_CACHE_TTL = 60 * 60 * 1000; // 1 hour - cache empty results longer to prevent repeated fetches (optimized for slow connections)
private readonly PENDING_FETCH_TTL = 5000; // 5 seconds - how long to wait for a pending fetch
/**

31
vite.config.ts

@@ -2,10 +2,21 @@ import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
import { execSync } from 'child_process';
import { SvelteKitPWA } from '@vite-pwa/sveltekit';
import compression from 'vite-plugin-compression';
export default defineConfig({
plugins: [
sveltekit(),
compression({
algorithm: 'gzip',
ext: '.gz',
threshold: 1024, // Compress files > 1KB
}),
compression({
algorithm: 'brotliCompress',
ext: '.br',
threshold: 1024,
}),
SvelteKitPWA({
strategies: 'generateSW',
registerType: 'autoUpdate',
@@ -24,16 +35,15 @@ export default defineConfig({
}
},
{
// Cache API responses (relay responses) with network-first strategy
// Cache API responses (relay responses) with cache-first strategy for slow connections
urlPattern: /^wss?:\/\//i,
handler: 'NetworkFirst',
handler: 'CacheFirst',
options: {
cacheName: 'api-cache',
expiration: {
maxEntries: 50,
maxAgeSeconds: 60 * 5 // 5 minutes - short cache for API responses
},
networkTimeoutSeconds: 3 // Fallback to cache if network is slow
maxEntries: 200, // Increased from 50
maxAgeSeconds: 60 * 60 * 24 // 24 hours instead of 5 minutes (optimized for slow connections)
}
}
},
{
@@ -126,6 +136,13 @@ export default defineConfig({
build: {
target: 'esnext',
sourcemap: false,
manifest: false
manifest: false,
minify: 'terser',
terserOptions: {
compress: {
drop_console: ['log', 'debug'], // Remove console.log and console.debug in production, keep error/warn
passes: 2, // Multiple passes for better compression
}
}
}
});

Loading…
Cancel
Save