feat: implement incremental card caching to the server per set to prevent payload size limits.
This commit is contained in:
@@ -32,3 +32,4 @@
|
||||
- [Peasant Algorithm Implementation](./devlog/2025-12-16-225700_peasant_algorithm.md): Completed. Implemented Peasant-specific pack generation rules including slot logic for commons, uncommons, lands, and wildcards.
|
||||
- [Multi-Expansion Selection](./devlog/2025-12-16-230500_multi_expansion_selection.md): Completed. Implemented searchable multi-select interface for "From Expansion" pack generation, allowing mixed-set drafts.
|
||||
- [Game Type Filter](./devlog/2025-12-16-231000_game_type_filter.md): Completed. Added Paper/Digital filter to the expansion selection list.
|
||||
- [Incremental Caching](./devlog/2025-12-16-233000_incremental_caching.md): Completed. Refactored data fetching to cache cards to the server incrementally per set, preventing PayloadTooLarge errors.
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
# Incremental Data Caching
|
||||
|
||||
## Objective
|
||||
Enable caching of card data to the server incrementally per set when multiple sets are selected, rather than sending a single massive payload at the end. avoiding `PayloadTooLargeError`.
|
||||
|
||||
## Implementation Details
|
||||
1. **Helper Function**: Created `cacheCardsToServer` helper within `fetchAndParse` to handle server communication for a chunk of cards.
|
||||
2. **Incremental Loop**: Modified the set fetching loop to call `cacheCardsToServer` immediately after receiving data for each set.
|
||||
3. **UI Feedback**: Updated progress text to clearly indicate when the system is "Caching [Set Name]..." to the server.
|
||||
4. **Error Handling**: Added try/catch within the caching helper to prevent a single cache failure from aborting the entire fetch process (logs error to console).
|
||||
|
||||
## Status
|
||||
Completed. Large multi-set fetches should now be robust against body size limits.
|
||||
@@ -146,6 +146,21 @@ export const CubeManager: React.FC<CubeManagerProps> = ({ packs, setPacks, onGoT
|
||||
setPacks([]);
|
||||
setProgress(sourceMode === 'set' ? 'Fetching set data...' : 'Parsing text...');
|
||||
|
||||
const cacheCardsToServer = async (cardsToCache: ScryfallCard[]) => {
|
||||
if (cardsToCache.length === 0) return;
|
||||
try {
|
||||
// Deduplicate for shipping to server
|
||||
const uniqueCards = Array.from(new Map(cardsToCache.map(c => [c.id, c])).values());
|
||||
await fetch('/api/cards/cache', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ cards: uniqueCards })
|
||||
});
|
||||
} catch (e) {
|
||||
console.error("Failed to cache chunk to server:", e);
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
let expandedCards: ScryfallCard[] = [];
|
||||
|
||||
@@ -154,14 +169,17 @@ export const CubeManager: React.FC<CubeManagerProps> = ({ packs, setPacks, onGoT
|
||||
|
||||
for (const [index, setCode] of selectedSets.entries()) {
|
||||
// Update progress for set
|
||||
const setInfo = availableSets.find(s => s.code === setCode);
|
||||
const setName = setInfo ? setInfo.name : setCode;
|
||||
// const setInfo = availableSets.find(s => s.code === setCode);
|
||||
|
||||
setProgress(`Fetching ${setName}... (${index + 1}/${selectedSets.length})`);
|
||||
setProgress(`Loading sets... (${index + 1}/${selectedSets.length})`);
|
||||
|
||||
const cards = await scryfallService.fetchSetCards(setCode, (_count) => {
|
||||
// Progress handled by outer loop mostly, but we could update strictly if needed.
|
||||
// Progress handled by outer loop mostly
|
||||
});
|
||||
|
||||
// Incrementally cache this set to server
|
||||
await cacheCardsToServer(cards);
|
||||
|
||||
expandedCards.push(...cards);
|
||||
}
|
||||
} else {
|
||||
@@ -198,27 +216,17 @@ export const CubeManager: React.FC<CubeManagerProps> = ({ packs, setPacks, onGoT
|
||||
|
||||
if (missing > 0) {
|
||||
alert(`Warning: ${missing} cards could not be identified or fetched.`);
|
||||
} else {
|
||||
// Optional: Feedback on cache
|
||||
// console.log(`Parsed ${expandedCards.length} cards. (${cachedCount} / ${fetchList.length} unique identifiers were pre-cached)`);
|
||||
}
|
||||
|
||||
// Cache custom list to server
|
||||
if (expandedCards.length > 0) {
|
||||
setProgress('Caching to server...');
|
||||
await cacheCardsToServer(expandedCards);
|
||||
}
|
||||
}
|
||||
|
||||
setRawScryfallData(expandedCards);
|
||||
|
||||
// Cache to server
|
||||
if (expandedCards.length > 0) {
|
||||
setProgress('Loading...');
|
||||
// Deduplicate for shipping to server
|
||||
const uniqueCards = Array.from(new Map(expandedCards.map(c => [c.id, c])).values());
|
||||
|
||||
await fetch('/api/cards/cache', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ cards: uniqueCards }) // Send full metadata
|
||||
});
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
setProgress('');
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ export const db = {
|
||||
const store = transaction.objectStore(STORE_NAME);
|
||||
|
||||
transaction.oncomplete = () => resolve();
|
||||
transaction.onerror = (event) => reject(transaction.error);
|
||||
transaction.onerror = (_event) => reject(transaction.error);
|
||||
|
||||
cards.forEach(card => store.put(card));
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user