feat: Implement fetchWithRetry helper and enhance download concurrency handling
@@ -14,6 +14,29 @@ function base64ToBuffer(base64: string) {
   return bytes.buffer;
 }
 
+// New helper function to handle retries for failed fetches
+async function fetchWithRetry(url: string, retries: number = 3, delay: number = 1000): Promise<Response> {
+  for (let i = 0; i < retries; i++) {
+    try {
+      const response = await fetch(url);
+      if (response.ok) {
+        return response;
+      }
+      // Don't retry on client errors (e.g., 404), but do on server errors (5xx)
+      if (response.status >= 400 && response.status < 500) {
+        throw new Error(`Client error: ${response.status} for URL ${url}`);
+      }
+      // For server errors or network issues, log and prepare to retry
+      console.warn(`Attempt ${i + 1} failed for ${url} with status ${response.status}. Retrying in ${delay}ms...`);
+    } catch (error) {
+      // This catches network errors (e.g., no internet) and the thrown client error
+      console.warn(`Attempt ${i + 1} failed for ${url}. Error: ${error}. Retrying in ${delay}ms...`);
+    }
+    await new Promise(resolve => setTimeout(resolve, delay * (i + 1))); // Wait before the next attempt; the delay grows linearly with the attempt number
+  }
+  throw new Error(`Failed to fetch ${url} after ${retries} attempts.`);
+}
+
 export default function DownloadPage() {
   const [downloadState, setDownloadState] = useState<'idle' | 'downloading' | 'decrypting' | 'complete' | 'error'>('idle');
   const [progress, setProgress] = useState(0);
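With the default arguments, fetchWithRetry makes up to three attempts and waits 1000 ms, 2000 ms, then 3000 ms between them (the delay grows linearly with the attempt number, and the loop sleeps once more after the final failed attempt before throwing). A minimal call-site sketch for a single part, reusing the /api/download-part route from this commit; the wrapper name and the null fallback are illustrative only, not part of the change:

// Hypothetical usage sketch; fetchWithRetry is the helper added above.
async function fetchPartOrNull(fileId: string, partIndex: number): Promise<ArrayBuffer | null> {
  try {
    const res = await fetchWithRetry(`/api/download-part/${fileId}/${partIndex}`, 3, 1000);
    return await res.arrayBuffer();
  } catch (err) {
    // Every attempt failed; log and let the caller decide how to recover.
    console.error(err);
    return null;
  }
}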
@@ -45,18 +68,33 @@ export default function DownloadPage() {
       const metadata = await metaRes.json();
       setFilename(metadata.filename);
 
-      // 2. Download all parts in parallel
+      // 2. Download all parts with a concurrency limit
       setDownloadState('downloading');
+      const CONCURRENCY_LIMIT = 5;
+      const queue = [...metadata.parts];
+      const encryptedParts = new Array(metadata.num_parts);
       let downloadedCount = 0;
-      const downloadPromises = metadata.parts.map((part: any) =>
-        fetch(`/api/download-part/${file_id}/${part.part_index}`).then(res => res.arrayBuffer()).then(buffer => {
+
+      const downloadPart = async (part: any) => {
+        const response = await fetchWithRetry(`/api/download-part/${file_id}/${part.part_index}`);
+        const buffer = await response.arrayBuffer();
+        encryptedParts[part.part_index] = { index: part.part_index, data: buffer };
         downloadedCount++;
         setProgress(Math.round((downloadedCount / metadata.num_parts) * 50));
-        return { index: part.part_index, data: buffer };
-        })
-      );
-      const encryptedParts = await Promise.all(downloadPromises);
-      encryptedParts.sort((a, b) => a.index - b.index);
+      };
+
+      const active = [];
+      while (queue.length > 0 || active.length > 0) {
+        while (active.length < CONCURRENCY_LIMIT && queue.length > 0) {
+          const task = queue.shift()!;
+          const promise = downloadPart(task).finally(() => {
+            const index = active.indexOf(promise);
+            if (index > -1) active.splice(index, 1);
+          });
+          active.push(promise);
+        }
+        await Promise.race(active);
+      }
 
       // 3. Import key and decrypt parts
       setDownloadState('decrypting');
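The replacement download loop is a small promise pool: it keeps at most CONCURRENCY_LIMIT part downloads in flight, uses finally to drop each settled promise from the active array, and awaits Promise.race to wait for a slot to free up before starting the next part; a failed downloadPart rejects its promise, so the failure can still propagate out of the awaited race to the surrounding error handling. A generic, self-contained sketch of the same pattern (the helper and its names are illustrative, not part of the commit):

// Illustrative sketch of the bounded-concurrency pattern used above.
async function runWithLimit<T>(tasks: Array<() => Promise<T>>, limit: number): Promise<void> {
  const queue = [...tasks];
  const active: Promise<void>[] = [];
  while (queue.length > 0 || active.length > 0) {
    // Start tasks until the limit is reached or the queue is empty.
    while (active.length < limit && queue.length > 0) {
      const task = queue.shift()!;
      const promise = task().then(() => undefined).finally(() => {
        // Drop this promise from the active set once it settles,
        // freeing a slot for the next queued task.
        const index = active.indexOf(promise);
        if (index > -1) active.splice(index, 1);
      });
      active.push(promise);
    }
    // Block until at least one in-flight task settles.
    if (active.length > 0) await Promise.race(active);
  }
}

// Example (illustrative): await runWithLimit(metadata.parts.map((p: any) => () => downloadPart(p)), CONCURRENCY_LIMIT);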