perf: optimize bulk XML upload for 100k+ files
Backend:
- Add batch insert using multi-row INSERT with ON CONFLICT
- Process in batches of 500 records for optimal DB performance
- Return detailed batch results (inserted, duplicates, errors)

Frontend:
- Parse files in chunks of 500 to prevent memory issues
- Upload in batches of 200 CFDIs per request
- Add detailed progress bar with real-time stats
- Show upload statistics (loaded, duplicates, errors)
- Add cancel functionality during upload
- Refresh data after upload completes

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -61,8 +61,28 @@ export async function createCfdi(data: CreateCfdiData): Promise<Cfdi> {
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function createManyCfdis(cfdis: CreateCfdiData[]): Promise<{ count: number }> {
|
||||
const response = await apiClient.post<{ count: number; message: string }>('/cfdi/bulk', { cfdis });
|
||||
/**
 * Per-batch statistics returned by the bulk CFDI upload endpoint
 * (response body of POST /cfdi/bulk).
 */
export interface BatchUploadResult {
  /** Human-readable status message from the server. */
  message: string;
  /** 1-based index of the batch this result describes. */
  batchNumber: number;
  /** Total number of batches in the overall upload. */
  totalBatches: number;
  /** Number of records newly inserted by this batch. */
  inserted: number;
  /** Number of records skipped as already-existing duplicates. */
  duplicates: number;
  /** Number of records that failed to insert. */
  errors: number;
  /** Error detail strings — presumably one per failed record; confirm against backend contract. */
  errorMessages?: string[];
}
|
||||
|
||||
export async function createManyCfdis(
|
||||
cfdis: CreateCfdiData[],
|
||||
batchNumber?: number,
|
||||
totalBatches?: number,
|
||||
totalFiles?: number
|
||||
): Promise<BatchUploadResult> {
|
||||
const response = await apiClient.post<BatchUploadResult>('/cfdi/bulk', {
|
||||
cfdis,
|
||||
batchNumber: batchNumber || 1,
|
||||
totalBatches: totalBatches || 1,
|
||||
totalFiles: totalFiles || cfdis.length
|
||||
});
|
||||
return response.data;
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user