marriedtermiteblyi committed on
Commit
8f35e50
·
verified ·
1 Parent(s): f93e913

Update hooks/useFileUpload.ts

Browse files
Files changed (1) hide show
  1. hooks/useFileUpload.ts +77 -48
hooks/useFileUpload.ts CHANGED
@@ -1,15 +1,21 @@
1
 
2
- import { useState, useCallback } from 'react';
3
  import { FileItem, UploadStatus } from '../types';
4
  import { uploadBatchToHub } from '../services/hfService';
5
 
6
  export const useFileUpload = () => {
7
  const [files, setFiles] = useState<FileItem[]>([]);
8
  const [isUploading, setIsUploading] = useState(false);
 
 
 
 
9
 
10
- // --- CONFIGURATION ---
11
- const BATCH_SIZE = 5; // Files per request
12
- const CONCURRENCY_LIMIT = 3; // Parallel requests
 
 
13
 
14
  // --- UPLOAD LOGIC ---
15
 
@@ -26,73 +32,96 @@ export const useFileUpload = () => {
26
  }, []);
27
 
28
  const startUpload = useCallback(async () => {
29
- const filesToUpload = files.filter(
 
30
  (f) => f.status === UploadStatus.IDLE || f.status === UploadStatus.ERROR
31
  );
32
 
33
- if (filesToUpload.length === 0) return;
34
 
35
  setIsUploading(true);
36
 
37
- // 1. Create Batches (Chunks)
38
- const batches = [];
39
- for (let i = 0; i < filesToUpload.length; i += BATCH_SIZE) {
40
- batches.push(filesToUpload.slice(i, i + BATCH_SIZE));
41
  }
42
 
43
- // 2. Process Batch Function
44
- const processBatch = async (batch: FileItem[]) => {
45
- // Set status UPLOADING for this batch
46
- setFiles((prev) =>
47
- prev.map((f) =>
48
- batch.find((b) => b.id === f.id)
49
- ? { ...f, status: UploadStatus.UPLOADING, error: undefined }
50
- : f
51
- )
 
 
 
 
 
 
 
 
 
 
52
  );
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
 
54
  try {
55
- // Prepare payload for service
56
- const batchPayload = batch.map(item => ({
57
  id: item.id,
58
  file: item.file,
59
  path: item.path
60
  }));
61
 
62
- // Call API
63
- const urls = await uploadBatchToHub(batchPayload);
64
-
65
- // Success: Update status and add URLs
66
- setFiles((prev) =>
67
- prev.map((f) => {
68
- const index = batch.findIndex(b => b.id === f.id);
69
- if (index !== -1) {
70
- return { ...f, status: UploadStatus.SUCCESS, url: urls[index] };
71
- }
72
- return f;
73
- })
74
- );
75
  } catch (err: any) {
76
- // Error: Update status for whole batch
77
- setFiles((prev) =>
78
- prev.map((f) =>
79
- batch.find((b) => b.id === f.id)
80
- ? { ...f, status: UploadStatus.ERROR, error: err.message }
81
- : f
82
- )
83
- );
 
84
  }
85
  };
86
 
87
- // 3. Execute with Concurrency Limit
88
- // We process batches in groups of CONCURRENCY_LIMIT
89
- for (let i = 0; i < batches.length; i += CONCURRENCY_LIMIT) {
90
- const activeBatches = batches.slice(i, i + CONCURRENCY_LIMIT);
91
- await Promise.allSettled(activeBatches.map(batch => processBatch(batch)));
 
 
92
  }
93
 
 
 
 
94
  setIsUploading(false);
95
- }, [files]);
 
96
 
97
  return {
98
  files,
 
1
 
2
+ import { useState, useCallback, useRef } from 'react';
3
  import { FileItem, UploadStatus } from '../types';
4
  import { uploadBatchToHub } from '../services/hfService';
5
 
6
  export const useFileUpload = () => {
7
  const [files, setFiles] = useState<FileItem[]>([]);
8
  const [isUploading, setIsUploading] = useState(false);
9
+
10
+ // Use Ref to access latest state inside async loops without dependencies issues
11
+ const filesRef = useRef<FileItem[]>([]);
12
+ filesRef.current = files;
13
 
14
+ // --- CONFIGURATION FOR SPEED ---
15
+ // Increase the number of files sent per request to reduce commit-creation time on HF
16
+ const BATCH_SIZE = 10;
17
+ // Number of requests sent in parallel (browsers typically cap at 6 connections per domain)
18
+ const CONCURRENCY_LIMIT = 5;
19
 
20
  // --- UPLOAD LOGIC ---
21
 
 
32
  }, []);
33
 
34
  const startUpload = useCallback(async () => {
35
+ // Filter pending files
36
+ const pendingFiles = filesRef.current.filter(
37
  (f) => f.status === UploadStatus.IDLE || f.status === UploadStatus.ERROR
38
  );
39
 
40
+ if (pendingFiles.length === 0) return;
41
 
42
  setIsUploading(true);
43
 
44
+ // 1. Chunk files into Batches
45
+ const batches: FileItem[][] = [];
46
+ for (let i = 0; i < pendingFiles.length; i += BATCH_SIZE) {
47
+ batches.push(pendingFiles.slice(i, i + BATCH_SIZE));
48
  }
49
 
50
+ // A queue of batches to process
51
+ const queue = [...batches];
52
+ let activeWorkers = 0;
53
+
54
+ // Helper to update status safely
55
+ const updateBatchStatus = (batchItems: FileItem[], status: UploadStatus, result?: { urls?: string[], error?: string }) => {
56
+ setFiles((prev) =>
57
+ prev.map((f) => {
58
+ const batchIndex = batchItems.findIndex(b => b.id === f.id);
59
+ if (batchIndex !== -1) {
60
+ return {
61
+ ...f,
62
+ status: status,
63
+ url: status === UploadStatus.SUCCESS ? result?.urls?.[batchIndex] : f.url,
64
+ error: status === UploadStatus.ERROR ? result?.error : undefined
65
+ };
66
+ }
67
+ return f;
68
+ })
69
  );
70
+ };
71
+
72
+ // 2. The Worker Function
73
+ // Process one batch, then immediately grab the next one from the queue
74
+ const processNextBatch = async (): Promise<void> => {
75
+ if (queue.length === 0) return;
76
+
77
+ const batch = queue.shift();
78
+ if (!batch) return;
79
+
80
+ activeWorkers++;
81
+
82
+ // Update UI -> UPLOADING
83
+ updateBatchStatus(batch, UploadStatus.UPLOADING);
84
 
85
  try {
86
+ const payload = batch.map(item => ({
 
87
  id: item.id,
88
  file: item.file,
89
  path: item.path
90
  }));
91
 
92
+ const urls = await uploadBatchToHub(payload);
93
+
94
+ // Update UI -> SUCCESS
95
+ updateBatchStatus(batch, UploadStatus.SUCCESS, { urls });
96
+
 
 
 
 
 
 
 
 
97
  } catch (err: any) {
98
+ console.error("Batch failed:", err);
99
+ // Update UI -> ERROR
100
+ updateBatchStatus(batch, UploadStatus.ERROR, { error: err.message || "Upload failed" });
101
+ } finally {
102
+ activeWorkers--;
103
+ // Recursively process next batch if available
104
+ if (queue.length > 0) {
105
+ await processNextBatch();
106
+ }
107
  }
108
  };
109
 
110
+ // 3. Start Initial Workers (Pool)
111
+ // Create exactly CONCURRENCY_LIMIT workers that will keep eating from the queue
112
+ const initialWorkers = [];
113
+ const limit = Math.min(CONCURRENCY_LIMIT, batches.length);
114
+
115
+ for (let i = 0; i < limit; i++) {
116
+ initialWorkers.push(processNextBatch());
117
  }
118
 
119
+ // Wait for all workers to finish depleting the queue
120
+ await Promise.all(initialWorkers);
121
+
122
  setIsUploading(false);
123
+
124
+ }, []); // Remove 'files' dependency to avoid closure staleness, use ref
125
 
126
  return {
127
  files,