Mirror of https://gitea.com/actions/cache.git (synced 2025-10-31 17:18:10 +07:00)

Concurrency take 2

commit 289c5d2518
parent ba6476e454
							
								
								
									
dist/restore/index.js (vendored, 65 lines changed)
@@ -1615,53 +1615,40 @@ function commitCache(restClient, cacheId, filesize) {
         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
     });
 }
-function parallelAwait(queue, concurrency) {
-    var _a;
+function uploadFile(restClient, cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const workQueue = queue.reverse();
-        let completedWork = [];
-        let entries = queue.length;
-        while (entries > 0) {
-            if (entries < concurrency) {
-                completedWork.push(yield Promise.all(workQueue));
-            }
-            else {
-                let promises = [];
-                let i;
-                for (i = 0; i < concurrency; i++) {
-                    promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
-                }
-                completedWork.push(yield Promise.all(promises));
+        // Upload Chunks
+        const fileSize = fs.statSync(archivePath).size;
+        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
+        const responses = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+        const concurrency = 4; // # of HTTP requests in parallel
+        const threads = new Array(concurrency);
+        core.debug("Awaiting all uploads");
+        let offset = 0;
+        Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+            while (offset < fileSize) {
+                const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
+                responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
+        })));
+        fs.closeSync(fd);
+        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        if (failedResponse) {
+            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
-        return completedWork;
+        return;
     });
 }
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
-        core.debug("Uploading chunks");
-        // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
-        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-        const uploads = [];
-        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        let offset = 0;
-        while (offset < fileSize) {
-            const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-            const end = offset + chunkSize - 1;
-            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
-            offset += MAX_CHUNK_SIZE;
-        }
-        core.debug("Awaiting all uploads");
-        const responses = yield parallelAwait(uploads, 4);
-        fs.closeSync(fd);
-        //const responses = await Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
-        if (failedResponse) {
-            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
-        }
+        core.debug("Upload cache");
+        yield uploadFile(restClient, cacheId, archivePath);
         core.debug("Commiting cache");
         // Commit Cache
         const cacheSize = utils.getArchiveFileSize(archivePath);
							
								
								
									
dist/save/index.js (vendored, 65 lines changed)
@@ -1615,53 +1615,40 @@ function commitCache(restClient, cacheId, filesize) {
(identical to the dist/restore/index.js hunk above; both vendored files carry the same compiled bundle)
src/cacheHttpClient.ts

@@ -174,55 +174,30 @@ async function commitCache(
     );
 }
 
-async function parallelAwait(queue: Promise<any>[], concurrency: number): Promise<any[]> {
-    const workQueue = queue.reverse();
-    let completedWork: any[] = [];
-    let entries = queue.length;
-    while (entries > 0) {
-        if (entries < concurrency) {
-            completedWork.push(await Promise.all(workQueue));
-        } else {
-            let promises: Promise<any>[] = [];
-            let i: number;
-            for (i = 0; i < concurrency; i++) {
-                promises.push(workQueue.pop() ?? Promise.resolve());
-            }
-            completedWork.push(await Promise.all(promises));
-        }
-    }
-
-    return completedWork;
-}
-
-export async function saveCache(
-    cacheId: number,
-    archivePath: string
-): Promise<void> {
-    const restClient = createRestClient();
-
-    core.debug("Uploading chunks");
+async function uploadFile(restClient: RestClient, cacheId: number, archivePath: string): Promise<void> {
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: Promise<IRestResponse<void>>[] = [];
+    const responses: IRestResponse<void>[] = [];
 
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-    let offset = 0;
-    while (offset < fileSize) {
-        const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-        const end = offset + chunkSize - 1;
-        const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
-        offset += MAX_CHUNK_SIZE;
-    }
 
+    const concurrency = 4; // # of HTTP requests in parallel
+    const threads = new Array(concurrency);
     core.debug("Awaiting all uploads");
-    const responses = await parallelAwait(uploads, 4);
+    let offset = 0;
+    Promise.all(threads.map(async () => { // This might not work cause something something closures
+        while (offset < fileSize) {
+            const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
+            const start = offset;
+            const end = offset + chunkSize - 1;
+            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
+            responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
+        }
+    }));
     fs.closeSync(fd);
 
-
-    //const responses = await Promise.all(uploads);
-
     const failedResponse = responses.find(
         x => !isSuccessStatusCode(x.statusCode)
     );

@@ -232,6 +207,18 @@ export async function saveCache(
         );
     }
 
+    return;
+}
+
+export async function saveCache(
+    cacheId: number,
+    archivePath: string
+): Promise<void> {
+    const restClient = createRestClient();
+
+    core.debug("Upload cache");
+    await uploadFile(restClient, cacheId, archivePath);
+
     core.debug("Commiting cache");
     // Commit Cache
     const cacheSize = utils.getArchiveFileSize(archivePath);
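The change replaces the batched parallelAwait helper with a fixed pool of async workers that claim byte ranges from a shared offset counter. Below is a minimal, self-contained TypeScript sketch of that pattern, not the action's actual code: uploadChunk is a hypothetical stand-in for the real HTTP call, the MAX_CHUNK_SIZE value is assumed (the diff only references the constant), and, unlike the committed version, the Promise.all is awaited and the worker array is seeded with fill() so the map callback actually runs.

import * as fs from "fs";

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed value; the diff only references the constant

// Hypothetical stand-in for the real uploadChunk(restClient, resourceUrl, chunk, start, end).
async function uploadChunk(
    resourceUrl: string,
    chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<{ statusCode: number }> {
    // A real implementation would send the byte range to the cache service.
    return { statusCode: 204 };
}

async function uploadFile(resourceUrl: string, archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r"); // one descriptor shared by every worker
    const concurrency = 4; // number of HTTP requests in flight
    let offset = 0;

    try {
        // new Array(n).map() skips empty slots, so the array must be filled
        // before mapping; otherwise no worker ever starts.
        await Promise.all(
            new Array(concurrency).fill(0).map(async () => {
                // Each worker claims a range by reading and advancing the shared
                // offset before its first await; Node runs this callback
                // single-threaded up to that point, so two workers cannot
                // claim the same chunk.
                while (offset < fileSize) {
                    const start = offset;
                    const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
                    offset += MAX_CHUNK_SIZE;
                    const chunk = fs.createReadStream(archivePath, {
                        fd,
                        start,
                        end,
                        autoClose: false
                    });
                    const response = await uploadChunk(resourceUrl, chunk, start, end);
                    if (response.statusCode < 200 || response.statusCode >= 300) {
                        throw new Error(
                            `Cache service responded with ${response.statusCode} during chunk upload.`
                        );
                    }
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
}

One design note on the sketch: the committed code calls Promise.all without awaiting it, so saveCache can reach the commit step while chunks are still uploading, a concern the author's own "something something closures" comment hints at. Awaiting the pool, as above, closes that gap, and putting fs.closeSync in a finally block keeps a failed chunk from leaking the descriptor.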