Mirror of https://gitea.com/actions/cache.git
Test out 16 concurrent requests

This commit is contained in:
parent 4fcbc07edb
commit 8c77f01f0b
							
								
								
									
dist/restore/index.js (vendored, 7 changed lines)
@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
         const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
 
-        const concurrency = 8; // # of HTTP requests in parallel
+        const concurrency = 16; // # of HTTP requests in parallel
+        core.debug(`Concurrency: ${concurrency}`);
         const threads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
         yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-            core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
-                core.debug(`Start: ${start} End: ${end}`);
-                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                offset += MAX_CHUNK_SIZE;
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
							
								
								
									
dist/save/index.js (vendored, 7 changed lines)
@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
         const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
 
-        const concurrency = 8; // # of HTTP requests in parallel
+        const concurrency = 16; // # of HTTP requests in parallel
+        core.debug(`Concurrency: ${concurrency}`);
         const threads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
         yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-            core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
-                core.debug(`Start: ${start} End: ${end}`);
-                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                offset += MAX_CHUNK_SIZE;
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }

@@ -181,18 +181,17 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
 
-    const concurrency = 8; // # of HTTP requests in parallel
+    const concurrency = 16; // # of HTTP requests in parallel
+    core.debug(`Concurrency: ${concurrency}`);
     const threads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(threads.map(async () => { // This might not work cause something something closures
-        core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
+    await Promise.all(threads.map(async () => {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
-            core.debug(`Start: ${start} End: ${end}`);
-            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            offset += MAX_CHUNK_SIZE;
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
         }
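
The inline comments in this diff ask three questions: whether one file descriptor can back parallel reads, whether the closure over offset is safe, and why offset must be advanced before the upload is awaited. The sketch below is not the action's code, just a minimal self-contained TypeScript illustration of the same shared-offset worker-pool pattern; MAX_CHUNK_SIZE is an assumed value and uploadChunk is a hypothetical stub for the real uploadChunk(restClient, resourceUrl, chunk, start, end).

import * as fs from "fs";

// Assumed chunk size, for illustration only.
const MAX_CHUNK_SIZE = 4 * 1024 * 1024;

// Hypothetical stub for the real uploadChunk; it would send one ranged HTTP
// request, e.g. with a Content-Range of bytes <start>-<end>/*.
async function uploadChunk(
    chunk: fs.ReadStream,
    start: number,
    end: number
): Promise<void> {
    // ...issue the request and check the response status...
}

async function uploadFile(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;

    // One fd shared by every worker. This is safe because each read stream
    // below gets an explicit start/end, so it reads at absolute positions
    // instead of moving a shared file cursor.
    const fd = fs.openSync(archivePath, "r");

    const concurrency = 16; // # of HTTP requests in flight
    let offset = 0;         // next unclaimed byte, shared by all workers

    try {
        // The "threads" are plain async functions interleaved on Node's
        // single-threaded event loop, all closing over the same offset.
        const workers = [...new Array(concurrency).keys()].map(async () => {
            while (offset < fileSize) {
                const start = offset;
                const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
                // Claim the range before the first await: once this worker
                // suspends, another worker runs, and the claim must already
                // be recorded or both would upload the same chunk.
                offset += MAX_CHUNK_SIZE;

                const chunk = fs.createReadStream(archivePath, {
                    fd,
                    start,
                    end,
                    autoClose: false // keep the shared fd open for the others
                });
                await uploadChunk(chunk, start, end);
            }
        });
        await Promise.all(workers);
    } finally {
        fs.closeSync(fd);
    }
}

Because JavaScript is single-threaded, the read-then-increment of offset needs no lock: there is no await between reading offset into start and bumping it, so no other worker can interleave. That is what the removed "Do this before losing thread during await?" comment was guarding; moving the increment after the await would let two workers claim the same range. And capturing offset in every worker's closure is not a bug here, it is the mechanism that makes the counter shared.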