Merge pull request #305 from actions/fix-upload-chunk

Fix upload chunk retries

Commit 5ddc028cc8
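This merge changes uploadChunk to take a stream factory (openStream) instead of an already-opened stream (data), so a retried PATCH request gets a freshly opened read stream rather than one the failed attempt has already consumed. The sketch below is only an illustration of that pattern, not the action's actual code; sendWithRetry and chunkStreamFactory are hypothetical names, while sendStream, uploadChunk, and fs.createReadStream appear in the diff itself.

// Hypothetical sketch of the retry pattern; not taken from the repository.
// A Node.js readable stream can only be consumed once, so every retried
// request must be handed a freshly opened stream, not the drained one.
import * as fs from "fs";

async function sendWithRetry(
    send: (body: NodeJS.ReadableStream) => Promise<number>, // resolves to an HTTP status code
    openStream: () => NodeJS.ReadableStream,
    maxAttempts = 2
): Promise<number> {
    let status = 0;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        // Open a new stream for every attempt; a previous attempt may have
        // partially or fully drained its stream before failing.
        status = await send(openStream());
        if (status < 500) {
            break; // success or a non-retryable client error
        }
    }
    return status;
}

// A factory that re-opens the same byte range of the archive on demand,
// mirroring the `() => fs.createReadStream(...)` argument in the diff below.
function chunkStreamFactory(
    archivePath: string,
    start: number,
    end: number
): () => NodeJS.ReadableStream {
    return () => fs.createReadStream(archivePath, { start, end });
}

In the diff, uploadFile passes () => fs.createReadStream(archivePath, { fd, start, end, autoClose: false }) to uploadChunk, which calls openStream() for each request attempt, so a retry after a transient failure re-reads the chunk from disk instead of sending an empty or truncated body.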
					
				
							
								
								
									
dist/restore/index.js (vendored, 9 changed lines)
@@ -2326,7 +2326,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2336,7 +2336,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Range": getContentRange(start, end)
         };
         const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
+            return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders);
         });
         const response = yield uploadChunkRequest();
         if (isSuccessStatusCode(response.message.statusCode)) {
@@ -2379,13 +2379,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                     const start = offset;
                     const end = offset + chunkSize - 1;
                     offset += MAX_CHUNK_SIZE;
-                    const chunk = fs.createReadStream(archivePath, {
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                         fd,
                         start,
                         end,
                         autoClose: false
-                    });
-                    yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                    }), start, end);
                 }
             })));
         }
							
								
								
									
dist/save/index.js (vendored, 9 changed lines)
@@ -2326,7 +2326,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2336,7 +2336,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Range": getContentRange(start, end)
         };
         const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
+            return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders);
         });
         const response = yield uploadChunkRequest();
         if (isSuccessStatusCode(response.message.statusCode)) {
@@ -2379,13 +2379,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                     const start = offset;
                     const end = offset + chunkSize - 1;
                     offset += MAX_CHUNK_SIZE;
-                    const chunk = fs.createReadStream(archivePath, {
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                         fd,
                         start,
                         end,
                         autoClose: false
-                    });
-                    yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                    }), start, end);
                 }
             })));
         }
[TypeScript source file; name not shown in this excerpt]

@@ -206,7 +206,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
     httpClient: HttpClient,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number
 ): Promise<void> {
@@ -227,7 +227,7 @@ async function uploadChunk(
         return await httpClient.sendStream(
             "PATCH",
             resourceUrl,
-            data,
+            openStream(),
             additionalHeaders
         );
     };
@@ -290,17 +290,17 @@ async function uploadFile(
                     const start = offset;
                     const end = offset + chunkSize - 1;
                     offset += MAX_CHUNK_SIZE;
-                    const chunk = fs.createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    });

                     await uploadChunk(
                         httpClient,
                         resourceUrl,
-                        chunk,
+                        () =>
+                            fs.createReadStream(archivePath, {
+                                fd,
+                                start,
+                                end,
+                                autoClose: false
+                            }),
                         start,
                         end
                     );