From 23daa0a63872c5e06df3e0317e3ec9636f09c39b Mon Sep 17 00:00:00 2001
From: Josh Gross <joshmgross@github.com>
Date: Mon, 23 Dec 2019 10:45:46 -0500
Subject: [PATCH] PR feedback

---
 __tests__/save.test.ts |  2 +-
 package-lock.json      |  2 +-
 src/cacheHttpClient.ts | 73 ++++++++++++++++++------------------------
 src/restore.ts         |  4 +--
 src/save.ts            |  8 ++---
 5 files changed, 40 insertions(+), 49 deletions(-)

diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts
index 4381ee3..b355076 100644
--- a/__tests__/save.test.ts
+++ b/__tests__/save.test.ts
@@ -208,7 +208,7 @@ test("save with large cache outputs warning", async () => {
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
diff --git a/package-lock.json b/package-lock.json
index 986b08b..37e50d2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "1.0.3",
+  "version": "1.1.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 626d051..7d3e348 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -150,8 +150,8 @@ async function uploadChunk(
 ): Promise<IRestResponse<void>> {
     core.debug(
         `Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(
+        start +
+        1} bytes at offset ${start} with content range: ${getContentRange(
             start,
             end
         )}`
@@ -177,6 +177,7 @@ async function uploadChunk(
     }
 
     if (isRetryableStatusCode(response.statusCode)) {
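+        // Retry the chunk upload once when the service responds with a retryable status code.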
+        core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
         const retryResponse = await uploadChunkRequest();
         if (isSuccessStatusCode(retryResponse.statusCode)) {
             return retryResponse;
@@ -199,53 +200,43 @@ async function uploadFile(
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r");
 
-    const concurrency = 4; // # of HTTP requests in parallel
-    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
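+    // Upload concurrency and chunk size can be overridden via environment variables for tuning.
+    // Use || rather than ?? so an unset variable (Number(undefined) is NaN) still falls back to the defaults.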
+    const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) || 4; // # of HTTP requests in parallel
+    const MAX_CHUNK_SIZE = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) || 32000000; // 32 MB Chunks
     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
 
     const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(
-        parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize =
-                    offset + MAX_CHUNK_SIZE > fileSize
-                        ? fileSize - offset
-                        : MAX_CHUNK_SIZE;
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                });
-                responses.push(
-                    await uploadChunk(
-                        restClient,
-                        resourceUrl,
-                        chunk,
+
+    try {
+        await Promise.all(
+            parallelUploads.map(async () => {
+                while (offset < fileSize) {
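+                    // Each parallel worker claims the next chunk by reading and then advancing the shared offset.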
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    const chunk = fs.createReadStream(archivePath, {
+                        fd,
                         start,
-                        end
-                    )
-                );
-            }
-        })
-    );
-
-    fs.closeSync(fd);
-
-    const failedResponse = responses.find(
-        x => !isSuccessStatusCode(x.statusCode)
-    );
-    if (failedResponse) {
-        throw new Error(
-            `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
+                        end,
+                        autoClose: false
+                    });
+                    responses.push(
+                        await uploadChunk(
+                            restClient,
+                            resourceUrl,
+                            chunk,
+                            start,
+                            end
+                        )
+                    );
+                }
+            })
         );
+    } finally {
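+        // Always close the file descriptor, even if a chunk upload fails.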
+        fs.closeSync(fd);
     }
-
     return;
 }
 
diff --git a/src/restore.ts b/src/restore.ts
index 32b5b5e..4911e7e 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -60,7 +60,7 @@ async function run(): Promise<void> {
 
         try {
             const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
-            if (!cacheEntry || !cacheEntry?.archiveLocation) {
+            if (!cacheEntry?.archiveLocation) {
                 core.info(
                     `Cache not found for input keys: ${keys.join(", ")}.`
                 );
@@ -78,7 +78,7 @@ async function run(): Promise<void> {
 
             // Download the cache from the cache entry
             await cacheHttpClient.downloadCache(
-                cacheEntry?.archiveLocation,
+                cacheEntry.archiveLocation,
                 archivePath
             );
 
diff --git a/src/save.ts b/src/save.ts
index 9233f08..ee64e42 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -36,7 +36,7 @@ async function run(): Promise<void> {
 
         core.debug("Reserving Cache");
         const cacheId = await cacheHttpClient.reserveCache(primaryKey);
-        if (cacheId < 0) {
+        if (cacheId === -1) {
             core.info(
                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
             );
@@ -62,13 +62,13 @@ async function run(): Promise<void> {
         if (archiveFileSize > fileSizeLimit) {
             utils.logWarning(
                 `Cache size of ~${Math.round(
-                    archiveFileSize / (1024 * 1024 * 1024)
-                )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
+                    archiveFileSize / (1024 * 1024)
+                )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
             );
             return;
         }
 
-        core.debug("Saving Cache");
+        core.debug(`Saving Cache (ID: ${cacheId})`);
         await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);