From 8c77f01f0b05f4348a731edd4016d10dc9599d6a Mon Sep 17 00:00:00 2001
From: Josh Gross <jogros@microsoft.com>
Date: Tue, 17 Dec 2019 17:35:30 -0500
Subject: [PATCH] Test out 16 concurrent requests

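Double the upload concurrency from 8 to 16 parallel requests to see whether
cache uploads scale with more in-flight chunks. Log the concurrency once per
upload instead of printing Offset/Start/End for every chunk, keeping the
debug output readable at the higher request count.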
---
 dist/restore/index.js  | 7 +++----
 dist/save/index.js     | 7 +++----
 src/cacheHttpClient.ts | 9 ++++-----
 3 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/dist/restore/index.js b/dist/restore/index.js
index dd689a7..cc9b6e3 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
         const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        const concurrency = 8; // # of HTTP requests in parallel
+        const concurrency = 16; // # of HTTP requests in parallel
+        core.debug(`Concurrency: ${concurrency}`);
         const threads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
         yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-            core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
-                core.debug(`Start: ${start} End: ${end}`);
-                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                offset += MAX_CHUNK_SIZE;
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
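
A worked example of the chunk arithmetic above, assuming a hypothetical
MAX_CHUNK_SIZE of 4 MiB (the real constant is defined elsewhere in this file)
and a 10 MiB archive:

    // iteration 1: start = 0 MiB, end =  4 MiB - 1, chunkSize = 4 MiB
    // iteration 2: start = 4 MiB, end =  8 MiB - 1, chunkSize = 4 MiB
    // iteration 3: start = 8 MiB, end = 10 MiB - 1, chunkSize = 2 MiB (clamped)

The ternary clamps the last chunk to fileSize - offset so the final read
stream never runs past end of file; end is inclusive, matching
fs.createReadStream's range semantics.
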
diff --git a/dist/save/index.js b/dist/save/index.js
index c5a5a02..e7f8045 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -1622,18 +1622,17 @@ function uploadFile(restClient, cacheId, archivePath) {
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
         const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        const concurrency = 8; // # of HTTP requests in parallel
+        const concurrency = 16; // # of HTTP requests in parallel
+        core.debug(`Concurrency: ${concurrency}`);
         const threads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
         yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
-            core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
-                core.debug(`Start: ${start} End: ${end}`);
-                offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+                offset += MAX_CHUNK_SIZE;
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
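
On the `Do this before losing thread during await?` comment dropped above:
the increment does need to stay ahead of the first await. A JavaScript task
runs uninterrupted until it awaits, so reading offset into start/end and then
bumping it is atomic with respect to the other 15 tasks; incrementing only
after uploadChunk resolved would let two tasks claim the same byte range.
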
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index e5f8d85..617528a 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -181,18 +181,17 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
 
-    const concurrency = 8; // # of HTTP requests in parallel
+    const concurrency = 16; // # of HTTP requests in parallel
+    core.debug(`Concurrency: ${concurrency}`);
     const threads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(threads.map(async () => { // This might not work cause something something closures
-        core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
+    await Promise.all(threads.map(async () => {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
-            core.debug(`Start: ${start} End: ${end}`);
-            offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
+            offset += MAX_CHUNK_SIZE;
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
         }
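
The two dist hunks above mirror this source change. Since the pattern leans
on cooperative scheduling rather than real threads, here is a minimal
standalone sketch of the shared-offset worker pool, assuming a hypothetical
4 MiB MAX_CHUNK_SIZE and a stubbed uploadChunk in place of the real REST call
(both are defined elsewhere in cacheHttpClient.ts):

    import * as fs from "fs";

    const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed value for illustration

    // Stand-in for the real uploadChunk, which PATCHes one byte range to
    // the cache service; this stub just drains the stream.
    async function uploadChunk(
        resourceUrl: string,
        chunk: fs.ReadStream,
        start: number,
        end: number
    ): Promise<void> {
        for await (const _ of chunk) {
            // discard; a real client sends these bytes as the request body
        }
    }

    async function uploadFile(resourceUrl: string, archivePath: string): Promise<void> {
        const fileSize = fs.statSync(archivePath).size;
        const fd = fs.openSync(archivePath, "r"); // one fd shared by every worker
        const concurrency = 16; // # of HTTP requests in parallel
        let offset = 0;

        // Each "thread" is an async task. A task runs uninterrupted until
        // its next await, so computing start/end and bumping offset is
        // atomic with respect to the other tasks: no two of them can claim
        // the same byte range.
        const workers = [...new Array(concurrency).keys()].map(async () => {
            while (offset < fileSize) {
                const start = offset;
                const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1; // inclusive
                offset += MAX_CHUNK_SIZE; // claim the range before the first await
                const chunk = fs.createReadStream(archivePath, {
                    fd,
                    start,
                    end,
                    autoClose: false // don't let one worker close the shared fd
                });
                await uploadChunk(resourceUrl, chunk, start, end);
            }
        });

        await Promise.all(workers);
        fs.closeSync(fd);
    }

On the open question in the fd comment: Node issues positioned reads when a
stream is created over an fd with an explicit start, so several streams can
share one descriptor in parallel, provided autoClose stays off so the first
stream to finish does not close the fd out from under the others.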