diff --git a/dist/restore/index.js b/dist/restore/index.js
index 1bf68d4..cbdd5bc 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -43294,12 +43294,43 @@ class CacheService {
     }
     restoreCache(paths, primaryKey, restoreKeys) {
         return __awaiter(this, void 0, void 0, function* () {
-            return "";
+            restoreKeys = restoreKeys || [];
+            const keys = [primaryKey, ...restoreKeys];
+            core.debug("Resolved Keys:");
+            core.debug(JSON.stringify(keys));
+            const compressionMethod = yield utils.getCompressionMethod();
+            // Find the first cache entry in S3 that matches one of the keys
+            const cacheEntry = yield this.getS3CacheKey(keys);
+            if (!cacheEntry) {
+                // Cache not found
+                return undefined;
+            }
+            const archivePath = path.join(yield utils.createTempDirectory(), cacheEntry);
+            core.debug(`Archive Path: ${archivePath}`);
+            try {
+                // Download the cache archive from S3
+                yield this.downloadFromS3(cacheEntry, archivePath);
+                if (core.isDebug()) {
+                    yield tar_1.listTar(archivePath, compressionMethod);
+                }
+                core.info(`Cache Size: ~${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
+                yield tar_1.extractTar(archivePath, compressionMethod);
+                core.info("Cache restored successfully");
+            }
+            finally {
+                // Try to delete the archive to save space
+                try {
+                    yield utils.unlinkFile(archivePath);
+                }
+                catch (error) {
+                    core.debug(`Failed to delete archive: ${error}`);
+                }
+            }
+            return cacheEntry;
         });
     }
     saveCache(paths, key) {
         return __awaiter(this, void 0, void 0, function* () {
-            const cacheId = this.getCacheId(key);
             const compressionMethod = yield utils.getCompressionMethod();
             const cachePaths = yield utils.resolvePaths(paths);
             core.debug("Cache Paths:");
@@ -43313,8 +43344,8 @@ class CacheService {
                     yield tar_1.listTar(archivePath, compressionMethod);
                 }
                 core.info(`Archive Size: ${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
-                core.debug(`Saving Cache (ID: ${cacheId})`);
-                yield this.uploadToS3(cacheId, archivePath);
+                core.debug(`Saving Cache (ID: ${key})`);
+                yield this.uploadToS3(key, archivePath);
             }
             finally {
                 // Try to delete the archive to save space
@@ -43330,20 +43361,73 @@ class CacheService {
     }
     uploadToS3(key, archivePath) {
         return __awaiter(this, void 0, void 0, function* () {
-            const client = new aws_sdk_1.S3();
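+            // The archive is read into memory and base64-encoded before upload; downloadFromS3 decodes it again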
             const data = fs_1.default.readFileSync(archivePath).toString("base64");
-            return client
+            return this._client
                 .putObject({
                 Bucket: this._bucket,
-                Key: key,
+                Key: path.join(this.getCacheFolder(), key),
                 Body: data
             })
                 .promise();
         });
     }
-    getCacheId(primaryKey) {
-        var _a;
-        return `${(_a = process.env["GITHUB_REPOSITORY"]) === null || _a === void 0 ? void 0 : _a.replace("/", "-").toLowerCase()}-${primaryKey}`;
+    downloadFromS3(key, savePath) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const response = yield this._client
+                    .getObject({
+                    Bucket: this._bucket,
+                    Key: path.join(this.getCacheFolder(), key)
+                })
+                    .promise();
+                // The archive was uploaded base64-encoded (see uploadToS3), so decode it before writing to disk
+                fs_1.default.writeFileSync(savePath, Buffer.from(response.Body.toString(), "base64"));
+            }
+            catch (err) {
+                core.warning("Could not download cache from S3");
+                core.warning(err.message);
+            }
+        });
+    }
+    getS3CacheKey(keys) {
+        return __awaiter(this, void 0, void 0, function* () {
+            // return first matching key
+            for (let i = 0; i < keys.length; i++) {
+                if (i === 0) {
+                    // look for exact match
+                    try {
+                        yield this._client
+                            .headObject({
+                            Bucket: this._bucket,
+                            Key: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        return keys[i];
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_a) { }
+                }
+                else {
+                    // look for the most recently modified object whose key matches the prefix
+                    try {
+                        const response = yield this._client
+                            .listObjectsV2({
+                            Bucket: this._bucket,
+                            Prefix: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        core.debug(JSON.stringify(response));
+                        const match = (response.Contents || [])
+                            .filter(o => o.Key && o.LastModified)
+                            .sort((a, b) => b.LastModified.getTime() - a.LastModified.getTime())[0];
+                        if (match && match.Key) {
+                            // strip the repository prefix so the bare cache key is returned
+                            return match.Key.slice(this.getCacheFolder().length + 1);
+                        }
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_b) { }
+                }
+            }
+            return undefined;
+        });
+    }
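+    // Cache objects are namespaced per repository: "<owner>-<repo>" (lowercased) is used as the S3 key prefix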
+    getCacheFolder() {
+        return process.env["GITHUB_REPOSITORY"]
+            .replace("/", "-")
+            .toLowerCase();
     }
 }
 exports.CacheService = CacheService;
diff --git a/dist/save/index.js b/dist/save/index.js
index 1e389d6..93f1986 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -43294,12 +43294,43 @@ class CacheService {
     }
     restoreCache(paths, primaryKey, restoreKeys) {
         return __awaiter(this, void 0, void 0, function* () {
-            return "";
+            restoreKeys = restoreKeys || [];
+            const keys = [primaryKey, ...restoreKeys];
+            core.debug("Resolved Keys:");
+            core.debug(JSON.stringify(keys));
+            const compressionMethod = yield utils.getCompressionMethod();
+            // Find the first cache entry in S3 that matches one of the keys
+            const cacheEntry = yield this.getS3CacheKey(keys);
+            if (!cacheEntry) {
+                // Cache not found
+                return undefined;
+            }
+            const archivePath = path.join(yield utils.createTempDirectory(), cacheEntry);
+            core.debug(`Archive Path: ${archivePath}`);
+            try {
+                // Download the cache archive from S3
+                yield this.downloadFromS3(cacheEntry, archivePath);
+                if (core.isDebug()) {
+                    yield tar_1.listTar(archivePath, compressionMethod);
+                }
+                core.info(`Cache Size: ~${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
+                yield tar_1.extractTar(archivePath, compressionMethod);
+                core.info("Cache restored successfully");
+            }
+            finally {
+                // Try to delete the archive to save space
+                try {
+                    yield utils.unlinkFile(archivePath);
+                }
+                catch (error) {
+                    core.debug(`Failed to delete archive: ${error}`);
+                }
+            }
+            return cacheEntry;
         });
     }
     saveCache(paths, key) {
         return __awaiter(this, void 0, void 0, function* () {
-            const cacheId = this.getCacheId(key);
             const compressionMethod = yield utils.getCompressionMethod();
             const cachePaths = yield utils.resolvePaths(paths);
             core.debug("Cache Paths:");
@@ -43313,8 +43344,8 @@ class CacheService {
                     yield tar_1.listTar(archivePath, compressionMethod);
                 }
                 core.info(`Archive Size: ${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
-                core.debug(`Saving Cache (ID: ${cacheId})`);
-                yield this.uploadToS3(cacheId, archivePath);
+                core.debug(`Saving Cache (ID: ${key})`);
+                yield this.uploadToS3(key, archivePath);
             }
             finally {
                 // Try to delete the archive to save space
@@ -43330,20 +43361,73 @@ class CacheService {
     }
     uploadToS3(key, archivePath) {
         return __awaiter(this, void 0, void 0, function* () {
-            const client = new aws_sdk_1.S3();
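+            // The archive is read into memory and base64-encoded before upload; downloadFromS3 decodes it again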
             const data = fs_1.default.readFileSync(archivePath).toString("base64");
-            return client
+            return this._client
                 .putObject({
                 Bucket: this._bucket,
-                Key: key,
+                Key: path.join(this.getCacheFolder(), key),
                 Body: data
             })
                 .promise();
         });
     }
-    getCacheId(primaryKey) {
-        var _a;
-        return `${(_a = process.env["GITHUB_REPOSITORY"]) === null || _a === void 0 ? void 0 : _a.replace("/", "-").toLowerCase()}-${primaryKey}`;
+    downloadFromS3(key, savePath) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const response = yield this._client
+                    .getObject({
+                    Bucket: this._bucket,
+                    Key: path.join(this.getCacheFolder(), key)
+                })
+                    .promise();
+                // The archive was uploaded base64-encoded (see uploadToS3), so decode it before writing to disk
+                fs_1.default.writeFileSync(savePath, Buffer.from(response.Body.toString(), "base64"));
+            }
+            catch (err) {
+                core.warning("Could not download cache from S3");
+                core.warning(err.message);
+            }
+        });
+    }
+    getS3CacheKey(keys) {
+        return __awaiter(this, void 0, void 0, function* () {
+            // return first matching key
+            for (let i = 0; i < keys.length; i++) {
+                if (i === 0) {
+                    // look for exact match
+                    try {
+                        yield this._client
+                            .headObject({
+                            Bucket: this._bucket,
+                            Key: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        return keys[i];
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_a) { }
+                }
+                else {
+                    // look for the most recently modified object whose key matches the prefix
+                    try {
+                        const response = yield this._client
+                            .listObjectsV2({
+                            Bucket: this._bucket,
+                            Prefix: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        core.debug(JSON.stringify(response));
+                        const match = (response.Contents || [])
+                            .filter(o => o.Key && o.LastModified)
+                            .sort((a, b) => b.LastModified.getTime() - a.LastModified.getTime())[0];
+                        if (match && match.Key) {
+                            // strip the repository prefix so the bare cache key is returned
+                            return match.Key.slice(this.getCacheFolder().length + 1);
+                        }
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_b) { }
+                }
+            }
+            return undefined;
+        });
+    }
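+    // Cache objects are namespaced per repository: "<owner>-<repo>" (lowercased) is used as the S3 key prefix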
+    getCacheFolder() {
+        return process.env["GITHUB_REPOSITORY"]
+            .replace("/", "-")
+            .toLowerCase();
     }
 }
 exports.CacheService = CacheService;
diff --git a/package.json b/package.json
index 7af2581..8b95911 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
diff --git a/src/cache.service.ts b/src/cache.service.ts
index c60f8d6..e5890ce 100644
--- a/src/cache.service.ts
+++ b/src/cache.service.ts
@@ -1,7 +1,12 @@
 import * as utils from "@actions/cache/lib/internal/cacheUtils";
-import { createTar, listTar } from "@actions/cache/lib/internal/tar";
+import {
+    createTar,
+    extractTar,
+    listTar
+} from "@actions/cache/lib/internal/tar";
 import * as core from "@actions/core";
 import { AWSError, S3 } from "aws-sdk";
+import { GetObjectOutput, ListObjectsV2Output } from "aws-sdk/clients/s3";
 import { PromiseResult } from "aws-sdk/lib/request";
 import filesize from "filesize";
 import fs from "fs";
@@ -32,12 +37,54 @@ export class CacheService {
         primaryKey: string,
         restoreKeys: string[]
     ): Promise<string | undefined> {
-        return "";
+        restoreKeys = restoreKeys || [];
+        const keys = [primaryKey, ...restoreKeys];
+
+        core.debug("Resolved Keys:");
+        core.debug(JSON.stringify(keys));
+
+        const compressionMethod = await utils.getCompressionMethod();
+
+        // Find the first cache entry in S3 that matches one of the keys
+        const cacheEntry = await this.getS3CacheKey(keys);
+        if (!cacheEntry) {
+            // Cache not found
+            return undefined;
+        }
+
+        const archivePath = path.join(
+            await utils.createTempDirectory(),
+            cacheEntry
+        );
+        core.debug(`Archive Path: ${archivePath}`);
+
+        try {
+            // Download the cache archive from S3
+            await this.downloadFromS3(cacheEntry, archivePath);
+
+            if (core.isDebug()) {
+                await listTar(archivePath, compressionMethod);
+            }
+
+            core.info(
+                `Cache Size: ~${filesize(fs.statSync(archivePath).size)}`
+            );
+
+            await extractTar(archivePath, compressionMethod);
+            core.info("Cache restored successfully");
+        } finally {
+            // Try to delete the archive to save space
+            try {
+                await utils.unlinkFile(archivePath);
+            } catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
+
+        return cacheEntry;
     }
 
     async saveCache(paths: string[], key: string): Promise<string> {
-        const cacheId: string = this.getCacheId(key);
-
         const compressionMethod = await utils.getCompressionMethod();
 
         const cachePaths = await utils.resolvePaths(paths);
@@ -62,8 +109,8 @@ export class CacheService {
                 `Archive Size: ${filesize(fs.statSync(archivePath).size)}`
             );
 
-            core.debug(`Saving Cache (ID: ${cacheId})`);
-            await this.uploadToS3(cacheId, archivePath);
+            core.debug(`Saving Cache (ID: ${key})`);
+            await this.uploadToS3(key, archivePath);
         } finally {
             // Try to delete the archive to save space
             try {
@@ -80,21 +127,67 @@ export class CacheService {
         key: string,
         archivePath: string
     ): Promise<PromiseResult<S3.PutObjectOutput, AWSError>> {
-        const client = new S3();
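+        // The archive is read into memory and base64-encoded before upload; downloadFromS3 decodes it again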
         const data = fs.readFileSync(archivePath).toString("base64");
 
-        return client
+        return this._client
             .putObject({
                 Bucket: this._bucket,
-                Key: key,
+                Key: path.join(this.getCacheFolder(), key),
                 Body: data
             })
             .promise();
     }
 
-    private getCacheId(primaryKey: string): string {
-        return `${process.env["GITHUB_REPOSITORY"]
-            ?.replace("/", "-")
-            .toLowerCase()}-${primaryKey}`;
+    private async downloadFromS3(key: string, savePath: string): Promise<void> {
+        try {
+            const response: GetObjectOutput = await this._client
+                .getObject({
+                    Bucket: this._bucket,
+                    Key: path.join(this.getCacheFolder(), key)
+                })
+                .promise();
+            // The archive was uploaded base64-encoded (see uploadToS3), so decode it before writing to disk
+            fs.writeFileSync(
+                savePath,
+                Buffer.from((response.Body as Buffer).toString(), "base64")
+            );
+        } catch (err) {
+            core.warning("Could not download cache from S3");
+            core.warning(err.message);
+        }
+    }
+
+    private async getS3CacheKey(keys: string[]): Promise<string | undefined> {
+        // return first matching key
+        for (let i = 0; i < keys.length; i++) {
+            if (i === 0) {
+                // look for exact match
+                try {
+                    await this._client
+                        .headObject({
+                            Bucket: this._bucket,
+                            Key: path.join(this.getCacheFolder(), keys[i])
+                        })
+                        .promise();
+                    return keys[i];
+                    // eslint-disable-next-line no-empty
+                } catch {}
+            } else {
+                // look for the most recently modified object whose key matches the prefix
+                try {
+                    const response: ListObjectsV2Output = await this._client
+                        .listObjectsV2({
+                            Bucket: this._bucket,
+                            Prefix: path.join(this.getCacheFolder(), keys[i])
+                        })
+                        .promise();
+                    core.debug(JSON.stringify(response));
+                    const match = (response.Contents || [])
+                        .filter(o => o.Key && o.LastModified)
+                        .sort((a, b) =>
+                            (b.LastModified as Date).getTime() -
+                            (a.LastModified as Date).getTime()
+                        )[0];
+                    if (match && match.Key) {
+                        // strip the repository prefix so the bare cache key is returned
+                        return match.Key.slice(this.getCacheFolder().length + 1);
+                    }
+                    // eslint-disable-next-line no-empty
+                } catch {}
+            }
+        }
+        return undefined;
+    }
+
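+    // Cache objects are namespaced per repository: "<owner>-<repo>" (lowercased) is used as the S3 key prefix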
+    private getCacheFolder(): string {
+        return (process.env["GITHUB_REPOSITORY"] as string)
+            .replace("/", "-")
+            .toLowerCase();
     }
 }