From 6bf3f6b5b0005f4d4755d56f3032f17d73f1878b Mon Sep 17 00:00:00 2001 From: ducku Date: Sun, 22 Oct 2023 19:52:05 -0700 Subject: [PATCH 1/6] add timed check for expired files --- package-lock.json | 35 +++++++++++++++++++++++++++++++++++ package.json | 1 + src/config.json | 3 ++- src/server.mjs | 31 +++++++++++++++++++++++++++++++ 4 files changed, 69 insertions(+), 1 deletion(-) diff --git a/package-lock.json b/package-lock.json index ed113dbf..b43a36f2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -36,6 +36,7 @@ "gh-pages": "^4.0.0", "markdown-to-jsx": "^7.2.0", "multer": "^1.4.5-lts.1", + "node-cron": "^3.0.2", "path-is-inside": "^1.0.2", "polyfill-object.fromentries": "^1.0.1", "prop-types": "^15.8.1", @@ -13888,6 +13889,25 @@ "tslib": "^2.0.3" } }, + "node_modules/node-cron": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.2.tgz", + "integrity": "sha512-iP8l0yGlNpE0e6q1o185yOApANRe47UPbLf4YxfbiNHt/RU5eBcGB/e0oudruheSf+LQeDMezqC5BVAb5wwRcQ==", + "dependencies": { + "uuid": "8.3.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/node-cron/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", @@ -29771,6 +29791,21 @@ "tslib": "^2.0.3" } }, + "node-cron": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.2.tgz", + "integrity": "sha512-iP8l0yGlNpE0e6q1o185yOApANRe47UPbLf4YxfbiNHt/RU5eBcGB/e0oudruheSf+LQeDMezqC5BVAb5wwRcQ==", + "requires": { + "uuid": "8.3.2" + }, + "dependencies": { + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } + } + }, "node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", diff --git a/package.json b/package.json index 72e781ea..1bdcfe04 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "gh-pages": "^4.0.0", "markdown-to-jsx": "^7.2.0", "multer": "^1.4.5-lts.1", + "node-cron": "^3.0.2", "path-is-inside": "^1.0.2", "polyfill-object.fromentries": "^1.0.1", "prop-types": "^15.8.1", diff --git a/src/config.json b/src/config.json index a39f9ae9..20128fa0 100644 --- a/src/config.json +++ b/src/config.json @@ -88,6 +88,7 @@ }, "MAXUPLOADSIZE": 5242880, - "pickerTypeOptions": ["mounted", "upload"] + "pickerTypeOptions": ["mounted", "upload"], + "fileExpirationTime": 86400 } diff --git a/src/server.mjs b/src/server.mjs index df6bbaf3..44b56a7b 100644 --- a/src/server.mjs +++ b/src/server.mjs @@ -28,6 +28,7 @@ import { finished } from "stream/promises"; import sanitize from "sanitize-filename"; import { createHash } from "node:crypto"; import { JSONParser} from '@streamparser/json'; +import cron from "node-cron"; @@ -113,6 +114,36 @@ var limits = { }; var upload = multer({ storage, limits }); +// runs every hour +// deletes any files in the download directory past the set fileExpirationTime set in config +cron.schedule('0 * * * *', () => { + console.log("cron scheduled check"); + const currentTime = new Date().getTime(); + // loop through these specified directories + for (const dir of 
[DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) {
+    fs.readdir(dir, (err, files) => {
+
+      if (!files || files.length === 0) {
+        return;
+      }
+
+      files.forEach((file) => {
+        const filePath = path.join(dir, file);
+        // get file statistics
+        fs.stat(filePath, (statErr, stats) => {
+          const creationTime = stats.birthtime.getTime();
+          if (currentTime - creationTime >= config.fileExpirationTime) {
+            // delete file (fs.unlink requires a callback)
+            if (file !== ".gitignore") {
+              fs.unlink(filePath, () => {});
+            }
+          }
+        });
+      });
+    });
+  }
+});
+
 const app = express();
 
 // Configure global server settings

From 40442ed21d17005cbb308c3300fa8538ca7664d3 Mon Sep 17 00:00:00 2001
From: ducku
Date: Sat, 28 Oct 2023 18:42:02 -0700
Subject: [PATCH 2/6] deletion of expired files now checks for nested directories

---
 src/server.mjs | 60 ++++++++++++++++++++++++++++++++------------------
 1 file changed, 39 insertions(+), 21 deletions(-)

diff --git a/src/server.mjs b/src/server.mjs
index 44b56a7b..e54d1bf0 100644
--- a/src/server.mjs
+++ b/src/server.mjs
@@ -114,33 +114,51 @@ var limits = {
 };
 var upload = multer({ storage, limits });
 
+// deletes expired files given a directory, recursively calls itself for nested directories
+// expired files are files not accessed for a certain amount of time
+function deleteExpiredFiles(directoryPath) {
+  const currentTime = new Date().getTime();
+
+  if (!fs.existsSync(directoryPath)) {
+    return;
+  }
+
+  const files = fs.readdirSync(directoryPath);
+
+  files.forEach((file) => {
+    const filePath = path.join(directoryPath, file);
+
+
+    if (fs.statSync(filePath).isFile()) {
+      // check to see if file needs to be deleted
+      const lastAccessedTime = fs.statSync(filePath).atime;
+      console.log(`${filePath}: ${lastAccessedTime}, currentTime: ${currentTime}`);
+      if (currentTime - lastAccessedTime >= config.fileExpirationTime) {
+        if (file !== ".gitignore") {
+          fs.unlinkSync(filePath);
+          console.log("Deleting file: ", filePath);
+        }
+      }
+    } else if (fs.statSync(filePath).isDirectory()) {
+      // call deleteExpiredFiles on the nested directory
+      deleteExpiredFiles(filePath);
+
+      // if the nested directory is empty after deleting expired files, remove it
+      if (fs.readdirSync(filePath).length === 0) {
+        fs.rmdirSync(filePath);
+        console.log("Deleting directory: ", filePath);
+      }
+    }
+  });
+}
+
 // runs every hour
 // deletes any files in the download directory past the set fileExpirationTime set in config
 cron.schedule('0 * * * *', () => {
   console.log("cron scheduled check");
-  const currentTime = new Date().getTime();
   // loop through these specified directories
   for (const dir of [DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) {
-    fs.readdir(dir, (err, files) => {
-
-      if (!files || files.length === 0) {
-        return;
-      }
-
-      files.forEach((file) => {
-        const filePath = path.join(dir, file);
-        // get file statistics
-        fs.stat(filePath, (statErr, stats) => {
-          const creationTime = stats.birthtime.getTime();
-          if (currentTime - creationTime >= config.fileExpirationTime) {
-            // delete file (fs.unlink requires a callback)
-            if (file !== ".gitignore") {
-              fs.unlink(filePath, () => {});
-            }
-          }
-        });
-      });
-    });
+    deleteExpiredFiles(dir);
   }
 });
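
Note on the expiry check above: new Date().getTime() and the stat timestamps are millisecond epoch values, so config.fileExpirationTime is compared in milliseconds. The config value of 86400 added in patch 1 reads like seconds (one day), but as written it would expire files after roughly 86 seconds unless it is converted first. A minimal standalone sketch of the predicate, assuming a millisecond expiration window (EXPIRATION_MS here is illustrative and stands in for config.fileExpirationTime):

    // expiry-sketch.mjs -- not part of the patches, just the core check in isolation
    import fs from "fs";

    const EXPIRATION_MS = 24 * 60 * 60 * 1000; // one day, in milliseconds

    function isExpired(filePath, nowMs = Date.now()) {
      const stats = fs.statSync(filePath);
      // stats.atime is a Date; calling getTime() keeps the units explicit
      return nowMs - stats.atime.getTime() >= EXPIRATION_MS;
    }
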
From 41eb87b03902ff07a57637db16fa0bff6390098a Mon Sep 17 00:00:00 2001
From: ducku
Date: Sun, 29 Oct 2023 14:48:29 -0700
Subject: [PATCH 3/6] implement directory locking for deleting expired files

---
 package-lock.json | 17 +++++++++++++++
 package.json      |  1 +
 src/server.mjs    | 54 ++++++++++++++++++++++++++++++++++++++++++-----
 3 files changed, 67 insertions(+), 5 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index b43a36f2..914cb612 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -34,6 +34,7 @@
       "express": "^4.18.2",
       "fs-extra": "^10.1.0",
       "gh-pages": "^4.0.0",
+      "lockfile": "^1.0.4",
       "markdown-to-jsx": "^7.2.0",
       "multer": "^1.4.5-lts.1",
       "node-cron": "^3.0.2",
@@ -13460,6 +13461,14 @@
         "node": ">=8"
       }
     },
+    "node_modules/lockfile": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
+      "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
+      "dependencies": {
+        "signal-exit": "^3.0.2"
+      }
+    },
     "node_modules/lodash": {
       "version": "4.17.21",
       "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -29464,6 +29473,14 @@
         "p-locate": "^4.1.0"
       }
     },
+    "lockfile": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
+      "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
+      "requires": {
+        "signal-exit": "^3.0.2"
+      }
+    },
     "lodash": {
       "version": "4.17.21",
       "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
diff --git a/package.json b/package.json
index 1bdcfe04..dff22ca3 100644
--- a/package.json
+++ b/package.json
@@ -29,6 +29,7 @@
     "express": "^4.18.2",
     "fs-extra": "^10.1.0",
     "gh-pages": "^4.0.0",
+    "lockfile": "^1.0.4",
     "markdown-to-jsx": "^7.2.0",
     "multer": "^1.4.5-lts.1",
     "node-cron": "^3.0.2",
diff --git a/src/server.mjs b/src/server.mjs
index e54d1bf0..ebd39443 100644
--- a/src/server.mjs
+++ b/src/server.mjs
@@ -29,6 +29,7 @@ import sanitize from "sanitize-filename";
 import { createHash } from "node:crypto";
 import { JSONParser} from '@streamparser/json';
 import cron from "node-cron";
+import lockFile from "lockfile";
 
 
@@ -117,6 +118,7 @@ var upload = multer({ storage, limits });
 // deletes expired files given a directory, recursively calls itself for nested directories
 // expired files are files not accessed for a certain amount of time
 function deleteExpiredFiles(directoryPath) {
+  console.log("deleting expired files in ", directoryPath);
   const currentTime = new Date().getTime();
@@ -128,13 +130,11 @@ function deleteExpiredFiles(directoryPath) {
   files.forEach((file) => {
     const filePath = path.join(directoryPath, file);
 
-
     if (fs.statSync(filePath).isFile()) {
       // check to see if file needs to be deleted
       const lastAccessedTime = fs.statSync(filePath).atime;
-      console.log(`${filePath}: ${lastAccessedTime}, currentTime: ${currentTime}`);
       if (currentTime - lastAccessedTime >= config.fileExpirationTime) {
-        if (file !== ".gitignore") {
+        if (file !== ".gitignore" && file !== "directory.lock") {
           fs.unlinkSync(filePath);
           console.log("Deleting file: ", filePath);
         }
@@ -152,13 +152,57 @@ function deleteExpiredFiles(directoryPath) {
   });
 }
 
+function lockDirectory(directoryPath) {
+  if (!fs.existsSync(directoryPath)) {
+    return 1;
+  }
+  const lockOptions = {
+    "retries": 10, // number of tries before giving up
+    "retryWait": 1000 // number of milliseconds to wait before trying to acquire the lock again
+  }
+
+  const lockFilePath = path.join(directoryPath, "directory.lock");
+  // attempt to acquire the lock for the directory
+  lockFile.lock(lockFilePath, lockOptions, function (err) {
+    if (err) {
+      console.log("failed to acquire locks for", directoryPath);
+      return err;
+    } else {
+      console.log("successfully acquired locks for", directoryPath);
+      return 0;
+    }
+  });
+}
+
+function unlockDirectory(directoryPath) {
+  if (!fs.existsSync(directoryPath)) {
+    return 1;
+  }
+
+  const lockFilePath = path.join(directoryPath, "directory.lock");
+  lockFile.unlock(lockFilePath, function (err) {
+    if (err) {
+      console.log("failed to release locks for", directoryPath);
+      return err;
+    } else {
+      console.log("successfully released locks for ", directoryPath);
+      return 0;
+    }
+  });
+}
+
-// runs every hour
+// runs every minute
 // deletes any files in the download directory past the set fileExpirationTime set in config
-cron.schedule('0 * * * *', () => {
+cron.schedule('* * * * *', () => {
   console.log("cron scheduled check");
   // loop through these specified directories
   for (const dir of [DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) {
-    deleteExpiredFiles(dir);
+    const err = lockDirectory(dir);
+    if (err) {
+      console.log("unable to delete expired files for", dir);
+    } else {
+      deleteExpiredFiles(dir);
+      unlockDirectory(dir);
+    }
   }
 });
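
Note: lockFile.lock() is asynchronous, so the return err / return 0 statements above run inside the callback and are discarded -- lockDirectory() itself always returns undefined, which means the const err = lockDirectory(dir) check in the cron job never fires. One way to make callers actually wait, sketched with Node's promisify (withDirectoryLock is a hypothetical helper, not part of the patch, and assumes lockfile's standard error-first callbacks):

    import lockFile from "lockfile";
    import path from "path";
    import { promisify } from "node:util";

    const lock = promisify(lockFile.lock);
    const unlock = promisify(lockFile.unlock);

    // run fn while holding <directory>/directory.lock, releasing it afterwards
    async function withDirectoryLock(directoryPath, fn) {
      const lockFilePath = path.join(directoryPath, "directory.lock");
      await lock(lockFilePath, { retries: 10, retryWait: 1000 });
      try {
        return await fn();
      } finally {
        await unlock(lockFilePath);
      }
    }

The next two patches take a different route to the same end, replacing the on-disk lock file with in-process reader-writer locks.
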
From a722251e138ae30b80d40efa3c055c4067f19978 Mon Sep 17 00:00:00 2001
From: ducku
Date: Thu, 2 Nov 2023 23:08:21 -0700
Subject: [PATCH 4/6] switch locking from lockfile to readers-writer-lock (WIP)

---
 package-lock.json | 11 +++++++++++
 package.json      |  1 +
 src/server.mjs    | 28 ++++++++++++----------------
 3 files changed, 24 insertions(+), 16 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 914cb612..aeaaaef4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -54,6 +54,7 @@
       "react-select-event": "^5.5.1",
       "reactjs-popup": "^2.0.5",
       "reactstrap": "^9.1.9",
+      "readers-writer-lock": "^1.0.0",
       "sanitize-filename": "^1.6.3",
       "uuid": "^9.0.0",
       "webpack": "^5.82.0",
@@ -16493,6 +16494,11 @@
         "node": ">=8.10.0"
       }
     },
+    "node_modules/readers-writer-lock": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/readers-writer-lock/-/readers-writer-lock-1.0.0.tgz",
+      "integrity": "sha512-cXBZkOGtZGRQhB+P9TJriYUZoqFFXfe9ciiooHOPVbdEmVE2X5T8OW1Fxfd8EySo6QOkzZw0wZJWOOUxkq0xDQ=="
+    },
     "node_modules/recursive-readdir": {
       "version": "2.2.3",
       "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
@@ -31525,6 +31531,11 @@
         "picomatch": "^2.2.1"
       }
     },
+    "readers-writer-lock": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/readers-writer-lock/-/readers-writer-lock-1.0.0.tgz",
+      "integrity": "sha512-cXBZkOGtZGRQhB+P9TJriYUZoqFFXfe9ciiooHOPVbdEmVE2X5T8OW1Fxfd8EySo6QOkzZw0wZJWOOUxkq0xDQ=="
+    },
     "recursive-readdir": {
       "version": "2.2.3",
       "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz",
diff --git a/package.json b/package.json
index dff22ca3..e06972f0 100644
--- a/package.json
+++ b/package.json
@@ -49,6 +49,7 @@
     "react-select-event": "^5.5.1",
     "reactjs-popup": "^2.0.5",
     "reactstrap": "^9.1.9",
+    "readers-writer-lock": "^1.0.0",
     "sanitize-filename": "^1.6.3",
     "uuid": "^9.0.0",
     "webpack": "^5.82.0",
diff --git a/src/server.mjs b/src/server.mjs
index ebd39443..ab3ee542 100644
--- a/src/server.mjs
+++ b/src/server.mjs
@@ -29,7 +29,7 @@ import sanitize from "sanitize-filename";
 import { createHash } from "node:crypto";
 import { JSONParser} from '@streamparser/json';
 import cron from "node-cron";
-import lockFile from "lockfile";
+import RWLock from "readers-writer-lock";
 
 
@@ -75,6 +75,8 @@ const fileTypes = {
   BED:"bed",
 };
 
+const lockMap = new Map();
+
 // Make sure that the scratch directory exists at startup, so multiple requests
 // can't fight over its creation.
 fs.mkdirSync(SCRATCH_DATA_PATH, { recursive: true });
@@ -152,26 +154,20 @@ function deleteExpiredFiles(directoryPath) {
   });
 }
 
-function lockDirectory(directoryPath) {
+async function lockDirectory(directoryPath, lockType, func) {
   if (!fs.existsSync(directoryPath)) {
     return 1;
   }
-  const lockOptions = {
-    "retries": 10, // number of tries before giving up
-    "retryWait": 1000 // number of milliseconds to wait before trying to acquire the lock again
-  }
 
-  const lockFilePath = path.join(directoryPath, "directory.lock");
-  // attempt to acquire the lock for the directory
-  lockFile.lock(lockFilePath, lockOptions, function (err) {
-    if (err) {
-      console.log("failed to acquire locks for", directoryPath);
-      return err;
-    } else {
-      console.log("successfully acquired locks for", directoryPath);
-      return 0;
-    }
-  });
+  let lock = lockMap.get(directoryPath);
+  // if there are no locks, create a new lock and store it in the lock dictionary
+  if (!lock) {
+    lock = new RWLock();
+    lockMap.set(directoryPath, lock);
+  }
+
+  if lockType ==
+
 }
 
 function unlockDirectory(directoryPath) {
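
Note: the readers-writer-lock package used from here on wraps a critical section in a function. As the next patch relies on, lock.read(fn) and lock.write(fn) run the given async fn under the lock and return its promise, with readers allowed to overlap and a writer running exclusively. A small usage sketch, assuming those semantics and using the named import that patch 5 settles on:

    // rwlock-sketch.mjs -- illustrative only, not part of the patches
    import { RWLock } from "readers-writer-lock";

    const lock = new RWLock();

    // the two readers may hold the lock at the same time
    lock.read(async () => console.log("reader A"));
    lock.read(async () => console.log("reader B"));

    // the writer waits for all readers to finish, then runs alone
    await lock.write(async () => console.log("exclusive writer"));
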
From ca9def9c9e6051d1ed4f515284913eba98eaa720 Mon Sep 17 00:00:00 2001
From: ducku
Date: Sun, 5 Nov 2023 13:27:29 -0800
Subject: [PATCH 5/6] lock directories when processing a getChunkedData request

---
 src/server.mjs | 70 ++++++++++++++++++++++++++++++++------------------
 1 file changed, 42 insertions(+), 28 deletions(-)

diff --git a/src/server.mjs b/src/server.mjs
index ab3ee542..28382f49 100644
--- a/src/server.mjs
+++ b/src/server.mjs
@@ -29,7 +29,7 @@ import sanitize from "sanitize-filename";
 import { createHash } from "node:crypto";
 import { JSONParser} from '@streamparser/json';
 import cron from "node-cron";
-import RWLock from "readers-writer-lock";
+import { RWLock, combine } from "readers-writer-lock";
 
 
@@ -77,6 +77,11 @@ const fileTypes = {
 
 const lockMap = new Map();
 
+const lockTypes = {
+  READ_LOCK: "read_lock",
+  WRITE_LOCK: "write_lock"
+}
+
 // Make sure that the scratch directory exists at startup, so multiple requests
 // can't fight over its creation.
 fs.mkdirSync(SCRATCH_DATA_PATH, { recursive: true });
@@ -154,51 +159,58 @@ function deleteExpiredFiles(directoryPath) {
   });
 }
 
+// takes in an async function, locks the directory for the duration of the function
 async function lockDirectory(directoryPath, lockType, func) {
-  if (!fs.existsSync(directoryPath)) {
-    return 1;
-  }
-
+  console.log("Acquiring", lockType, "for", directoryPath);
+  // look into lockMap to see if there is a lock assigned to the directory
   let lock = lockMap.get(directoryPath);
   // if there are no locks, create a new lock and store it in the lock dictionary
   if (!lock) {
     lock = new RWLock();
     lockMap.set(directoryPath, lock);
   }
 
-  if lockType ==
-
+  if (lockType == lockTypes.READ_LOCK) {
+    // lock is released when func returns
+    return lock.read(func);
+  } else if (lockType == lockTypes.WRITE_LOCK) {
+    return lock.write(func);
+  } else {
+    console.log("Not a valid lock type:", lockType);
+    return 1;
+  }
 }
 
-function unlockDirectory(directoryPath) {
-  if (!fs.existsSync(directoryPath)) {
-    return 1;
-  }
-
-  const lockFilePath = path.join(directoryPath, "directory.lock");
-  lockFile.unlock(lockFilePath, function (err) {
-    if (err) {
-      console.log("failed to release locks for", directoryPath);
-      return err;
-    } else {
-      console.log("successfully released locks for ", directoryPath);
-      return 0;
-    }
-  });
+// expects an array of directory paths, attempting to acquire all directory locks
+async function lockDirectories(directoryPaths, lockType, func) {
+  // bail out if there is nothing to lock
+  if (!directoryPaths || directoryPaths.length === 0) {
+    return;
+  }
+
+  // last lock to acquire, ready to proceed
+  if (directoryPaths.length === 1) {
+    return lockDirectory(directoryPaths[0], lockType, func);
+  }
+
+  // acquire a lock for the next directory, and call lockDirectories on the remaining
+  // directories inside it, returning the nested promise so the outer lock is held
+  // until the whole chain completes
+  const currDirectory = directoryPaths.pop();
+  return lockDirectory(currDirectory, lockType, async function() {
+    return lockDirectories(directoryPaths, lockType, func);
+  });
 }
 
 // runs every minute
 // deletes any files in the download directory past the set fileExpirationTime set in config
 cron.schedule('* * * * *', () => {
   console.log("cron scheduled check");
-  // loop through these specified directories
+  // acquire a write lock on each directory before attempting to delete files
   for (const dir of [DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) {
-    const err = lockDirectory(dir);
-    if (err) {
-      console.log("unable to delete expired files for", dir);
-    } else {
+    lockDirectory(dir, lockTypes.WRITE_LOCK, async function() {
       deleteExpiredFiles(dir);
-      unlockDirectory(dir);
-    }
+    });
   }
 });
@@ -341,8 +353,10 @@ api.post("/getChunkedData", (req, res, next) => {
   //
   // So we set up a promise here and we make sure to handle failures
   // ourselves with next().
-  let promise = getChunkedData(req, res, next);
-  promise.catch(next);
+  lockDirectories([DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH], lockTypes.READ_LOCK, async function() {
+    let promise = getChunkedData(req, res, next);
+    promise.catch(next);
+  })
 });
 
 // Handle a chunked data (tube map view) request. Returns a promise. On error,
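
Note: because lockDirectories() picks the next directory with directoryPaths.pop(), locks are acquired from the back of the array inward, and pop() mutates the caller's array, so callers should pass a fresh array literal each time. Deadlock is avoided as long as every caller passes the directories in the same order, which the final patch documents. A standalone sketch of the nesting that lockDirectories(["A", "B"], ...) produces (the A/B locks are illustrative):

    // ordering-sketch.mjs -- illustrative only, not part of the patches
    import { RWLock } from "readers-writer-lock";

    const locks = { A: new RWLock(), B: new RWLock() };

    async function work() {
      console.log("holding both locks");
    }

    // pop() takes "B" first, so the expansion is: lock B, then lock A inside it
    await locks.B.read(() => locks.A.read(work));
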
From ef16b8584d3ffe0fc250e082790388232e45f788 Mon Sep 17 00:00:00 2001
From: ducku
Date: Tue, 7 Nov 2023 10:42:04 -0800
Subject: [PATCH 6/6] await promise before releasing lock

---
 package-lock.json | 17 -----------------
 package.json      |  1 -
 src/server.mjs    |  9 ++++++++-
 3 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index aeaaaef4..a0a46311 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -34,7 +34,6 @@
       "express": "^4.18.2",
       "fs-extra": "^10.1.0",
       "gh-pages": "^4.0.0",
-      "lockfile": "^1.0.4",
       "markdown-to-jsx": "^7.2.0",
       "multer": "^1.4.5-lts.1",
       "node-cron": "^3.0.2",
@@ -13462,14 +13461,6 @@
         "node": ">=8"
       }
     },
-    "node_modules/lockfile": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
-      "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
-      "dependencies": {
-        "signal-exit": "^3.0.2"
-      }
-    },
     "node_modules/lodash": {
       "version": "4.17.21",
       "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -29479,14 +29470,6 @@
         "p-locate": "^4.1.0"
       }
     },
-    "lockfile": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
-      "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
-      "requires": {
-        "signal-exit": "^3.0.2"
-      }
-    },
     "lodash": {
       "version": "4.17.21",
       "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
diff --git a/package.json b/package.json
index e06972f0..2a3c5349 100644
--- a/package.json
+++ b/package.json
@@ -29,7 +29,6 @@
     "express": "^4.18.2",
     "fs-extra": "^10.1.0",
     "gh-pages": "^4.0.0",
-    "lockfile": "^1.0.4",
     "markdown-to-jsx": "^7.2.0",
     "multer": "^1.4.5-lts.1",
     "node-cron": "^3.0.2",
diff --git a/src/server.mjs b/src/server.mjs
index 28382f49..653709c7 100644
--- a/src/server.mjs
+++ b/src/server.mjs
@@ -124,6 +124,8 @@ var upload = multer({ storage, limits });
 
 // deletes expired files given a directory, recursively calls itself for nested directories
 // expired files are files not accessed for a certain amount of time
+// TODO: find a more reliable way to detect file accessed time than stat.atime?
+// atime updates depend on how the filesystem is mounted (e.g. noatime/relatime can disable or delay them)
 function deleteExpiredFiles(directoryPath) {
   console.log("deleting expired files in ", directoryPath);
   const currentTime = new Date().getTime();
@@ -184,6 +186,8 @@ }
 
 // expects an array of directory paths, attempting to acquire all directory locks
+// every caller must pass the directory paths in the same order (e.g. always
+// [DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) so locks are acquired in a consistent order and deadlock is avoided
 async function lockDirectories(directoryPaths, lockType, func) {
   // bail out if there is nothing to lock
   if (!directoryPaths || directoryPaths.length === 0) {
     return;
   }
@@ -353,10 +357,14 @@ api.post("/getChunkedData", (req, res, next) => {
   //
   // So we set up a promise here and we make sure to handle failures
   // ourselves with next().
+
+  // hold a read lock on the data directories while processing chunked data
   lockDirectories([DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH], lockTypes.READ_LOCK, async function() {
     let promise = getChunkedData(req, res, next);
     promise.catch(next);
-  })
+    // wait for the request to finish before the locks are released;
+    // the catch above already routes errors to next()
+    await promise.catch(() => {});
+  });
 });
 
 // Handle a chunked data (tube map view) request. Returns a promise. On error,
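
Note: the point of this final patch is that the locked callback must await the request promise. An async function that merely starts the work returns immediately, so the read lock would be released while getChunkedData is still reading, letting the scheduled cleanup delete chunks that are in use. A self-contained sketch of the difference (slowJob stands in for getChunkedData, and the RWLock semantics are assumed to match patch 5's usage):

    // await-sketch.mjs -- illustrative only, not part of the patches
    import { RWLock } from "readers-writer-lock";

    const lock = new RWLock();
    const slowJob = () =>
      new Promise((resolve) => setTimeout(() => { console.log("job done"); resolve(); }, 100));

    // without await: the callback resolves immediately and the lock is released
    // while the job is still running ("job done" prints after "released too early")
    await lock.read(async () => { slowJob(); });
    console.log("released too early");

    // with await: the lock is held until the job completes
    await lock.read(async () => { await slowJob(); });
    console.log("released after the job");
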