Using fs-merger module to facilitate array of trees as input to persistent filter

1) This allows plugin developers to avoid merging or funneling all the input trees into a single tree before passing it to the filter (as sketched below).
SparshithNR committed Aug 28, 2019
1 parent 7ba9626 commit b760b22
Showing 7 changed files with 168 additions and 492 deletions.
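For context, a minimal sketch of what this change enables for plugin authors. It is illustrative only: MyFilter, treeA, and treeB are hypothetical names, and it assumes the package is consumed as broccoli-persistent-filter.

```js
const Filter = require('broccoli-persistent-filter');

class MyFilter extends Filter {
  // Trivial pass-through transform, purely for illustration.
  processString(contents /*, relativePath */) {
    return contents;
  }
}

// Before this change, multiple trees had to be funneled/merged into one node first:
//   const mergeTrees = require('broccoli-merge-trees');
//   let filtered = new MyFilter(mergeTrees([treeA, treeB]), { annotation: 'my-filter' });

// With this change, an array of input trees can be passed straight to the filter:
let filtered = new MyFilter([treeA, treeB], { annotation: 'my-filter' });
```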
95 changes: 69 additions & 26 deletions index.js
@@ -1,13 +1,12 @@
// @ts-check
'use strict';

const fs = require('fs');
const fs = require('fs-extra');
const path = require('path');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
const Promise = require('rsvp').Promise;
const Plugin = require('broccoli-plugin');
const walkSync = require('walk-sync');
const mapSeries = require('promise-map-series');
const symlinkOrCopySync = require('symlink-or-copy').sync;
const debugGenerator = require('heimdalljs-logger');
@@ -19,6 +18,7 @@ const hashForDep = require('hash-for-dep');
const FSTree = require('fs-tree-diff');
const heimdall = require('heimdalljs');
const queue = require('async-promise-queue');
const FSMerger = require('fs-merger');

class ApplyPatchesSchema {
constructor() {
@@ -68,7 +68,7 @@ function shouldPersist(env, persist) {
Filter.prototype = Object.create(Plugin.prototype);
Filter.prototype.constructor = Filter;

function Filter(inputTree, options) {
function Filter(inputTrees, options) {
if (!this || !(this instanceof Filter) ||
Object.getPrototypeOf(this) === Filter.prototype) {
throw new TypeError('Filter is an abstract class and must be sub-classed');
@@ -83,13 +83,18 @@ function Filter(inputTree, options) {

/** @type {{debug(...s: any[]): void; info(...s: any[]): void}} */
this._logger = debugGenerator(loggerName);
let inputTreesParsed = [];
inputTrees = Array.isArray(inputTrees) ? inputTrees : [inputTrees];
inputTrees.forEach((tree) => {
inputTreesParsed.push(tree.root ? tree.root : tree);
});

Plugin.call(this, [inputTree], {
Plugin.call(this, inputTreesParsed, {
name: (options && options.name) || this.name || loggerName,
annotation: (options && options.annotation) || this.annotation || annotation,
persistentOutput: true
});

this.inputNodes = inputTrees;
/** @type {Processor} */
this.processor = new Processor(options);
/** @type {Dependencies | null} */
@@ -162,21 +167,21 @@ function invalidationsAsPatches(invalidated, currentTree, nextTree) {
}

Filter.prototype.build = function() {
// @ts-ignore
let srcDir = this.inputPaths[0];
// @ts-ignore
let destDir = this.outputPath;

if (this.dependencyInvalidation && !this.dependencies) {
this.dependencies = this.processor.initialDependencies(srcDir);
// @ts-ignore
this.dependencies = this.processor.initialDependencies(this.inputPaths);
}

if (this._needsReset) {
this.currentTree = new FSTree();
// @ts-ignore
let instrumentation = heimdall.start('reset');
if (this.dependencies) {
this.dependencies = this.processor.initialDependencies(srcDir);
// @ts-ignore
this.dependencies = this.processor.initialDependencies(this.inputPaths);
}
// @ts-ignore
rimraf.sync(this.outputPath);
@@ -190,8 +195,9 @@ Filter.prototype.build = function() {
let instrumentation = heimdall.start('derivePatches', DerivePatchesSchema);

let walkStart = process.hrtime();
let entries = walkSync.entries(srcDir);
let nextTree = FSTree.fromEntries(entries);
this.fsMergerFiles = new FSMerger(this.inputNodes);
let entries = this.fsMergerFiles.entries('');
let nextTree = FSTree.fromEntries(entries, { sortAndExpand: true });
let walkDuration = timeSince(walkStart);

let invalidationsStart = process.hrtime();
@@ -243,7 +249,17 @@ Filter.prototype.build = function() {
let operation = patch[0];
let relativePath = patch[1];
let entry = patch[2];
let outputPath = destDir + '/' + (this.getDestFilePath(relativePath, entry) || relativePath);
let fileMeta = this.fsMergerFiles.readFileMeta(relativePath, { basePath: entry.basePath });
let outputPath = relativePath;
if (fileMeta && fileMeta.getDestinationPath) {
outputPath = fileMeta.getDestinationPath(relativePath);
} else {
outputPath = this.getDestFilePath(relativePath, entry) || outputPath;
if (fileMeta && fileMeta.prefix) {
outputPath = fileMeta.prefix + '/' + outputPath;
}
}
outputPath = destDir + '/' + outputPath;
let outputFilePath = outputPath;
let forceInvalidation = invalidated.includes(relativePath);

@@ -252,7 +268,7 @@
switch (operation) {
case 'mkdir': {
instrumentation.mkdir++;
return fs.mkdirSync(outputPath);
return mkdirp.sync(outputPath);
} case 'rmdir': {
instrumentation.rmdir++;
return fs.rmdirSync(outputPath);
@@ -263,7 +279,7 @@
// wrap this in a function so it doesn't actually run yet, and can be throttled
let changeOperation = () => {
instrumentation.change++;
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, forceInvalidation, true, instrumentation);
return this._handleFile(relativePath, destDir, entry, outputFilePath, forceInvalidation, true, instrumentation);
};
if (this.async) {
pendingWork.push(changeOperation);
@@ -274,7 +290,7 @@
// wrap this in a function so it doesn't actually run yet, and can be throttled
let createOperation = () => {
instrumentation.create++;
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, forceInvalidation, false, instrumentation);
return this._handleFile(relativePath, destDir, entry, outputFilePath, forceInvalidation, false, instrumentation);
};
if (this.async) {
pendingWork.push(createOperation);
@@ -298,28 +314,43 @@ Filter.prototype.build = function() {
});
};

Filter.prototype._handleFile = function(relativePath, srcDir, destDir, entry, outputPath, forceInvalidation, isChange, stats) {
Filter.prototype._handleFile = function(relativePath, destDir, entry, outputPath, forceInvalidation, isChange, stats) {
stats.handleFile++;

let handleFileStart = process.hrtime();

return new Promise(resolve => {
let result;
let srcPath = srcDir + '/' + relativePath;

if (this.canProcessFile(relativePath, entry)) {
stats.processed++;
if (this._outputLinks[outputPath] === true) {
delete this._outputLinks[outputPath];
fs.unlinkSync(outputPath);
}
result = this.processAndCacheFile(srcDir, destDir, entry, forceInvalidation, isChange, stats);
result = this.processAndCacheFile(relativePath, destDir, entry, forceInvalidation, isChange, stats);
} else {
stats.linked++;
if (isChange) {
fs.unlinkSync(outputPath);
}
result = symlinkOrCopySync(srcPath, outputPath);
/**
       * Since we no longer have a single funneled/merged input folder, the patches may not contain the
       * folder structure we expect in the output. Try to symlink first; if that fails because the parent directory is missing, create it and retry.
*/

try {
result = symlinkOrCopySync(entry.fullPath, outputPath);
} catch (error) {
if (error && error.code === 'ENOENT') {
let relativeDir = outputPath.split(path.basename(outputPath))[0];
fs.ensureDirSync(relativeDir);
result = symlinkOrCopySync(entry.fullPath, outputPath);
} else {
throw error;
}
}

this._outputLinks[outputPath] = true;
}
resolve(result);
@@ -374,13 +405,24 @@ Filter.prototype.canProcessFile =

Filter.prototype.isDirectory = function(relativePath, entry) {
// @ts-ignore
if (this.inputPaths === undefined) {
if (this.fsMergerFiles === undefined && this.inputPaths === undefined) {
return false;
}

// @ts-ignore
let srcDir = this.inputPaths[0];
let path = srcDir + '/' + relativePath;
let path = '';
if (this.fsMergerFiles) {
let fileMeta = this.fsMergerFiles.readFileMeta(relativePath, { basePath: entry && entry.basePath });
path = fileMeta.path;
} else {
// @ts-ignore
for (let index = 0; index < this.inputPaths.length; index++) {
// @ts-ignore
let inputPath = this.inputPaths[index] + '/' + relativePath;
if (fs.existsSync(inputPath)) {
path = inputPath;
break;
}
}
}

return (entry || fs.lstatSync(path)).isDirectory();
};
Expand Down Expand Up @@ -442,9 +484,10 @@ Filter.prototype.processFile = function(srcDir, destDir, relativePath, forceInva
if (inputEncoding === undefined) inputEncoding = 'utf8';
if (outputEncoding === undefined) outputEncoding = 'utf8';

let contents = fs.readFileSync(srcDir + '/' + relativePath, {
let contents = fs.readFileSync(entry.fullPath, {
encoding: inputEncoding
});
let fileMeta = this.fsMergerFiles.readFileMeta(srcDir, { basePath: entry.basePath });

instrumentation.processString++;
let processStringStart = process.hrtime();
Expand All @@ -460,7 +503,7 @@ Filter.prototype.processFile = function(srcDir, destDir, relativePath, forceInva
relativePath + '") is null');
}

outputPath = destDir + '/' + outputPath;
outputPath = path.join(destDir, fileMeta.prefix || '', outputPath);

if (isChange) {
let isSame = fs.readFileSync(outputPath, 'UTF-8') === outputString;
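The new outputPath handling above reads per-tree metadata back out of fs-merger via readFileMeta(). Here is a rough sketch of the input-node shapes that metadata comes from, based on how fs-merger is used in this diff; treat the exact node shapes as assumptions, and dir1/dir2/dir3 as placeholder paths.

```js
const FSMerger = require('fs-merger');

const merger = new FSMerger([
  'dir1',                                  // a plain directory path
  { root: 'dir2', prefix: 'nested/out' },  // files from dir2 land under a prefix in the output
  {
    root: 'dir3',
    getDestinationPath(relativePath) {
      // Full control over where a given file is written in the output.
      return relativePath.replace(/\.txt$/, '.md');
    }
  }
]);

// entries('') walks all roots as one logical tree (what build() feeds to FSTree.fromEntries),
// and readFileMeta() exposes the prefix / getDestinationPath for a specific file.
let entries = merger.entries('');
let meta = merger.readFileMeta('docs/readme.txt', { basePath: 'dir3' });
```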
74 changes: 49 additions & 25 deletions lib/dependencies.js
@@ -13,20 +13,20 @@ const FSHashTree = fsHashDiff.FSHashTree;
module.exports = class Dependencies {
/**
* Creates an instance of Dependencies.
* @param rootDir {string} The root directory containing the files that
* @param inputPaths {Array<string>} The root directories containing the files that
* have dependencies. Relative paths are resolved against this directory.
*/
constructor(rootDir) {
constructor(inputPaths) {
/**
* Tracks whether new dependencies can be added.
**/
this.sealed = false;
/**
* The root directory containing the files that have dependencies. Relative
* The root directories containing the files that have dependencies. Relative
* paths are resolved against this directory.
* @type {string}
* @type {Array<string>}
*/
this.rootDir = path.normalize(rootDir);
this.rootDirs = Array.isArray(inputPaths) ? inputPaths : [inputPaths];
/**
* Tracks dependencies on a per file basis.
* The key is a relative path, values are absolute paths.
@@ -67,9 +67,13 @@ module.exports = class Dependencies {
// Build a unified set of dependencies for the entire tree
/** @type {string} */
let depRoot;
if (deps[i].startsWith(this.rootDir + path.sep)) {
depRoot = this.rootDir;
} else {
for (let index = 0; index < this.rootDirs.length; index++) {
if (deps[i].startsWith(path.normalize(this.rootDirs[index]) + path.sep)) {
depRoot = this.rootDirs[index];
break;
}
}
if (!depRoot) {
depRoot = path.parse(deps[i]).root;
}
let depsForRoot = this._getDepsForRoot(depRoot);
@@ -143,7 +147,12 @@ module.exports = class Dependencies {
for (let i = 0; i < dependencies.length; i++) {
let depPath = path.normalize(dependencies[i]);
if (!path.isAbsolute(depPath)) {
depPath = path.resolve(this.rootDir, fileDir, depPath);
for (let index = 0; index < this.rootDirs.length; index++) {
let tempPath = path.resolve(this.rootDirs[index], fileDir, depPath);
if (fs.existsSync(tempPath)) {
depPath = tempPath;
}
}
}
absoluteDeps.push(depPath);
}
@@ -162,7 +171,7 @@
*/
copyWithout(files) {
files = files.map(f => path.normalize(f));
let newDeps = new Dependencies(this.rootDir);
let newDeps = new Dependencies(this.rootDirs);
for (let file of this.dependencyMap.keys()) {
if (!files.includes(file)) {
newDeps.setDependencies(file, this.dependencyMap.get(file));
@@ -195,7 +204,7 @@ module.exports = class Dependencies {
let dependencies = this.allDependencies.get(fsRoot);
/** @type {FSTree<Entry> | FSHashTree} */
let fsTree;
if (fsRoot === this.rootDir) {
if (this.rootDirs.indexOf(fsRoot) > -1) {
fsTree = getHashTree(fsRoot, dependencies);
} else {
fsTree = getStatTree(fsRoot, dependencies);
@@ -237,7 +246,7 @@ module.exports = class Dependencies {
*
* This object is serializable so it can be put into the persistent cache and
* used to invalidate files during the next build in a new process.
* @return {{rootDir: string, dependencies: {[k: string]: string[]}, fsTrees: Array<{fsRoot: string, entries: Array<{relativePath: string} & ({type: 'stat', size: number, mtime: number, mode: number} | {type: 'hash', hash: string})>}>}}
* @return {{rootDirs: Array<string>, dependencies: {[k: string]: string[]}, fsTrees: Array<{fsRoot: string, entries: Array<{relativePath: string} & ({type: 'stat', size: number, mtime: number, mode: number} | {type: 'hash', hash: string})>}>}}
*/
serialize() {
/** @type {{[k: string]: string[]}} */
@@ -274,7 +283,7 @@ module.exports = class Dependencies {
});
}
let serialized = {
rootDir: this.rootDir,
rootDirs: this.rootDirs,
dependencies,
fsTrees
};
@@ -285,21 +294,30 @@
* Deserialize from JSON data returned from the `serialize` method.
*
* @param dependencyData {ReturnType<Dependencies['serialize']>}
* @param [newRootDir] {string | undefined}
* @param [newRootDirs] {Array<string> | undefined}
* @return {Dependencies};
*/
static deserialize(dependencyData, newRootDir) {
let oldRootDir = dependencyData.rootDir;
newRootDir = path.normalize(newRootDir || oldRootDir);
let dependencies = new Dependencies(newRootDir);
static deserialize(dependencyData, newRootDirs) {
let oldRootDirs = dependencyData.rootDirs;
newRootDirs = newRootDirs || oldRootDirs;
let dependencies = new Dependencies(newRootDirs);
let files = Object.keys(dependencyData.dependencies);
for (let file of files) {
let deps = dependencyData.dependencies[file];
if (newRootDir) {
if (newRootDirs) {
for (let i = 0; i < deps.length; i++) {
let dep = deps[i];
if (dep.startsWith(oldRootDir+path.sep)) {
deps[i] = dep.replace(oldRootDir, newRootDir);
for (let index = 0; index < oldRootDirs.length; index++) {
if (dep.startsWith(path.normalize(oldRootDirs[index]) + path.sep)) {
for (let rootIndex = 0; rootIndex < newRootDirs.length; rootIndex++) {
let newPath = dep.replace(oldRootDirs[index], newRootDirs[rootIndex]);
if (fs.existsSync(newPath)) {
deps[i] = newPath;
break;
}
}
break;
}
}
}
}
@@ -321,10 +339,16 @@ module.exports = class Dependencies {
let fsTree;
/** @type {string} */
let treeRoot;
if (fsTreeData.fsRoot === oldRootDir) {
treeRoot = newRootDir;
fsTree = FSHashTree.fromHashEntries(entries, { sortAndExpand: true });
} else {
for (let index = 0; index < oldRootDirs.length; index++) {
const oldRootDir = oldRootDirs[index];
if (fsTreeData.fsRoot === oldRootDir) {
// TODO: verify this swapping logic; newRootDirs may not be in the same order as oldRootDirs.
treeRoot = newRootDirs[index];
fsTree = FSHashTree.fromHashEntries(entries, { sortAndExpand: true });
break;
}
}
if (!(treeRoot || fsTree)) {
treeRoot = fsTreeData.fsRoot;
fsTree = FSTree.fromEntries(entries, { sortAndExpand: true });
}
