diff --git a/lib/Pipeline.js b/lib/Pipeline.js
index ae18e517..3bb5b941 100644
--- a/lib/Pipeline.js
+++ b/lib/Pipeline.js
@@ -83,6 +83,8 @@ Pipeline.processJSON = function(gltf, options) {
  * @param {Object} [options.compressTextureCoordinates=false] Flag to run compressTextureCoordinates stage.
  * @param {Object} [options.kmcOptions=undefined] Options to pass to the generateModelMaterialsCommon stage, if undefined, stage is not run.
  * @param {Object} [options.quantize] Flag to run quantizeAttributes stage.
+ * @param {Object} [options.preserve=false] Flag to turn optimization pipeline stages on/off to preserve the original glTF hierarchy.
+ * @param {Boolean} [options.mergeVertices=false] Flag to merge duplicate vertices, which can produce a smaller model size but greatly increases conversion time. This setting only applies when options.preserve is false.
  * @param {Object|Object[]} [options.textureCompressionOptions=undefined] Options to pass to the compressTextures stage. If an array of options is given, the textures will be compressed in multiple formats. If undefined, stage is not run.
  * @returns {Promise} A promise that resolves to the processed glTF asset.
  */
@@ -111,8 +113,12 @@ Pipeline.processJSONWithExtras = function(gltfWithExtras, options) {
     if (smoothNormals || faceNormals) {
         generateNormals(gltfWithExtras, options);
     }
+
+    var mergeVertices = defaultValue(options.mergeVertices, false);
     if (!shouldPreserve) {
-        mergeDuplicateVertices(gltfWithExtras);
+        if (mergeVertices) {
+            mergeDuplicateVertices(gltfWithExtras);
+        }
         MergeDuplicateProperties.mergeAll(gltfWithExtras);
         RemoveUnusedProperties.removeAll(gltfWithExtras);
         removeDuplicatePrimitives(gltfWithExtras);
diff --git a/lib/mergeDuplicateVertices.js b/lib/mergeDuplicateVertices.js
index d86cc65c..325132fb 100644
--- a/lib/mergeDuplicateVertices.js
+++ b/lib/mergeDuplicateVertices.js
@@ -25,6 +25,10 @@ module.exports = mergeDuplicateVertices;
  * @see removeUnusedVertices
  */
 function mergeDuplicateVertices(gltf) {
+    return mergeDuplicateVertices._implementation(gltf);
+}
+
+mergeDuplicateVertices._implementation = function (gltf) {
     var meshes = gltf.meshes;
     var indexAccessors = {};
     for (var meshId in meshes) {
@@ -37,7 +41,7 @@ function mergeDuplicateVertices(gltf) {
     mergeDuplicateVerticesFromMapping(gltf, indexAccessors);
     removeUnusedVertices(gltf);
     return gltf;
-}
+};
 
 function mergeDuplicateVerticesFromMapping(gltf, indexAccessors) {
     var accessors = gltf.accessors;
diff --git a/lib/parseArguments.js b/lib/parseArguments.js
index 4bdaa22a..79ed8521 100644
--- a/lib/parseArguments.js
+++ b/lib/parseArguments.js
@@ -72,6 +72,10 @@ function parseArguments(args) {
             describe: 'Compress the testure coordinates of this glTF asset.',
             type: 'boolean'
         },
+        'mergeVertices': {
+            describe: 'Merges duplicate vertices, which can produce a smaller model size but greatly increases conversion time. This setting only applies when preserve is false.',
+            type: 'boolean'
+        },
         'removeNormals': {
             alias: 'r',
             describe: 'Strips off existing normals, allowing them to be regenerated.',
@@ -261,6 +265,7 @@ function parseArguments(args) {
         preserve: argv.p,
         quantize: argv.q,
         removeNormals: argv.r,
-        textureCompressionOptions: textureCompressionOptions
+        textureCompressionOptions: textureCompressionOptions,
+        mergeVertices: argv.mergeVertices
     };
-}
\ No newline at end of file
+}
diff --git a/specs/lib/PipelineSpec.js b/specs/lib/PipelineSpec.js
index 2925fc01..db15efb0 100644
--- a/specs/lib/PipelineSpec.js
+++ b/specs/lib/PipelineSpec.js
@@ -4,6 +4,7 @@ var fsExtra = require('fs-extra');
 var path = require('path');
 var Promise = require('bluebird');
 
+var mergeDuplicateVertices = require('../../lib/mergeDuplicateVertices');
 var Pipeline = require('../../lib/Pipeline');
 var readGltf = require('../../lib/readGltf');
 
@@ -11,6 +12,7 @@ var processFile = Pipeline.processFile;
 var processFileToDisk = Pipeline.processFileToDisk;
 var processJSON = Pipeline.processJSON;
 var processJSONToDisk = Pipeline.processJSONToDisk;
+var processJSONWithExtras = Pipeline.processJSONWithExtras;
 
 var fsExtraReadFile = Promise.promisify(fsExtra.readFile);
 
@@ -159,4 +161,28 @@ describe('Pipeline', function() {
             expect(initialUri).not.toEqual(finalUri);
         }), done).toResolve();
     });
+
+    it('processJSONWithExtras does not merge duplicate vertices by default', function (done) {
+        spyOn(mergeDuplicateVertices, '_implementation').and.callThrough();
+        var promise = readGltf(gltfPath)
+            .then(function (gltf) {
+                return processJSONWithExtras(gltf);
+            })
+            .then(function () {
+                expect(mergeDuplicateVertices._implementation).not.toHaveBeenCalled();
+            });
+        expect(promise, done).toResolve();
+    });
+
+    it('processJSONWithExtras can merge duplicate vertices.', function (done) {
+        spyOn(mergeDuplicateVertices, '_implementation').and.callThrough();
+        var promise = readGltf(gltfPath)
+            .then(function (gltf) {
+                return processJSONWithExtras(gltf, {mergeVertices: true});
+            })
+            .then(function (gltf) {
+                expect(mergeDuplicateVertices._implementation).toHaveBeenCalledWith(gltf);
+            });
+        expect(promise, done).toResolve();
+    });
 });
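For context, a minimal usage sketch of the new option through the JavaScript API (the input path is hypothetical and the require paths assume the snippet runs from the repository root; the calls mirror the additions to PipelineSpec.js above):

    var readGltf = require('./lib/readGltf');
    var Pipeline = require('./lib/Pipeline');

    readGltf('./input.gltf')
        .then(function (gltf) {
            // Duplicate vertices are now merged only when mergeVertices is
            // explicitly true (and preserve is not set); by default the
            // mergeDuplicateVertices stage is skipped.
            return Pipeline.processJSONWithExtras(gltf, { mergeVertices: true });
        })
        .then(function (processedGltf) {
            // processedGltf is the processed glTF asset.
        });

On the command line, the same behavior is exposed through the new --mergeVertices boolean flag added in parseArguments.js.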