diff --git a/README.md b/README.md
index ad5694ee..c0ecc3c0 100644
--- a/README.md
+++ b/README.md
@@ -48,7 +48,7 @@ $ npm install json2csv --save
-V, --version output the version number
-i, --input Path and name of the incoming json file. If not provided, will read from stdin.
-o, --output [output] Path and name of the resulting csv file. Defaults to stdout.
- -L, --ldjson Treat the input as Line-Delimited JSON.
+ -n, --ndjson Treat the input as NewLine-Delimited JSON.
-s, --no-streamming Process the whole JSON array in memory instead of doing it line by line.
-f, --fields Specify the fields to convert.
-l, --field-list [list] Specify a file with a list of fields to include. One field per line.
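
For reference, a usage sketch of the renamed flag; the file name and field names below are illustrative, not taken from this change:

```sh
# Convert a newline-delimited JSON file (one JSON document per line) to CSV.
$ json2csv -i cars.ndjson -n -f carModel,price,color -o cars.csv
```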
@@ -153,7 +153,7 @@ $ json2csv -i test.json -f name,version --no-header >> test.csv
The programatic APIs take a configuration object very equivalent to the CLI options.
- `fields` - Array of Objects/Strings. Defaults to toplevel JSON attributes. See example below.
-- `ldjson` - Only effective on the streaming API. Indicates that data coming through the stream is ld-json.
+- `ndjson` - Only effective on the streaming API. Indicates that data coming through the stream is NDJSON.
- `unwind` - Array of Strings, creates multiple rows from a single JSON document similar to MongoDB's $unwind
- `flatten` - Boolean, flattens nested JSON using [flat]. Defaults to `false`.
- `defaultValue` - String, default value to use when missing data. Defaults to `` if not specified. (Overridden by `fields[].default`)
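
A minimal sketch of the renamed option on the streaming API (the input path is illustrative; `ndjson` has no effect on the synchronous parser):

```js
const fs = require('fs');
const json2csv = require('json2csv');

// With `ndjson: true` the transform parses each incoming line as a separate
// JSON document instead of expecting a single JSON array.
const transform = new json2csv.Transform({
  fields: ['carModel', 'price', 'color', 'transmission'],
  ndjson: true
});

fs.createReadStream('./cars.ndjson') // illustrative path
  .pipe(transform)
  .pipe(process.stdout);
```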
diff --git a/bin/json2csv.js b/bin/json2csv.js
index ecf57fda..c993ebd7 100755
--- a/bin/json2csv.js
+++ b/bin/json2csv.js
@@ -9,7 +9,7 @@ const Table = require('cli-table');
const program = require('commander');
const debug = require('debug')('json2csv:cli');
const json2csv = require('../lib/json2csv');
-const parseLdJson = require('../lib/parse-ldjson');
+const parseNdJson = require('../lib/parse-ndjson');
const pkg = require('../package');
const JSON2CSVParser = json2csv.Parser;
@@ -19,7 +19,7 @@ program
.version(pkg.version)
.option('-i, --input ', 'Path and name of the incoming json file. If not provided, will read from stdin.')
.option('-o, --output [output]', 'Path and name of the resulting csv file. Defaults to stdout.')
- .option('-L, --ldjson', 'Treat the input as Line-Delimited JSON.')
+ .option('-n, --ndjson', 'Treat the input as NewLine-Delimited JSON.')
.option('-s, --no-streamming', 'Process the whole JSON array in memory instead of doing it line by line.')
.option('-f, --fields ', 'Specify the fields to convert.')
.option('-l, --field-list [list]', 'Specify a file with a list of fields to include. One field per line.')
@@ -72,9 +72,9 @@ function getFields(fieldList, fields) {
: undefined);
}
-function getInput(input, ldjson) {
+function getInput(input, ndjson) {
if (inputPath) {
- if (ldjson) {
+ if (ndjson) {
return new Promise((resolve, reject) => {
fs.readFile(inputPath, 'utf8', (err, data) => {
if (err) {
@@ -82,7 +82,7 @@ function getInput(input, ldjson) {
return;
}
- resolve(parseLdJson(data));
+ resolve(parseNdJson(data));
});
});
}
@@ -97,8 +97,8 @@ function getInput(input, ldjson) {
process.stdin.on('data', chunk => (inputData += chunk));
process.stdin.on('error', err => debug('Could not read from stdin', err));
process.stdin.on('end', () => {
- const rows = ldjson
- ? parseLdJson(inputData)
+ const rows = ndjson
+ ? parseNdJson(inputData)
: JSON.parse(inputData);
return Promise.resolve(rows);
@@ -155,7 +155,7 @@ getFields(program.fieldList, program.fields)
};
if (program.streamming === false) {
- return getInput(program.input, program.ldjson)
+ return getInput(program.input, program.ndjson)
.then(input => new JSON2CSVParser(opts).parse(input))
.then(processOutput);
}
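
Since `getInput` also covers stdin, the flag works with piped input as well; a hedged example with made-up rows:

```sh
# -n makes the CLI parse stdin line by line instead of as a single JSON array.
$ printf '{"carModel":"Audi","price":10000}\n{"carModel":"BMW","price":15000}\n' | json2csv -n -f carModel,price
```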
diff --git a/index.d.ts b/index.d.ts
index 865ae33f..f5aa3194 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -17,7 +17,7 @@ declare namespace json2csv {
}
interface Options {
- ldjson?: boolean;
+ ndjson?: boolean;
fields?: (string | Field | CallbackField)[];
unwind?: string | string[];
flatten?: boolean;
diff --git a/lib/JSON2CSVTransform.js b/lib/JSON2CSVTransform.js
index e1b30eb9..a9b6939d 100644
--- a/lib/JSON2CSVTransform.js
+++ b/lib/JSON2CSVTransform.js
@@ -17,8 +17,8 @@ class JSON2CSVTransform extends Transform {
this._data = '';
this._hasWritten = false;
- if (this.params.ldjson) {
- this.initLDJSONParse();
+ if (this.params.ndjson) {
+ this.initNDJSONParse();
} else {
this.initJSONParser();
}
@@ -30,11 +30,11 @@ class JSON2CSVTransform extends Transform {
}
/**
- * Init the transform with a parser to process LD-JSON data.
+ * Init the transform with a parser to process NDJSON data.
* It maintains a buffer of received data, parses each line
* as JSON and send it to `pushLine for processing.
*/
- initLDJSONParse() {
+ initNDJSONParse() {
const transform = this;
this.parser = {
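
A rough sketch of the buffering behaviour described in the comment above, assuming a row may arrive split across chunks (field name and values are illustrative):

```js
const { Transform: Json2csvTransform } = require('json2csv');

const transform = new Json2csvTransform({ fields: ['foo'], ndjson: true });
transform.on('data', chunk => process.stdout.write(chunk.toString()));

// The second document is split across two writes; the transform buffers the
// partial line and only parses it once the rest of the line has arrived.
transform.write('{"foo":"bar"}\n{"foo":');
transform.write('"qux"}\n');
transform.end();
```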
diff --git a/lib/parse-ldjson.js b/lib/parse-ndjson.js
similarity index 71%
rename from lib/parse-ldjson.js
rename to lib/parse-ndjson.js
index 4ba7e253..dbc9c67b 100644
--- a/lib/parse-ldjson.js
+++ b/lib/parse-ndjson.js
@@ -1,6 +1,6 @@
'use strict';
-function parseLdJson(input) {
+function parseNdJson(input) {
return input
.split('\n')
.map(line => line.trim())
@@ -8,4 +8,4 @@ function parseLdJson(input) {
.map(line=> JSON.parse(line));
}
-module.exports = parseLdJson;
+module.exports = parseNdJson;
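
For reference, the renamed helper's behaviour as implemented above; the require path assumes the call site sits at the project root:

```js
const parseNdJson = require('./lib/parse-ndjson');

// Each line is trimmed and parsed as its own JSON document.
const rows = parseNdJson('{"foo":"bar"}\n{"foo":"qux"}');
console.log(rows); // [ { foo: 'bar' }, { foo: 'qux' } ]
```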
diff --git a/test/fixtures/csv/ldjson.csv b/test/fixtures/csv/ndjson.csv
similarity index 100%
rename from test/fixtures/csv/ldjson.csv
rename to test/fixtures/csv/ndjson.csv
diff --git a/test/fixtures/json/ldjson.json b/test/fixtures/json/ndjson.json
similarity index 100%
rename from test/fixtures/json/ldjson.json
rename to test/fixtures/json/ndjson.json
diff --git a/test/fixtures/json/ldjsonInvalid.json b/test/fixtures/json/ndjsonInvalid.json
similarity index 100%
rename from test/fixtures/json/ldjsonInvalid.json
rename to test/fixtures/json/ndjsonInvalid.json
diff --git a/test/index.js b/test/index.js
index ae578459..9e6c8cea 100644
--- a/test/index.js
+++ b/test/index.js
@@ -5,7 +5,7 @@ const test = require('tape');
const json2csv = require('../lib/json2csv');
const Json2csvParser = json2csv.Parser;
const Json2csvTransform = json2csv.Transform;
-const parseLdJson = require('../lib/parse-ldjson');
+const parseNdJson = require('../lib/parse-ndjson');
const loadFixtures = require('./helpers/load-fixtures');
Promise.all([
@@ -632,33 +632,33 @@ Promise.all([
// Tests for Streaming API
// =======================================================
- test('should handle ld-json', (t) => {
+ test('should handle ndjson', (t) => {
const opts = {
fields: ['carModel', 'price', 'color', 'transmission'],
- ldjson: true
+ ndjson: true
};
const transform = new Json2csvTransform(opts);
- const processor = jsonFixturesStreams.ldjson().pipe(transform);
+ const processor = jsonFixturesStreams.ndjson().pipe(transform);
let csv = '';
processor
.on('data', chunk => (csv += chunk.toString()))
.on('end', () => {
- t.equal(csv, csvFixtures.ldjson);
+ t.equal(csv, csvFixtures.ndjson);
t.end();
})
.on('error', err => t.notOk(err));
});
- test('should error on invalid ld-json input data', (t) => {
+ test('should error on invalid ndjson input data', (t) => {
const opts = {
fields: ['carModel', 'price', 'color', 'transmission'],
- ldjson: true
+ ndjson: true
};
const transform = new Json2csvTransform(opts);
- const processor = jsonFixturesStreams.ldjsonInvalid().pipe(transform);
+ const processor = jsonFixturesStreams.ndjsonInvalid().pipe(transform);
processor.on('finish', () => {
t.notOk(true);
@@ -1538,7 +1538,7 @@ Promise.all([
});
// =======================================================
- // Test for parseLdJson
+  // Test for parseNdJson
// =======================================================
test('should output a string', (t) => {
@@ -1560,7 +1560,7 @@ Promise.all([
test('should parse line-delimited JSON', (t) => {
const input = '{"foo":"bar"}\n{"foo":"qux"}';
- const parsed = parseLdJson(input);
+      const parsed = parseNdJson(input);
t.equal(parsed.length, 2, 'parsed input has correct length');
t.end();