From 875c6498d608e9788cd9eaec606c42907e9fb9a8 Mon Sep 17 00:00:00 2001
From: nginnever
Date: Mon, 9 May 2016 23:20:44 -0700
Subject: [PATCH] update readme

---
 README.md | 133 ++++++++++++++++++++++++++++++++++++++++--------------
 1 file changed, 98 insertions(+), 35 deletions(-)

diff --git a/README.md b/README.md
index 851457a4..aee63d8c 100644
--- a/README.md
+++ b/README.md
@@ -10,80 +10,143 @@ IPFS unixFS Engine
 [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-unixfs-engine.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs-engine)
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)

-## Example
+## Example Importer

 Let's create a little directory to import:
 ```sh
 $ cd /tmp
 $ mkdir foo
 $ echo 'hello' > foo/bar
-$ echo 'warld' > foo/quux
+$ echo 'world' > foo/quux
 ```

 And write the importing logic:
 ```js
 // Dependencies to create a DAG Service (where the dir will be imported into)
-var memStore = require('abstract-blob-store')
-var ipfsRepo = require('ipfs-repo')
-var ipfsBlock = require('ipfs-block')
-var ipfsBlockService = require('ipfs-block')
-var ipfsMerkleDag = require('ipfs-merkle-dag')
+const memStore = require('abstract-blob-store')
+const ipfsRepo = require('ipfs-repo')
+const ipfsBlock = require('ipfs-block')
+const ipfsBlockService = require('ipfs-block-service')
+const ipfsMerkleDag = require('ipfs-merkle-dag')
+const fs = require('fs')

-var repo = new ipfsRepo('', { stores: memStore })
-var blocks = new ipfsBlockService(repo)
-var dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new ipfsRepo('', { stores: memStore })
+const blocks = new ipfsBlockService(repo)
+const dag = new ipfsMerkleDag.DAGService(blocks)

-var ipfsData = require('ipfs-unixfs-engine')
-
-// Import /tmp/foo
-ipfsData.import('/tmp/foo', dag, {
-  recursive: true
-}, done)
+const Importer = require('ipfs-unixfs-engine').importer
+const add = new Importer(dag)

+// An array to hold the file/dir info returned by the importer
 // A root DAG Node is received upon completion
-function done (err, rootStat) {
-  if (err) { throw err }
-  console.log(rootStat)
-}
+
+const res = []
+
+// Create inputs for the files /tmp/foo/bar and /tmp/foo/quux
+
+const rs = fs.createReadStream('/tmp/foo/bar')
+const rs2 = fs.createReadStream('/tmp/foo/quux')
+const input = { path: '/tmp/foo/bar', stream: rs }
+const input2 = { path: '/tmp/foo/quux', stream: rs2 }
+
+// Listen for the data event from the importer stream
+
+add.on('data', (info) => {
+  res.push(info)
+})
+
+// The end event of the stream signals that the importer is done
+
+add.on('end', () => {
+  console.log('Finished adding files!')
+})
+
+// Call write on the importer to add the file/stream tuples
+
+add.write(input)
+add.write(input2)
+add.end()
 ```

-When run, the stat of root DAG Node is outputted:
+When run, a stat of the DAG node is emitted on each data event, for every file and directory up to the root:

 ```
-{ Hash: <multihash>,
+{ multihash: <multihash>,
+  Size: 39243,
+  path: '/tmp/foo/bar' }
+
+{ multihash: <multihash>,
   Size: 59843,
-  Name: 'foo' }
+  path: '/tmp/foo/quux' }
+
+{ multihash: <multihash>,
+  Size: 93242,
+  path: '/tmp/foo' }
+
+{ multihash: <multihash>,
+  Size: 94234,
+  path: '/tmp' }
 ```

 ## API

 ```js
-var importer = require('ipfs-data-importing')
+const Importer = require('ipfs-unixfs-engine').importer
 ```

-### importer.import(target, dagService, opts, cb)
+### const add = new Importer(dag)

-`target` can be a `string`, `Buffer`, or `Stream`. When it's a string, the file
-or directory structure rooted on the filesystem at `target` is imported, with
-the hierarchy preserved. If a Buffer or Stream, a single DAG node will be
-imported representing the buffer or stream's contents.
+The importer is a duplex stream in object mode that accepts tuples of the
+form `{ path, stream }`: a file path and a readable stream of its data. Write
+as many files as you like, then call `end()` to signal that there is no more
+input. Listen for `data` events to receive the multihash, size, and path of
+each file and directory added; the `end` event fires once the importer has
+finished. Input paths that contain directories will have that hierarchy
+preserved in the resulting DAG nodes.

 Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance
-`dagService`. Accepts the following `opts`:
+`dagService`.
+
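+Because the importer is a duplex stream, you can also pipe an object stream
+of `{ path, stream }` tuples into it rather than calling `write()` by hand.
+A minimal sketch (assuming the same `Importer` and `dag` as in the example
+above, and standard Node object-mode stream behaviour):
+
+```js
+const stream = require('stream')
+const fs = require('fs')
+
+// An object-mode readable stream that we push file tuples into by hand
+const files = new stream.Readable({ objectMode: true })
+files._read = () => {}
+
+files.push({ path: '/tmp/foo/bar', stream: fs.createReadStream('/tmp/foo/bar') })
+files.push({ path: '/tmp/foo/quux', stream: fs.createReadStream('/tmp/foo/quux') })
+files.push(null) // signal that there are no more files
+
+// Pipe the tuples through the importer and log each node it emits
+// (assumes the importer accepts piped input like any object-mode duplex stream)
+files.pipe(new Importer(dag)).on('data', (info) => {
+  console.log(info.path)
+})
+```
+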
+## Example Exporter
+
+```js
+const memStore = require('abstract-blob-store')
+const ipfsRepo = require('ipfs-repo')
+const ipfsBlockService = require('ipfs-block-service')
+const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Exporter = require('ipfs-unixfs-engine').exporter
+
+const repo = new ipfsRepo('', { stores: memStore })
+const blocks = new ipfsBlockService(repo)
+const dag = new ipfsMerkleDag.DAGService(blocks)
+
+// Create an export stream, given the multihash of the node you want to
+// export and a dag service
+
+const exportEvent = Exporter(hash, dag)
+
+// Pipe the returned file stream to the console
+
+exportEvent.on('file', (result) => {
+  result.stream.pipe(process.stdout)
+})
+```
+
+## API
+```js
+const Exporter = require('ipfs-unixfs-engine').exporter
+```

-- `recursive`: whether to recurse into directories. Defaults to `false`.
+The exporter is an event emitter that fetches the node matching a given
+multihash from the DAG service and emits a `file` event carrying the file's
+path and a readable stream of its contents.

-Calls the callback `cb(err, stat)` on completion or error, where `stat` is an
-object with the `Hash`, `Size`, and `Name` of the root
-[`DAGNode`](https://github.com/vijayee/js-ipfs-merkle-dag/).

 ## install

 With [npm](https://npmjs.org/) installed, run

 ```
-$ npm install ipfs-data-importing
+$ npm install ipfs-unixfs-engine
 ```

 ## license