diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d8867d18..48c5525e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -22,7 +22,7 @@ jobs: test-node: needs: check runs-on: ${{ matrix.os }} - name: Test ${{ matrix.project }} node + name: test ${{ matrix.project }} node ${{ matrix.node }} strategy: matrix: os: [windows-latest, ubuntu-latest, macos-latest] @@ -37,6 +37,7 @@ jobs: with: node-version: ${{ matrix.node }} - run: npm install + - run: npm run build - run: npm run test -- --scope=${{ matrix.project }} -- -- --cov -t node test-browser: needs: check @@ -60,6 +61,7 @@ jobs: with: node-version: 16 - run: npm install + - run: npm run build - run: npm run test -- --scope=${{ matrix.project }} -- -- -t ${{ matrix.type }} -- --browser ${{ matrix.browser }} # test-electron: # needs: check @@ -80,6 +82,7 @@ jobs: # with: # node-version: 16 # - run: npm install +# - run: npm run build # - uses: GabrielBB/xvfb-action@v1 # with: # run: npm run test -- --scope=${{ matrix.project }} -- -- -t ${{ matrix.type }} --bail diff --git a/packages/ipfs-repo-migrations/.aegir.js b/packages/ipfs-repo-migrations/.aegir.cjs similarity index 100% rename from packages/ipfs-repo-migrations/.aegir.js rename to packages/ipfs-repo-migrations/.aegir.cjs diff --git a/packages/ipfs-repo-migrations/README.md b/packages/ipfs-repo-migrations/README.md index 83eb3685..7181fcca 100644 --- a/packages/ipfs-repo-migrations/README.md +++ b/packages/ipfs-repo-migrations/README.md @@ -77,13 +77,13 @@ This framework: ### Use in Node.js ```js -const migrations = require('ipfs-repo-migrations') +import migrations from 'ipfs-repo-migrations' ``` ### Use in a browser with browserify, webpack or any other bundler ```js -const migrations = require('ipfs-repo-migrations') +import migrations from 'ipfs-repo-migrations' ``` ## Usage @@ -91,7 +91,7 @@ const migrations = require('ipfs-repo-migrations') Example: ```js -const migrations = 
require('ipfs-repo-migrations') +import migrations from 'ipfs-repo-migrations' const repoPath = 'some/repo/path' const currentRepoVersion = 7 diff --git a/packages/ipfs-repo-migrations/migrations/index.js b/packages/ipfs-repo-migrations/migrations/index.js index b1070fa5..fc10e763 100644 --- a/packages/ipfs-repo-migrations/migrations/index.js +++ b/packages/ipfs-repo-migrations/migrations/index.js @@ -1,4 +1,7 @@ -'use strict' +import { migration as migration8 } from './migration-8/index.js' +import { migration as migration9 } from './migration-9/index.js' +import { migration as migration10 } from './migration-10/index.js' +import { migration as migration11 } from './migration-11/index.js' /** * @type {import('../src/types').Migration} */ @@ -12,7 +15,7 @@ const emptyMigration = { empty: true } -module.exports = [ +export default [ Object.assign({ version: 1 }, emptyMigration), Object.assign({ version: 2 }, emptyMigration), Object.assign({ version: 3 }, emptyMigration), @@ -20,8 +23,8 @@ module.exports = [ Object.assign({ version: 5 }, emptyMigration), Object.assign({ version: 6 }, emptyMigration), Object.assign({ version: 7 }, emptyMigration), - require('./migration-8'), - require('./migration-9'), - require('./migration-10'), - require('./migration-11') + migration8, + migration9, + migration10, + migration11 ] diff --git a/packages/ipfs-repo-migrations/migrations/migration-10/index.js b/packages/ipfs-repo-migrations/migrations/migration-10/index.js index e9de5a56..97a24a79 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-10/index.js +++ b/packages/ipfs-repo-migrations/migrations/migration-10/index.js @@ -1,10 +1,7 @@ -'use strict' -const { - findLevelJs -} = require('../../src/utils') -const { fromString } = require('uint8arrays/from-string') -const { toString } = require('uint8arrays/to-string') +import { findLevelJs } from '../../src/utils.js' +import { fromString } from 'uint8arrays/from-string' +import { toString } from 'uint8arrays/to-string' 
/** * @typedef {import('../../src/types').Migration} Migration @@ -131,7 +128,7 @@ async function process (backends, onProgress, fn) { } /** @type {Migration} */ -module.exports = { +export const migration = { version: 10, description: 'Migrates datastore-level keys to binary', migrate: (backends, onProgress = () => {}) => { @@ -174,7 +171,7 @@ function withEach (db, fn) { try { req = op.type === 'del' ? store.delete(key) : store.put(op.value, key) - } catch (err) { + } catch (/** @type {any} */ err) { error = err transaction.abort() return diff --git a/packages/ipfs-repo-migrations/migrations/migration-11/index.js b/packages/ipfs-repo-migrations/migrations/migration-11/index.js index 8bf7152d..60216257 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-11/index.js +++ b/packages/ipfs-repo-migrations/migrations/migration-11/index.js @@ -1,6 +1,5 @@ -'use strict' -const { Key } = require('interface-datastore') +import { Key } from 'interface-datastore/key' const MFS_ROOT_KEY = new Key('/local/filesroot') @@ -14,9 +13,11 @@ async function storeMfsRootInDatastore (backends, onProgress = () => {}) { await backends.root.open() await backends.datastore.open() - const root = await backends.root.get(MFS_ROOT_KEY) - await backends.datastore.put(MFS_ROOT_KEY, root) - await backends.root.delete(MFS_ROOT_KEY) + if (await backends.root.has(MFS_ROOT_KEY)) { + const root = await backends.root.get(MFS_ROOT_KEY) + await backends.datastore.put(MFS_ROOT_KEY, root) + await backends.root.delete(MFS_ROOT_KEY) + } await backends.datastore.close() await backends.root.close() @@ -34,9 +35,11 @@ async function storeMfsRootInRoot (backends, onProgress = () => {}) { await backends.root.open() await backends.datastore.open() - const root = await backends.datastore.get(MFS_ROOT_KEY) - await backends.root.put(MFS_ROOT_KEY, root) - await backends.datastore.delete(MFS_ROOT_KEY) + if (await backends.datastore.has(MFS_ROOT_KEY)) { + const root = await 
backends.datastore.get(MFS_ROOT_KEY) + await backends.root.put(MFS_ROOT_KEY, root) + await backends.datastore.delete(MFS_ROOT_KEY) + } await backends.datastore.close() await backends.root.close() @@ -45,7 +48,7 @@ async function storeMfsRootInRoot (backends, onProgress = () => {}) { } /** @type {import('../../src/types').Migration} */ -module.exports = { +export const migration = { version: 11, description: 'Store mfs root in the datastore', migrate: storeMfsRootInDatastore, diff --git a/packages/ipfs-repo-migrations/migrations/migration-8/index.js b/packages/ipfs-repo-migrations/migrations/migration-8/index.js index 1bfa1c50..ed9c0091 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-8/index.js +++ b/packages/ipfs-repo-migrations/migrations/migration-8/index.js @@ -1,13 +1,13 @@ -'use strict' -const { CID } = require('multiformats/cid') -const Key = require('interface-datastore').Key -const log = require('debug')('ipfs:repo:migrator:migration-8') +import { CID } from 'multiformats/cid' +import { Key } from 'interface-datastore/key' +import debug from 'debug' +import length from 'it-length' +import { base32 } from 'multiformats/bases/base32' +import * as raw from 'multiformats/codecs/raw' +import * as mhd from 'multiformats/hashes/digest' -const length = require('it-length') -const { base32 } = require('multiformats/bases/base32') -const raw = require('multiformats/codecs/raw') -const mhd = require('multiformats/hashes/digest') +const log = debug('ipfs:repo:migrator:migration-8') /** * @typedef {import('../../src/types').Migration} Migration @@ -41,7 +41,7 @@ function keyToMultihash (key) { const multihashStr = base32.encode(multihash).slice(1).toUpperCase() return new Key(`/${multihashStr}`, false) - } catch (err) { + } catch (/** @type {any} */ err) { return key } } @@ -105,7 +105,7 @@ async function process (backends, onProgress, keyFunction) { } /** @type {Migration} */ -module.exports = { +export const migration = { version: 8, description: 
'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32', migrate: (backends, onProgress = () => {}) => { diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/index.js b/packages/ipfs-repo-migrations/migrations/migration-9/index.js index 0551dd2d..f1d332bf 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-9/index.js +++ b/packages/ipfs-repo-migrations/migrations/migration-9/index.js @@ -1,14 +1,13 @@ -'use strict' - -const { CID } = require('multiformats/cid') -const dagPb = require('@ipld/dag-pb') -const cbor = require('cborg') -const pinset = require('./pin-set') -const { cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') -const length = require('it-length') -const { sha256 } = require('multiformats/hashes/sha2') -const mhd = require('multiformats/hashes/digest') -const { base32 } = require('multiformats/bases/base32') + +import { CID } from 'multiformats/cid' +import * as dagPb from '@ipld/dag-pb' +import * as cbor from 'cborg' +import * as pinset from './pin-set.js' +import { cidToKey, PIN_DS_KEY, PinTypes } from './utils.js' +import length from 'it-length' +import { sha256 } from 'multiformats/hashes/sha2' +import * as mhd from 'multiformats/hashes/digest' +import { base32 } from 'multiformats/bases/base32' /** * @typedef {import('../../src/types').Migration} Migration @@ -153,7 +152,7 @@ async function process (backends, onProgress, fn) { } /** @type {Migration} */ -module.exports = { +export const migration = { version: 9, description: 'Migrates pins to datastore', migrate: (backends, onProgress = () => {}) => { diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js b/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js index d3e47449..78c2947e 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js @@ -1,24 +1,18 @@ -'use strict' - -const { CID } = 
require('multiformats/cid') -const { - ipfs: { - pin: { - Set: PinSet - } - } -} = require('./pin') +import { CID } from 'multiformats/cid' +import { ipfs } from './pin.js' // @ts-ignore -const fnv1a = require('fnv1a') -const varint = require('varint') -const dagPb = require('@ipld/dag-pb') -const { DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') -const { concat: uint8ArrayConcat } = require('uint8arrays/concat') -const { compare: uint8ArrayCompare } = require('uint8arrays/compare') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const { sha256 } = require('multiformats/hashes/sha2') +import fnv1a from 'fnv1a' +import varint from 'varint' +import * as dagPb from '@ipld/dag-pb' +import { DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } from './utils.js' +import { concat as uint8ArrayConcat } from 'uint8arrays/concat' +import { compare as uint8ArrayCompare } from 'uint8arrays/compare' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { sha256 } from 'multiformats/hashes/sha2' + +const PinSet = ipfs.pin.Set /** * @typedef {import('interface-datastore').Datastore} Datastore @@ -125,7 +119,7 @@ async function * walkItems (blockstore, node) { * @param {PBNode} rootNode * @param {string} name */ -async function * loadSet (blockstore, rootNode, name) { +export async function * loadSet (blockstore, rootNode, name) { const link = rootNode.Links.find(l => l.Name === name) if (!link) { @@ -253,7 +247,7 @@ function storeItems (blockstore, items) { * @param {string} type * @param {CID[]} cids */ -async function storeSet (blockstore, type, cids) { +export async function storeSet (blockstore, type, cids) { const rootNode = await storeItems(blockstore, cids.map(cid => { return { key: cid @@ -273,8 +267,3 @@ async function storeSet (blockstore, type, cids) { Hash: cid 
} } - -module.exports = { - loadSet, - storeSet -} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin.js b/packages/ipfs-repo-migrations/migrations/migration-9/pin.js index 21b509e9..47465c1c 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-9/pin.js +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin.js @@ -1,22 +1,20 @@ /*eslint-disable*/ -"use strict"; - -var $protobuf = require("protobufjs/minimal"); +import $protobuf from "protobufjs/minimal.js"; // Common aliases -var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; +const $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; // Exported root namespace -var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); +const $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); -$root.ipfs = (function() { +export const ipfs = $root.ipfs = (() => { /** * Namespace ipfs. * @exports ipfs * @namespace */ - var ipfs = {}; + const ipfs = {}; ipfs.pin = (function() { @@ -25,7 +23,7 @@ $root.ipfs = (function() { * @memberof ipfs * @namespace */ - var pin = {}; + const pin = {}; pin.Set = (function() { @@ -207,4 +205,4 @@ $root.ipfs = (function() { return ipfs; })(); -module.exports = $root; +export { $root as default }; diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/utils.js b/packages/ipfs-repo-migrations/migrations/migration-9/utils.js index a479ccc0..92bb6e9d 100644 --- a/packages/ipfs-repo-migrations/migrations/migration-9/utils.js +++ b/packages/ipfs-repo-migrations/migrations/migration-9/utils.js @@ -1,15 +1,14 @@ -'use strict' -const { Key } = require('interface-datastore') -const { base32 } = require('multiformats/bases/base32') -const { CID } = require('multiformats') +import { Key } from 'interface-datastore/key' +import { base32 } from 'multiformats/bases/base32' +import { CID } from 'multiformats/cid' -const PIN_DS_KEY = new Key('/local/pins') -const 
DEFAULT_FANOUT = 256 -const MAX_ITEMS = 8192 -const EMPTY_KEY = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') +export const PIN_DS_KEY = new Key('/local/pins') +export const DEFAULT_FANOUT = 256 +export const MAX_ITEMS = 8192 +export const EMPTY_KEY = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') -const PinTypes = { +export const PinTypes = { direct: 'direct', recursive: 'recursive' } @@ -17,15 +16,6 @@ const PinTypes = { /** * @param {import('multiformats').CID} cid */ -function cidToKey (cid) { +export function cidToKey (cid) { return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase().substring(1)}`) } - -module.exports = { - PIN_DS_KEY, - DEFAULT_FANOUT, - MAX_ITEMS, - EMPTY_KEY, - PinTypes, - cidToKey -} diff --git a/packages/ipfs-repo-migrations/package.json b/packages/ipfs-repo-migrations/package.json index f0dff7b2..50202562 100644 --- a/packages/ipfs-repo-migrations/package.json +++ b/packages/ipfs-repo-migrations/package.json @@ -12,13 +12,22 @@ "url": "https://github.com/ipfs/js-ipfs-repo/issues/" }, "license": "(Apache-2.0 OR MIT)", + "main": "src/index.js", + "types": "types/src/index.d.ts", + "type": "module", "files": [ - "migrations", - "src", - "dist" + "*", + "!**/*.tsbuildinfo" ], - "types": "./dist/src/index.d.ts", - "main": "src/index.js", + "eslintConfig": { + "extends": "ipfs", + "parserOptions": { + "sourceType": "module" + } + }, + "publishConfig": { + "directory": "dist" + }, "browser": { "datastore-fs": "datastore-level" }, @@ -28,10 +37,11 @@ }, "scripts": { "clean": "rimraf types dist", - "build": "run-s build:*", - "build:proto": "pbjs -t static-module -w commonjs --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o migrations/migration-9/pin.js migrations/migration-9/pin.proto", - "build:proto-types": "pbts -o migrations/migration-9/pin.d.ts migrations/migration-9/pin.js", - "build:source": "aegir build", + "generate": "run-s 
generate:*", + "generate:proto": "pbjs -t static-module -w es6 --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o migrations/migration-9/pin.js migrations/migration-9/pin.proto", + "generate:proto-types": "pbts -o migrations/migration-9/pin.d.ts migrations/migration-9/pin.js", + "build": "aegir build", + "pretest": "aegir build --esm-tests", "test": "aegir test", "lint": "aegir ts -p check && aegir lint", "release": "aegir release", diff --git a/packages/ipfs-repo-migrations/scripts/node-globals.js b/packages/ipfs-repo-migrations/scripts/node-globals.js index 8c4e233b..2a1cefe2 100644 --- a/packages/ipfs-repo-migrations/scripts/node-globals.js +++ b/packages/ipfs-repo-migrations/scripts/node-globals.js @@ -1,3 +1,3 @@ // file: node-globals.js // @ts-nocheck -export const { Buffer } = require('buffer') +export { Buffer } from 'buffer' diff --git a/packages/ipfs-repo-migrations/src/errors.js b/packages/ipfs-repo-migrations/src/errors.js index 1bf39295..34fa7b95 100644 --- a/packages/ipfs-repo-migrations/src/errors.js +++ b/packages/ipfs-repo-migrations/src/errors.js @@ -1,17 +1,16 @@ -'use strict' /** * Exception raised when trying to revert migration that is not possible * to revert. */ -class NonReversibleMigrationError extends Error { +export class NonReversibleMigrationError extends Error { /** * @param {string} message */ constructor (message) { super(message) this.name = 'NonReversibleMigrationError' - this.code = 'ERR_NON_REVERSIBLE_MIGRATION' + this.code = NonReversibleMigrationError.code this.message = message } } @@ -20,14 +19,14 @@ NonReversibleMigrationError.code = 'ERR_NON_REVERSIBLE_MIGRATION' /** * Exception raised when repo is not initialized. 
*/ -class NotInitializedRepoError extends Error { +export class NotInitializedRepoError extends Error { /** * @param {string} message */ constructor (message) { super(message) this.name = 'NotInitializedRepoError' - this.code = 'ERR_NOT_INITIALIZED_REPO' + this.code = NotInitializedRepoError.code this.message = message } } @@ -36,14 +35,14 @@ NotInitializedRepoError.code = 'ERR_NOT_INITIALIZED_REPO' /** * Exception raised when required parameter is not provided. */ -class RequiredParameterError extends Error { +export class RequiredParameterError extends Error { /** * @param {string} message */ constructor (message) { super(message) this.name = 'RequiredParameterError' - this.code = 'ERR_REQUIRED_PARAMETER' + this.code = RequiredParameterError.code this.message = message } } @@ -52,14 +51,14 @@ RequiredParameterError.code = 'ERR_REQUIRED_PARAMETER' /** * Exception raised when value is not valid. */ -class InvalidValueError extends Error { +export class InvalidValueError extends Error { /** * @param {string} message */ constructor (message) { super(message) this.name = 'InvalidValueError' - this.code = 'ERR_INVALID_VALUE' + this.code = InvalidValueError.code this.message = message } } @@ -68,23 +67,15 @@ InvalidValueError.code = 'ERR_INVALID_VALUE' /** * Exception raised when config is not passed. 
*/ -class MissingRepoOptionsError extends Error { +export class MissingRepoOptionsError extends Error { /** * @param {string} message */ constructor (message) { super(message) this.name = 'MissingRepoOptionsError' - this.code = 'ERR_MISSING_REPO_OPTIONS' + this.code = MissingRepoOptionsError.code this.message = message } } MissingRepoOptionsError.code = 'ERR_MISSING_REPO_OPTIONS' - -module.exports = { - NonReversibleMigrationError, - NotInitializedRepoError, - RequiredParameterError, - InvalidValueError, - MissingRepoOptionsError -} diff --git a/packages/ipfs-repo-migrations/src/index.js b/packages/ipfs-repo-migrations/src/index.js index 21d065a9..0d7f12de 100644 --- a/packages/ipfs-repo-migrations/src/index.js +++ b/packages/ipfs-repo-migrations/src/index.js @@ -1,11 +1,12 @@ /* eslint complexity: ["error", 27] */ -'use strict' -const defaultMigrations = require('../migrations') -const repoVersion = require('./repo/version') -const errors = require('./errors') -const { wrapBackends } = require('./utils') -const log = require('debug')('ipfs:repo:migrator') +import defaultMigrations from '../migrations/index.js' +import * as repoVersion from './repo/version.js' +import * as Errors from './errors.js' +import { wrapBackends } from './utils.js' +import debug from 'debug' + +const log = debug('ipfs:repo:migrator') /** * @typedef {import('./types').Migration} Migration @@ -20,7 +21,7 @@ const log = require('debug')('ipfs:repo:migrator') * * @param {Migration[]} [migrations] - Array of migrations to consider. If undefined, the bundled migrations are used. Mainly for testing purpose. */ -function getLatestMigrationVersion (migrations) { +export function getLatestMigrationVersion (migrations) { migrations = migrations || defaultMigrations if (!Array.isArray(migrations) || migrations.length === 0) { @@ -42,7 +43,7 @@ function getLatestMigrationVersion (migrations) { * @param {number} toVersion - Version to which the repo should be migrated. 
* @param {MigrationOptions} [options] - Options for migration */ -async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { +export async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations if (!path) { @@ -106,7 +107,7 @@ async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = f await migration.migrate(backends, progressCallback) } - } catch (e) { + } catch (/** @type {any} */ e) { const lastSuccessfullyMigratedVersion = migration.version - 1 log(`An exception was raised during execution of migration. Setting the repo's version to last successfully migrated version: ${lastSuccessfullyMigratedVersion}`) @@ -142,7 +143,7 @@ async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = f * @param {number} toVersion - Version to which the repo will be reverted. * @param {MigrationOptions} [options] - Options for the reversion */ -async function revert (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { +export async function revert (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations if (!path) { @@ -209,7 +210,7 @@ async function revert (path, backends, repoOptions, toVersion, { ignoreLock = fa await migration.revert(backends, progressCallback) } - } catch (e) { + } catch (/** @type {any} */ e) { const lastSuccessfullyRevertedVersion = migration.version log(`An exception was raised during execution of migration. 
Setting the repo's version to last successfully reverted version: ${lastSuccessfullyRevertedVersion}`) await repoVersion.setVersion(lastSuccessfullyRevertedVersion, backends) @@ -262,10 +263,6 @@ function verifyAvailableMigrations (migrations, fromVersion, toVersion, checkRev } } -module.exports = { - getCurrentRepoVersion: repoVersion.getVersion, - getLatestMigrationVersion, - errors, - migrate, - revert -} +export const getCurrentRepoVersion = repoVersion.getVersion +export const errors = Errors +export const migrations = defaultMigrations diff --git a/packages/ipfs-repo-migrations/src/repo/init.js b/packages/ipfs-repo-migrations/src/repo/init.js index 30374ee6..e6ea4340 100644 --- a/packages/ipfs-repo-migrations/src/repo/init.js +++ b/packages/ipfs-repo-migrations/src/repo/init.js @@ -1,13 +1,14 @@ -'use strict' -const log = require('debug')('ipfs:repo:migrator:repo:init') -const { CONFIG_KEY, VERSION_KEY } = require('../utils') -const { MissingRepoOptionsError } = require('../errors') +import debug from 'debug' +import { CONFIG_KEY, VERSION_KEY } from '../utils.js' +import { MissingRepoOptionsError } from '../errors.js' + +const log = debug('ipfs:repo:migrator:repo:init') /** * @param {import('../types').Backends} backends */ -async function isRepoInitialized (backends) { +export async function isRepoInitialized (backends) { if (!backends) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } @@ -25,7 +26,7 @@ async function isRepoInitialized (backends) { } return true - } catch (e) { + } catch (/** @type {any} */ e) { log('While checking if repo is initialized error was thrown: ' + e.message) return false } finally { @@ -36,7 +37,3 @@ async function isRepoInitialized (backends) { } } } - -module.exports = { - isRepoInitialized -} diff --git a/packages/ipfs-repo-migrations/src/repo/version.js b/packages/ipfs-repo-migrations/src/repo/version.js index 9ee56c50..daa4b5ed 100644 --- 
a/packages/ipfs-repo-migrations/src/repo/version.js +++ b/packages/ipfs-repo-migrations/src/repo/version.js @@ -1,10 +1,9 @@ -'use strict' -const repoInit = require('./init') -const { MissingRepoOptionsError, NotInitializedRepoError } = require('../errors') -const { VERSION_KEY } = require('../utils') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') +import { isRepoInitialized } from './init.js' +import { MissingRepoOptionsError, NotInitializedRepoError } from '../errors.js' +import { VERSION_KEY } from '../utils.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' /** * Function that has responsibility to retrieve version of repo from its root datastore's instance. @@ -13,8 +12,8 @@ const { toString: uint8ArrayToString } = require('uint8arrays/to-string') * * @param {import('../types').Backends} backends */ -async function getVersion (backends) { - if (!(await repoInit.isRepoInitialized(backends))) { +export async function getVersion (backends) { + if (!(await isRepoInitialized(backends))) { throw new NotInitializedRepoError('Repo is not initialized!') } @@ -34,7 +33,7 @@ async function getVersion (backends) { * @param {number} version * @param {import('../types').Backends} backends */ -async function setVersion (version, backends) { +export async function setVersion (version, backends) { if (!backends) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } @@ -44,8 +43,3 @@ async function setVersion (version, backends) { await store.put(VERSION_KEY, uint8ArrayFromString(String(version))) await store.close() } - -module.exports = { - getVersion, - setVersion -} diff --git a/packages/ipfs-repo-migrations/src/types.d.ts b/packages/ipfs-repo-migrations/src/types.ts similarity index 100% rename from 
packages/ipfs-repo-migrations/src/types.d.ts rename to packages/ipfs-repo-migrations/src/types.ts diff --git a/packages/ipfs-repo-migrations/src/utils.js b/packages/ipfs-repo-migrations/src/utils.js index c1f7b490..eaf5b603 100644 --- a/packages/ipfs-repo-migrations/src/utils.js +++ b/packages/ipfs-repo-migrations/src/utils.js @@ -1,14 +1,13 @@ -'use strict' -const { Key } = require('interface-datastore/key') -const { Errors } = require('datastore-core') +import { Key } from 'interface-datastore/key' +import { notFoundError } from 'datastore-core/errors' /** * @typedef {import('interface-datastore').Datastore} Datastore */ -const CONFIG_KEY = new Key('/config') -const VERSION_KEY = new Key('/version') +export const CONFIG_KEY = new Key('/config') +export const VERSION_KEY = new Key('/version') /** * Level dbs wrap level dbs that wrap level dbs. Find a level-js @@ -17,7 +16,7 @@ const VERSION_KEY = new Key('/version') * @param {Datastore} store * @returns {Datastore | undefined} */ -function findLevelJs (store) { +export function findLevelJs (store) { let db = store // @ts-ignore @@ -39,7 +38,7 @@ function findLevelJs (store) { * @param {Datastore} store * @returns {Promise} */ -async function hasWithFallback (key, has, store) { +export async function hasWithFallback (key, has, store) { const result = await has(key) if (result) { @@ -86,7 +85,7 @@ async function getWithFallback (key, get, has, store) { const levelJs = findLevelJs(store) if (!levelJs) { - throw Errors.notFoundError() + throw notFoundError() } return new Promise((resolve, reject) => { @@ -101,7 +100,7 @@ async function getWithFallback (key, get, has, store) { return resolve(req.result) } - reject(Errors.notFoundError()) + reject(notFoundError()) } }) } @@ -128,7 +127,7 @@ function wrapStore (store) { /** * @param {import('./types').Backends} backends */ -function wrapBackends (backends) { +export function wrapBackends (backends) { return { ...backends, root: wrapStore(backends.root), @@ -137,12 +136,3 
@@ function wrapBackends (backends) { keys: wrapStore(backends.keys) } } - -module.exports = { - wrapBackends, - hasWithFallback, - getWithFallback, - findLevelJs, - CONFIG_KEY, - VERSION_KEY -} diff --git a/packages/ipfs-repo-migrations/test/browser.js b/packages/ipfs-repo-migrations/test/browser.js index 49d01b18..194d16c1 100644 --- a/packages/ipfs-repo-migrations/test/browser.js +++ b/packages/ipfs-repo-migrations/test/browser.js @@ -1,14 +1,17 @@ /* eslint-env mocha */ -'use strict' -const { LevelDatastore } = require('datastore-level') -const { S3Datastore } = require('datastore-s3') -const { ShardingDatastore } = require('datastore-core/sharding') -const { NextToLast } = require('datastore-core/shard') -const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') -const mockS3 = require('./fixtures/mock-s3') -const S3 = require('aws-sdk').S3 -const { createRepo } = require('./fixtures/repo') +import { LevelDatastore } from 'datastore-level' +import { S3Datastore } from 'datastore-s3' +import { ShardingDatastore } from 'datastore-core/sharding' +import { NextToLast } from 'datastore-core/shard' +import { BlockstoreDatastoreAdapter } from 'blockstore-datastore-adapter' +import { mockS3 } from './fixtures/mock-s3.js' +import S3 from 'aws-sdk/clients/s3.js' +import { createRepo } from './fixtures/repo.js' +import { test as versionTests } from './version-test.js' +import { test as migrationTests } from './migrations/index.js' +import { test as initTests } from './init-test.js' +import { test as integrationTests } from './integration-test.js' /** * @typedef {import('../src/types').Backends} Backends @@ -132,19 +135,19 @@ CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => { describe(name, () => { describe('version tests', () => { - require('./version-test')(setup, cleanup) + versionTests(setup, cleanup) }) describe('migrations tests', () => { - require('./migrations')(setup, cleanup) + migrationTests(setup, cleanup) }) describe('init 
tests', () => { - require('./init-test')(setup, cleanup) + initTests(setup, cleanup) }) describe('integration tests', () => { - require('./integration-test')(setup, cleanup) + integrationTests(setup, cleanup) }) }) }) diff --git a/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js b/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js index f6ce4842..f6a8eacb 100644 --- a/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js +++ b/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js @@ -1,19 +1,19 @@ -'use strict' /* eslint-disable no-console */ // nb. must be ipfs@0.48.0 or below // @ts-expect-error not in package.json -const IPFS = require('ipfs') +import IPFS from 'ipfs' +import { Key } from 'interface-datastore/key' +import fs from 'fs' +import { CarWriter } from '@ipld/car' +import path from 'path' +import { Readable } from 'stream' + +const PIN_DS_KEY = new Key('/local/pins') const { CID } = IPFS -const { Key } = require('interface-datastore') -const PIN_DS_KEY = new Key('/local/pins') -const fs = require('fs') -const { CarWriter } = require('@ipld/car') -const path = require('path') -const { Readable } = require('stream') const TO_PIN = 9000 diff --git a/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js b/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js index 1ea8d6b8..00ecea8c 100644 --- a/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js +++ b/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js @@ -1,9 +1,8 @@ -'use strict' -const { expect } = require('aegir/utils/chai') -const sinon = require('sinon') -const { Buffer } = require('buffer') -const AWS = require('aws-sdk') +import { expect } from 'aegir/utils/chai.js' +import sinon from 'sinon' +import { Buffer } from 'buffer' +import AWS from 'aws-sdk' class S3Error extends Error { /** @@ -51,7 +50,7 @@ const s3Reject = (err) => { * @param {import('aws-sdk/clients/s3')} s3 * @returns {void} */ -module.exports = function (s3) { +export 
function mockS3 (s3) { /** @type {Record} */ const storage = {} diff --git a/packages/ipfs-repo-migrations/test/fixtures/repo.js b/packages/ipfs-repo-migrations/test/fixtures/repo.js index b5e71b38..a434c91e 100644 --- a/packages/ipfs-repo-migrations/test/fixtures/repo.js +++ b/packages/ipfs-repo-migrations/test/fixtures/repo.js @@ -1,7 +1,6 @@ -'use strict' -const loadFixture = require('aegir/utils/fixtures') -const { CONFIG_KEY, VERSION_KEY } = require('../../src/utils') +import loadFixture from 'aegir/utils/fixtures.js' +import { CONFIG_KEY, VERSION_KEY } from '../../src/utils.js' /** * @typedef {import('../../src/types').Backends} Backends @@ -13,7 +12,7 @@ const { CONFIG_KEY, VERSION_KEY } = require('../../src/utils') * @param {*} prefix * @returns */ -async function createRepo (createBackends, prefix) { +export async function createRepo (createBackends, prefix) { const dir = `${prefix ? `${prefix}/` : ''}test-repo-for-${Date.now()}` const backends = createBackends(dir) @@ -29,15 +28,10 @@ async function createRepo (createBackends, prefix) { /** * @param {Backends} backends */ -async function initRepo (backends) { +export async function initRepo (backends) { const store = backends.root await store.open() await store.put(VERSION_KEY, loadFixture('test/fixtures/test-repo/version')) await store.put(CONFIG_KEY, loadFixture('test/fixtures/test-repo/config')) await store.close() } - -module.exports = { - createRepo, - initRepo -} diff --git a/packages/ipfs-repo-migrations/test/index.spec.js b/packages/ipfs-repo-migrations/test/index.spec.js index 6486f306..bbfee317 100644 --- a/packages/ipfs-repo-migrations/test/index.spec.js +++ b/packages/ipfs-repo-migrations/test/index.spec.js @@ -1,19 +1,22 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const sinon = require('sinon') -const { MemoryBlockstore } = require('blockstore-core/memory') -const { MemoryDatastore } = require('datastore-core') - -const migrator = 
require('../src/index') -const repoVersion = require('../src/repo/version') -const repoInit = require('../src/repo/init') -const errors = require('../src/errors') +import { expect } from 'aegir/utils/chai.js' +import sinon from 'sinon' +import { MemoryBlockstore } from 'blockstore-core/memory' +import { MemoryDatastore } from 'datastore-core/memory' +import * as migrator from '../src/index.js' +import { + RequiredParameterError, + InvalidValueError, + NonReversibleMigrationError +} from '../src/errors.js' +import { VERSION_KEY, CONFIG_KEY } from '../src/utils.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @typedef {import('../src/types').Migration} Migration * @typedef {import('../src/types').MigrationOptions} MigrationOptions + * @typedef {import('../src/types').RepoOptions} RepoOptions */ /** @@ -60,69 +63,44 @@ function createOptions () { } } -describe('index.js', () => { - /** - * @type {import('sinon').SinonStub} - */ - let getVersionStub - /** - * @type {import('sinon').SinonStub} - */ - let setVersionStub - /** - * @type {import('sinon').SinonStub} - */ - let lockStub - /** - * @type {import('sinon').SinonStub} - */ - let initStub - /** - * @type {import('sinon').SinonStub} - */ - let lockCloseStub - const repoOptions = { - repoLock: { - locked: () => Promise.resolve(false), - lock: () => Promise.resolve({ - close: () => Promise.resolve() - }) - }, - autoMigrate: true, - onMigrationProgress: () => {}, - repoOwner: true - } - - const backends = { +function createBackends () { + return { root: new MemoryDatastore(), blocks: new MemoryBlockstore(), datastore: new MemoryDatastore(), keys: new MemoryDatastore(), pins: new MemoryDatastore() } +} - beforeEach(() => { - // Reset all stubs - sinon.reset() - - initStub.resolves(true) - lockCloseStub.resolves() - lockStub.resolves({ close: lockCloseStub }) - }) - - before(() => { - getVersionStub = sinon.stub(repoVersion, 'getVersion') - setVersionStub = sinon.stub(repoVersion, 
'setVersion') - lockCloseStub = sinon.stub() - lockStub = sinon.stub(repoOptions.repoLock, 'lock') - initStub = sinon.stub(repoInit, 'isRepoInitialized') - }) +describe('index.js', () => { + /** @type {RepoOptions} */ + let repoOptions - after(() => { - getVersionStub.restore() - setVersionStub.restore() - lockStub.restore() - initStub.restore() + beforeEach(() => { + let locked = false + + repoOptions = { + repoLock: { + locked: sinon.stub().callsFake(() => { + return Promise.resolve(locked) + }), + lock: sinon.stub().callsFake(() => { + locked = true + + return Promise.resolve({ + close: () => { + locked = false + + return Promise.resolve() + } + }) + }) + }, + autoMigrate: true, + onMigrationProgress: () => {}, + repoOwner: true + } }) it('get version of the latest migration', () => { @@ -138,7 +116,7 @@ describe('index.js', () => { // @ts-expect-error invalid params return expect(migrator.revert(undefined, undefined, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error without backends argument', () => { @@ -146,23 +124,23 @@ describe('index.js', () => { // @ts-expect-error invalid params return expect(migrator.revert('/some/path', undefined, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error without repo options argument', () => { const options = createOptions() // @ts-expect-error invalid params - return expect(migrator.revert('/some/path', backends, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + 
return expect(migrator.revert('/some/path', createBackends(), undefined, undefined, options)) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error without toVersion argument', () => { const options = createOptions() // @ts-expect-error invalid params - return expect(migrator.revert('/some/path', backends, {}, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + return expect(migrator.revert('/some/path', createBackends(), {}, undefined, options)) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error with invalid toVersion argument', () => { @@ -171,55 +149,75 @@ describe('index.js', () => { return Promise.all( // @ts-expect-error invalid params - invalidValues.map((value) => expect(migrator.revert('/some/path', backends, repoOptions, value, options)) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)) + invalidValues.map((value) => expect(migrator.revert('/some/path', createBackends(), repoOptions, value, options)) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code)) ) }) it('should not revert if current repo version and toVersion matches', async () => { - getVersionStub.returns(2) const options = createOptions() - await expect(migrator.revert('/some/path', backends, repoOptions, 2, options)) - .to.eventually.be.fulfilled() + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() - expect(lockStub).to.have.property('called', false) + await migrator.revert('/some/path', backends, repoOptions, 2, options) + + for (const migration of options.migrations) { + 
expect(migration.revert).to.have.property('called', false) + } }) it('should not revert if current repo version is lower then toVersion', async () => { - getVersionStub.returns(2) const options = createOptions() + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('1')) + await backends.root.close() + await expect(migrator.revert('/some/path', backends, repoOptions, 3, options)) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code) - expect(lockStub).to.have.property('called', false) + for (const migration of options.migrations) { + expect(migration.revert).to.have.property('called', false) + } }) - it('should not allow to reverse migration that is not reversible', () => { + it('should not allow to reverse migration that is not reversible', async () => { const nonReversibleMigrationsMock = createMigrations() // @ts-expect-error invalid params nonReversibleMigrationsMock[2].revert = undefined - const options = { migrations: nonReversibleMigrationsMock } + const options = { + ...createOptions(), + migrations: nonReversibleMigrationsMock + } + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('4')) + await backends.root.close() - getVersionStub.returns(4) - return expect( - migrator.revert('/some/path', backends, repoOptions, 1, options) - ).to.eventually.be.rejectedWith(errors.NonReversibleMigrationError) - .with.property('code', errors.NonReversibleMigrationError.code) + await expect(migrator.revert('/some/path', backends, repoOptions, 1, options)) + .to.eventually.be.rejectedWith(NonReversibleMigrationError).with.property('code', 
NonReversibleMigrationError.code) }) it('should revert expected migrations', async () => { const options = createOptions() - getVersionStub.returns(3) + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('3')) + await backends.root.close() await expect(migrator.revert('/some/path', backends, repoOptions, 1, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('calledOnce', true) - expect(lockStub).to.have.property('calledOnce', true) - expect(setVersionStub.calledOnceWith(1, backends)).to.be.true() - // Checking migrations expect(options.migrations[3].revert).to.have.property('called', false) expect(options.migrations[2].revert).to.have.property('calledOnce', true) @@ -229,15 +227,16 @@ describe('index.js', () => { it('should revert one migration as expected', async () => { const options = createOptions() - getVersionStub.returns(2) + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() await expect(migrator.revert('/some/path', backends, repoOptions, 1, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('calledOnce', true) - expect(lockStub).to.have.property('calledOnce', true) - expect(setVersionStub.calledOnceWith(1, backends)).to.be.true() - // Checking migrations expect(options.migrations[3].revert).to.have.property('called', false) expect(options.migrations[2].revert).to.have.property('called', false) @@ -255,60 +254,81 @@ describe('index.js', () => { revert: sinon.stub().resolves() } ] - const options = { migrations: migrationsMock } - getVersionStub.returns(2) + const options = { + ...createOptions(), + migrations: migrationsMock + } + + const backends = createBackends() + await 
backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() await expect(migrator.revert('/some/path', backends, repoOptions, 1, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('calledOnce', true) - expect(lockStub).to.have.property('calledOnce', true) - expect(setVersionStub.calledOnceWith(1, backends)).to.be.true() - // Checking migrations expect(migrationsMock[0].revert).to.have.property('calledOnce', true) }) it('should not have any side-effects when in dry run', async () => { - const options = createOptions() - getVersionStub.returns(4) - options.isDryRun = true + const options = { + ...createOptions(), + isDryRun: true + } + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('4')) + await backends.root.close() await expect(migrator.revert('/some/path', backends, repoOptions, 2, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('called', false) - expect(lockStub).to.have.property('called', false) - expect(setVersionStub).to.have.property('called', false) - - return options.migrations.forEach(({ revert }) => expect(revert).to.have.property('calledOnce', false)) + for (const migration of options.migrations) { + expect(migration.revert).to.have.property('called', false) + expect(migration.migrate).to.have.property('called', false) + } }) it('should not lock repo when ignoreLock is used', async () => { - const options = createOptions() - options.ignoreLock = true + const options = { + ...createOptions(), + ignoreLock: true + } - getVersionStub.returns(4) + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, 
uint8ArrayFromString('4')) + await backends.root.close() await expect(migrator.revert('/some/path', backends, repoOptions, 2, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('called', false) - expect(lockStub).to.have.property('called', false) - expect(setVersionStub.calledOnceWith(2, backends)).to.be.true() - // Checking migrations expect(options.migrations[3].revert).to.have.property('calledOnce', true) expect(options.migrations[2].revert).to.have.property('calledOnce', true) expect(options.migrations[1].revert).to.have.property('called', false) expect(options.migrations[0].revert).to.have.property('called', false) + + expect(repoOptions.repoLock.lock).to.have.property('called', false) }) it('should report progress when progress callback is supplied', async () => { - const options = createOptions() const onProgressStub = sinon.stub() - options.onProgress = onProgressStub - getVersionStub.returns(4) + const options = { + ...createOptions(), + onProgress: onProgressStub + } + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('4')) + await backends.root.close() options.migrations[2].revert = async (backends, onProgress) => { onProgress(50, 'hello') @@ -321,18 +341,25 @@ describe('index.js', () => { }) it('should unlock repo when error is thrown', async () => { - getVersionStub.returns(4) const options = createOptions() + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('4')) + await backends.root.close() + options.migrations[2].revert = sinon.stub().rejects() await expect(migrator.revert('/some/path', backends, repoOptions, 2, options)) .to.eventually.be.rejected() - expect(lockCloseStub).to.have.property('calledOnce', true) - 
expect(lockStub).to.have.property('calledOnce', true) - // The last successfully reverted migration should be set as repo's version - expect(setVersionStub.calledOnceWith(3, backends)).to.be.true() + await backends.root.open() + expect(await backends.root.get(VERSION_KEY)).to.equalBytes(uint8ArrayFromString('3')) + await backends.root.close() + + await expect(repoOptions.repoLock.locked('/some/path')).to.eventually.be.false() }) }) @@ -342,7 +369,7 @@ describe('index.js', () => { // @ts-expect-error invalid params return expect(migrator.migrate(undefined, undefined, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error with out backends argument', () => { @@ -350,23 +377,23 @@ describe('index.js', () => { // @ts-expect-error invalid params return expect(migrator.migrate('/some/path', undefined, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error with out repoOptions argument', () => { const options = createOptions() // @ts-expect-error invalid params - return expect(migrator.migrate('/some/path', backends, undefined, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + return expect(migrator.migrate('/some/path', createBackends(), undefined, undefined, options)) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error with out toVersion argument', () => { const options = createOptions() // @ts-expect-error invalid params - return 
expect(migrator.migrate('/some/path', backends, repoOptions, undefined, options)) - .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code) + return expect(migrator.migrate('/some/path', createBackends(), repoOptions, undefined, options)) + .to.eventually.be.rejectedWith(RequiredParameterError).with.property('code', RequiredParameterError.code) }) it('should error with invalid toVersion argument', () => { @@ -374,12 +401,12 @@ describe('index.js', () => { return Promise.all( // @ts-expect-error invalid params - invalidValues.map((invalidValue) => expect(migrator.migrate('/some/path', backends, repoOptions, invalidValue, createOptions())) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)) + invalidValues.map((invalidValue) => expect(migrator.migrate('/some/path', createBackends(), repoOptions, invalidValue, createOptions())) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code)) ) }) - it('should verify that all migrations are available', () => { + it('should verify that all migrations are available', async () => { const options = { migrations: [ { @@ -397,13 +424,17 @@ describe('index.js', () => { ] } - getVersionStub.returns(1) + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('1')) + await backends.root.close() return expect(migrator.migrate('/some/path', backends, repoOptions, 3, options)) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code) }) - it('should verify that all migrations are available', () => { + it('should verify that all migrations are available', async () => { const options = { 
migrations: [ { @@ -421,43 +452,62 @@ describe('index.js', () => { ] } - getVersionStub.returns(3) + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('3')) + await backends.root.close() return expect(migrator.migrate('/some/path', backends, repoOptions, 5, options)) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code) }) it('should not migrate if current repo version and toVersion matches', async () => { - getVersionStub.returns(2) const options = createOptions() + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() + await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options)) .to.eventually.be.fulfilled() - expect(lockStub).to.have.property('called', false) + await backends.root.open() + expect(await backends.root.get(VERSION_KEY)).to.equalBytes(uint8ArrayFromString('2')) + await backends.root.close() }) it('should not migrate if current repo version is higher then toVersion', async () => { - getVersionStub.returns(3) const options = createOptions() + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('3')) + await backends.root.close() + await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options)) - .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code) + .to.eventually.be.rejectedWith(InvalidValueError).with.property('code', InvalidValueError.code) - 
expect(lockStub).to.have.property('called', false) + await backends.root.open() + expect(await backends.root.get(VERSION_KEY)).to.equalBytes(uint8ArrayFromString('3')) + await backends.root.close() }) it('should migrate expected migrations', async () => { const options = createOptions() - getVersionStub.returns(1) + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('1')) + await backends.root.close() await expect(migrator.migrate('/some/path', backends, repoOptions, 3, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('calledOnce', true) - expect(lockStub).to.have.property('calledOnce', true) - expect(setVersionStub.calledOnceWith(3, backends)).to.be.true() - // Checking migrations expect(options.migrations[3].migrate).to.have.property('called', false) expect(options.migrations[2].migrate).to.have.property('calledOnce', true) @@ -468,42 +518,58 @@ describe('index.js', () => { it('should not have any side-effects when in dry run', async () => { const options = createOptions() options.isDryRun = true - getVersionStub.returns(2) + + const backends = createBackends() + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('called', false) - expect(lockStub).to.have.property('called', false) - expect(setVersionStub).to.have.property('called', false) - - return options.migrations.forEach(({ migrate }) => expect(migrate).to.have.property('calledOnce', false)) + for (const migration of options.migrations) { + expect(migration.revert).to.have.property('called', false) + expect(migration.migrate).to.have.property('called', 
false) + } }) it('should not lock repo when ignoreLock is used', async () => { - const options = createOptions() - options.ignoreLock = true - getVersionStub.returns(2) + const backends = createBackends() + const options = { + ...createOptions(), + ignoreLock: true + } + + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) .to.eventually.be.fulfilled() - expect(lockCloseStub).to.have.property('called', false) - expect(lockStub).to.have.property('called', false) - expect(setVersionStub.calledOnceWith(4, backends)).to.be.true() - // Checking migrations expect(options.migrations[3].migrate).to.have.property('calledOnce', true) expect(options.migrations[2].migrate).to.have.property('calledOnce', true) expect(options.migrations[1].migrate).to.have.property('called', false) expect(options.migrations[0].migrate).to.have.property('called', false) + + expect(repoOptions.repoLock.lock).to.have.property('called', false) }) it('should report progress when progress callback is supplied', async () => { - const options = createOptions() const onProgressStub = sinon.stub() - options.onProgress = onProgressStub - getVersionStub.returns(2) + const backends = createBackends() + const options = { + ...createOptions(), + onProgress: onProgressStub + } + + await backends.root.open() + await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() options.migrations[2].migrate = async (backends, onProgress) => { onProgress(50, 'hello') @@ -516,18 +582,25 @@ describe('index.js', () => { }) it('should unlock repo when error is thrown', async () => { - getVersionStub.returns(2) + const backends = createBackends() const options = createOptions() + + await backends.root.open() + 
await backends.root.put(CONFIG_KEY, uint8ArrayFromString('{}')) + await backends.root.put(VERSION_KEY, uint8ArrayFromString('2')) + await backends.root.close() + options.migrations[3].migrate = sinon.stub().rejects() - await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) + await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) .to.eventually.be.rejected() - expect(lockCloseStub).to.have.property('calledOnce', true) - expect(lockStub).to.have.property('calledOnce', true) - // The last successfully migrated migration should be set as repo's version - expect(setVersionStub.calledOnceWith(3, backends)).to.be.true() + await backends.root.open() + expect(await backends.root.get(VERSION_KEY)).to.equalBytes(uint8ArrayFromString('3')) + await backends.root.close() + + await expect(repoOptions.repoLock.locked('/some/path')).to.eventually.be.false() }) }) }) diff --git a/packages/ipfs-repo-migrations/test/init-test.js b/packages/ipfs-repo-migrations/test/init-test.js index 00688c95..9a153de7 100644 --- a/packages/ipfs-repo-migrations/test/init-test.js +++ b/packages/ipfs-repo-migrations/test/init-test.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { CONFIG_KEY, VERSION_KEY } = require('../src/utils') -const repoInit = require('../src/repo/init') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +import { expect } from 'aegir/utils/chai.js' +import { CONFIG_KEY, VERSION_KEY } from '../src/utils.js' +import { isRepoInitialized } from '../src/repo/init.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @param {import('./types').SetupFunction} setup * @param {import('./types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { /** @type {string} */ let dir /** @type {import('../src/types').Backends} */ @@ -30,7 +29,7 @@ module.exports = (setup, 
cleanup) => { await store.put(CONFIG_KEY, uint8ArrayFromString('config')) await store.close() - expect(await repoInit.isRepoInitialized(backends)).to.be.true() + expect(await isRepoInitialized(backends)).to.be.true() }) it('should return false with missing version key', async () => { @@ -39,7 +38,7 @@ module.exports = (setup, cleanup) => { await store.put(CONFIG_KEY, uint8ArrayFromString('')) await store.close() - expect(await repoInit.isRepoInitialized(backends)).to.be.false() + expect(await isRepoInitialized(backends)).to.be.false() }) it('should return false with missing config key', async () => { @@ -48,10 +47,10 @@ module.exports = (setup, cleanup) => { await store.put(VERSION_KEY, uint8ArrayFromString('')) await store.close() - expect(await repoInit.isRepoInitialized(backends)).to.be.false() + expect(await isRepoInitialized(backends)).to.be.false() }) it('should return false if the repo does not exists', async () => { - return expect(await repoInit.isRepoInitialized(backends)).to.be.false() + return expect(await isRepoInitialized(backends)).to.be.false() }) } diff --git a/packages/ipfs-repo-migrations/test/integration-test.js b/packages/ipfs-repo-migrations/test/integration-test.js index eca91921..6b7283d1 100644 --- a/packages/ipfs-repo-migrations/test/integration-test.js +++ b/packages/ipfs-repo-migrations/test/integration-test.js @@ -1,18 +1,16 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') - -const migrator = require('../src') -const migrations = require('./test-migrations') -const { VERSION_KEY, CONFIG_KEY } = require('../src/utils') -const { initRepo } = require('./fixtures/repo') +import { expect } from 'aegir/utils/chai.js' +import * as migrator from '../src/index.js' +import migrations from './test-migrations/index.js' +import { VERSION_KEY, CONFIG_KEY } from '../src/utils.js' +import { initRepo } from './fixtures/repo.js' /** * @param {import('./types').SetupFunction} setup * @param 
{import('./types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { /** @type {string} */ let dir /** @type {import('../src/types').Backends} */ diff --git a/packages/ipfs-repo-migrations/test/migrations/index.js b/packages/ipfs-repo-migrations/test/migrations/index.js index a6f93555..253fcf07 100644 --- a/packages/ipfs-repo-migrations/test/migrations/index.js +++ b/packages/ipfs-repo-migrations/test/migrations/index.js @@ -1,12 +1,15 @@ -'use strict' +import { test as migration8Test } from './migration-8-test.js' +import { test as migration9Test } from './migration-9-test.js' +import { test as migration10Test } from './migration-10-test.js' +import { test as migration11Test } from './migration-11-test.js' /** * @param {import('../types').SetupFunction} setup * @param {import('../types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { - require('./migration-8-test')(setup, cleanup) - require('./migration-9-test')(setup, cleanup) - require('./migration-10-test')(setup, cleanup) - require('./migration-11-test')(setup, cleanup) +export function test (setup, cleanup) { + migration8Test(setup, cleanup) + migration9Test(setup, cleanup) + migration10Test(setup, cleanup) + migration11Test(setup, cleanup) } diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js index 391320b8..8fa6925f 100644 --- a/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js +++ b/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js @@ -1,19 +1,17 @@ /* eslint-env mocha */ /* eslint-disable max-nested-callbacks */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { CID } = require('multiformats/cid') -const { BaseBlockstore } = require('blockstore-core/base') - -const migration = require('../../migrations/migration-10') -const Key = require('interface-datastore').Key -const { 
fromString } = require('uint8arrays/from-string') -const { equals } = require('uint8arrays/equals') +import { expect } from 'aegir/utils/chai.js' +import { CID } from 'multiformats/cid' +import { BaseBlockstore } from 'blockstore-core/base' +import { migration } from '../../migrations/migration-10/index.js' +import { Key } from 'interface-datastore/key' +import { fromString } from 'uint8arrays/from-string' +import { equals } from 'uint8arrays/equals' // @ts-expect-error no types -const Level5 = require('level-5') +import Level5 from 'level-5' // @ts-expect-error no types -const Level6 = require('level-6') +import Level6 from 'level-6' /** * @typedef {import('../../src/types').Backends} Backends @@ -117,7 +115,7 @@ function withLevel (store, LevelImpl) { * @param {import('../types').SetupFunction} setup * @param {import('../types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { describe('migration 10', function () { this.timeout(1024 * 1000) /** @type {string} */ diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js index baeff1c7..10b414df 100644 --- a/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js +++ b/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js @@ -1,11 +1,10 @@ /* eslint-env mocha */ /* eslint-disable max-nested-callbacks */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { CID } = require('multiformats/cid') -const migration = require('../../migrations/migration-11') -const { Key } = require('interface-datastore') +import { expect } from 'aegir/utils/chai.js' +import { CID } from 'multiformats/cid' +import { migration } from '../../migrations/migration-11/index.js' +import { Key } from 'interface-datastore/key' const MFS_ROOT_KEY = new Key('/local/filesroot') const MFS_ROOT = CID.parse('Qmc42sn2WBHYeAShU3nx8mYkhKVq4sRLapawTaGh4XH4iE') @@ -14,7 
+13,7 @@ const MFS_ROOT = CID.parse('Qmc42sn2WBHYeAShU3nx8mYkhKVq4sRLapawTaGh4XH4iE') * @param {import('../types').SetupFunction} setup * @param {import('../types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { describe('migration 11', function () { this.timeout(1024 * 1000) /** @type {string} */ diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js index c816e3ce..166380bf 100644 --- a/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js +++ b/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js @@ -1,12 +1,10 @@ /* eslint-env mocha */ /* eslint-disable max-nested-callbacks */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') - -const migration = require('../../migrations/migration-8') -const Key = require('interface-datastore').Key +import { expect } from 'aegir/utils/chai.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { migration } from '../../migrations/migration-8/index.js' +import { Key } from 'interface-datastore/key' /** * @typedef {import('../../src/types').Backends} Backends @@ -115,7 +113,7 @@ async function validateBlocks (backends, migrated) { * @param {import('../types').SetupFunction} setup * @param {import('../types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { describe('migration 8', function () { this.timeout(1024 * 1000) /** @type {string} */ diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js index b954feac..f0828da6 100644 --- a/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js +++ b/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js @@ -1,17 
+1,16 @@ /* eslint-env mocha */ /* eslint-disable max-nested-callbacks */ -'use strict' - -const { expect } = require('aegir/utils/chai') -const cbor = require('cborg') -const migration = require('../../migrations/migration-9') -const { PIN_DS_KEY } = require('../../migrations/migration-9/utils') -const { CID } = require('multiformats/cid') -const { CarReader } = require('@ipld/car') -const loadFixture = require('aegir/utils/fixtures') -const dagPb = require('@ipld/dag-pb') -const mhd = require('multiformats/hashes/digest') -const { base32 } = require('multiformats/bases/base32') + +import { expect } from 'aegir/utils/chai.js' +import * as cbor from 'cborg' +import { migration } from '../../migrations/migration-9/index.js' +import { PIN_DS_KEY } from '../../migrations/migration-9/utils.js' +import { CID } from 'multiformats/cid' +import { CarReader } from '@ipld/car' +import loadFixture from 'aegir/utils/fixtures.js' +import * as dagPb from '@ipld/dag-pb' +import * as mhd from 'multiformats/hashes/digest' +import { base32 } from 'multiformats/bases/base32' /** * @typedef {import('interface-datastore').Datastore} Datastore @@ -132,7 +131,7 @@ async function assertPinsetRootIsPresent (datastore, pinset) { * @param {import('../types').SetupFunction} setup * @param {import('../types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { describe('migration 9', function () { this.timeout(1024 * 1000) /** @type {string} */ diff --git a/packages/ipfs-repo-migrations/test/node.js b/packages/ipfs-repo-migrations/test/node.js index fd719468..fe3001fc 100644 --- a/packages/ipfs-repo-migrations/test/node.js +++ b/packages/ipfs-repo-migrations/test/node.js @@ -1,17 +1,20 @@ /* eslint-env mocha */ -'use strict' -const os = require('os') -const rimraf = require('rimraf') -const { FsDatastore } = require('datastore-fs') -const { LevelDatastore } = require('datastore-level') -const { S3Datastore } = require('datastore-s3') 
-const { ShardingDatastore } = require('datastore-core/sharding') -const { NextToLast } = require('datastore-core/shard') -const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') -const mockS3 = require('./fixtures/mock-s3') -const S3 = require('aws-sdk').S3 -const { createRepo } = require('./fixtures/repo') +import os from 'os' +import rimraf from 'rimraf' +import { FsDatastore } from 'datastore-fs' +import { LevelDatastore } from 'datastore-level' +import { S3Datastore } from 'datastore-s3' +import { ShardingDatastore } from 'datastore-core/sharding' +import { NextToLast } from 'datastore-core/shard' +import { BlockstoreDatastoreAdapter } from 'blockstore-datastore-adapter' +import { mockS3 } from './fixtures/mock-s3.js' +import S3 from 'aws-sdk/clients/s3.js' +import { createRepo } from './fixtures/repo.js' +import { test as versionTests } from './version-test.js' +import { test as migrationTests } from './migrations/index.js' +import { test as initTests } from './init-test.js' +import { test as integrationTests } from './integration-test.js' /** * @param {string} dir @@ -112,19 +115,19 @@ CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => { describe(name, () => { describe('version tests', () => { - require('./version-test')(setup, cleanup) + versionTests(setup, cleanup) }) describe('migrations tests', () => { - require('./migrations')(setup, cleanup) + migrationTests(setup, cleanup) }) describe('init tests', () => { - require('./init-test')(setup, cleanup) + initTests(setup, cleanup) }) describe('integration tests', () => { - require('./integration-test')(setup, cleanup) + integrationTests(setup, cleanup) }) }) }) diff --git a/packages/ipfs-repo-migrations/test/test-migrations/index.js b/packages/ipfs-repo-migrations/test/test-migrations/index.js index 6676d164..0acccb56 100644 --- a/packages/ipfs-repo-migrations/test/test-migrations/index.js +++ b/packages/ipfs-repo-migrations/test/test-migrations/index.js @@ -1,5 +1,5 @@ 
-'use strict' +import { migration } from './migration-2/index.js' -module.exports = [ - require('./migration-2') +export default [ + migration ] diff --git a/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js b/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js index e33c48ea..b59bd774 100644 --- a/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js +++ b/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js @@ -1,8 +1,7 @@ -'use strict' -const Key = require('interface-datastore').Key -const _set = require('just-safe-set') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +import { Key } from 'interface-datastore/key' +import _set from 'just-safe-set' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @typedef {import('../../../src/types').Backends} Backends @@ -117,7 +116,7 @@ async function revert (backends, onProgress) { onProgress(100, 'done!') } -module.exports = { +export const migration = { version: 2, description: 'Updates config', migrate, diff --git a/packages/ipfs-repo-migrations/test/types.d.ts b/packages/ipfs-repo-migrations/test/types.ts similarity index 78% rename from packages/ipfs-repo-migrations/test/types.d.ts rename to packages/ipfs-repo-migrations/test/types.ts index 16b60ae6..2c9e993e 100644 --- a/packages/ipfs-repo-migrations/test/types.d.ts +++ b/packages/ipfs-repo-migrations/test/types.ts @@ -1,4 +1,4 @@ -import { Backends } from '../src/types' +import type { Backends } from '../src/types' export interface SetupFunction { (prefix?: string): Promise<{ dir: string, backends: Backends}> } export interface CleanupFunction { (dir: string): Promise } diff --git a/packages/ipfs-repo-migrations/test/version-test.js b/packages/ipfs-repo-migrations/test/version-test.js index 57364c40..00547c73 100644 --- a/packages/ipfs-repo-migrations/test/version-test.js +++ 
b/packages/ipfs-repo-migrations/test/version-test.js @@ -1,11 +1,10 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { VERSION_KEY, CONFIG_KEY } = require('../src/utils') -const version = require('../src/repo/version') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const errors = require('../src/errors') +import { expect } from 'aegir/utils/chai.js' +import { VERSION_KEY, CONFIG_KEY } from '../src/utils.js' +import * as version from '../src/repo/version.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import * as errors from '../src/errors.js' // When new versioning mechanism is introduced in new version don't forget to update // the range (from/to) of the previous version test's description @@ -14,7 +13,7 @@ const errors = require('../src/errors') * @param {import('./types').SetupFunction} setup * @param {import('./types').CleanupFunction} cleanup */ -module.exports = (setup, cleanup) => { +export function test (setup, cleanup) { /** @type {string} */ let dir /** @type {import('../src/types').Backends} */ diff --git a/packages/ipfs-repo-migrations/tsconfig.json b/packages/ipfs-repo-migrations/tsconfig.json index e17e1c7d..b1ffcdd5 100644 --- a/packages/ipfs-repo-migrations/tsconfig.json +++ b/packages/ipfs-repo-migrations/tsconfig.json @@ -1,7 +1,7 @@ { "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "types" }, "include": [ "src", diff --git a/packages/ipfs-repo/.aegir.js b/packages/ipfs-repo/.aegir.cjs similarity index 100% rename from packages/ipfs-repo/.aegir.js rename to packages/ipfs-repo/.aegir.cjs diff --git a/packages/ipfs-repo/README.md b/packages/ipfs-repo/README.md index b3f1361e..8d86bb1f 100644 --- a/packages/ipfs-repo/README.md +++ b/packages/ipfs-repo/README.md @@ -120,13 +120,13 @@ This provides a well defined interface for creating and interacting with an IPFS ### Use in Node.js 
```js -var IPFSRepo = require('ipfs-repo') +import IPFSRepo from 'ipfs-repo' ``` ### Use in a browser with browserify, webpack or any other bundler ```js -var IPFSRepo = require('ipfs-repo') +import IPFSRepo from 'ipfs-repo' ``` ### Use in a browser Using a script tag @@ -142,7 +142,7 @@ Loading this module through a script tag will make the `IpfsRepo` obj available Example: ```js -const Repo = require('ipfs-repo') +import Repo from 'ipfs-repo' const repo = new Repo('/tmp/ipfs-repo') await repo.init({ cool: 'config' }) @@ -367,8 +367,8 @@ Returned promise resolves to an `Object` with the following keys: IPFS Repo comes with two built in locks: memory and fs. These can be imported via the following: ```js -const fsLock = require('ipfs-repo/src/lock') // Default in Node.js -const memoryLock = require('ipfs-repo/src/lock-memory') // Default in browser +import fsLock from 'ipfs-repo/src/lock' // Default in Node.js +import memoryLock from 'ipfs-repo/src/lock-memory' // Default in browser ``` You can also provide your own custom Lock.
It must be an object with the following interface: diff --git a/packages/ipfs-repo/package.json b/packages/ipfs-repo/package.json index 25a1b82f..fe440fca 100644 --- a/packages/ipfs-repo/package.json +++ b/packages/ipfs-repo/package.json @@ -4,11 +4,21 @@ "description": "IPFS Repo implementation", "leadMaintainer": "Alex Potsides ", "main": "src/index.js", - "types": "dist/src/index.d.ts", + "types": "types/src/index.d.ts", + "type": "module", "files": [ - "src", - "dist" + "*", + "!**/*.tsbuildinfo" ], + "eslintConfig": { + "extends": "ipfs", + "parserOptions": { + "sourceType": "module" + } + }, + "publishConfig": { + "directory": "dist" + }, "browser": { "rimraf": false, "datastore-fs": "datastore-level", @@ -16,14 +26,15 @@ "./src/default-options.js": "./src/default-options.browser.js" }, "scripts": { - "clean": "rimraf types dist", - "test": "aegir test", - "build": "aegir build", + "clean": "rimraf dist types", "lint": "aegir ts -p check && aegir lint", - "release": "aegir release", - "release-minor": "aegir release --type minor", - "release-major": "aegir release --type major", - "depcheck": "aegir dep-check" + "build": "aegir build --no-bundle", + "release": "aegir release --target node", + "release-minor": "aegir release --type minor --target node", + "release-major": "aegir release --type major --target node", + "pretest": "aegir build --esm-tests", + "test": "aegir test", + "dep-check": "aegir dep-check -i rimraf" }, "repository": { "type": "git", @@ -79,15 +90,9 @@ "merge-options": "^3.0.4", "mortice": "^2.0.1", "multiformats": "^9.0.4", - "p-queue": "^6.0.0", + "p-queue": "^7.1.0", "proper-lockfile": "^4.0.0", - "sort-keys": "^4.0.0", + "sort-keys": "^5.0.0", "uint8arrays": "^3.0.0" - }, - "eslintConfig": { - "extends": "ipfs", - "ignorePatterns": [ - "!.aegir.js" - ] } } diff --git a/packages/ipfs-repo/src/api-addr.js b/packages/ipfs-repo/src/api-addr.js index cb01790a..7ccb3a89 100644 --- a/packages/ipfs-repo/src/api-addr.js +++ 
b/packages/ipfs-repo/src/api-addr.js @@ -1,7 +1,6 @@ -'use strict' -const Key = require('interface-datastore').Key -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +import { Key } from 'interface-datastore/key' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' const apiFile = new Key('api') @@ -9,7 +8,7 @@ const apiFile = new Key('api') * * @param {import('interface-datastore').Datastore} store */ -module.exports = (store) => { +export function apiAddr (store) { return { /** * Get the current configuration from the repo. diff --git a/packages/ipfs-repo/src/config.js b/packages/ipfs-repo/src/config.js index c328cefe..06252c57 100644 --- a/packages/ipfs-repo/src/config.js +++ b/packages/ipfs-repo/src/config.js @@ -1,29 +1,25 @@ -'use strict' - -const { Key } = require('interface-datastore') -const { default: Queue } = require('p-queue') -const _get = require('just-safe-get') -const _set = require('just-safe-set') -const errCode = require('err-code') -const errors = require('./errors') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const { - hasWithFallback, - getWithFallback -// @ts-ignore -} = require('ipfs-repo-migrations/src/utils') + +import { Key } from 'interface-datastore/key' +import Queue from 'p-queue' +import _get from 'just-safe-get' +import _set from 'just-safe-set' +import errCode from 'err-code' +import { NotFoundError } from './errors/index.js' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { getWithFallback, hasWithFallback } from './utils/level.js' const configKey = new Key('config') /** * @typedef {import('./types').Config} Config + * @typedef {import('interface-datastore').Datastore} Datastore */ /** - * @param {import('interface-datastore').Datastore} store + * @param 
{Datastore} store */ -module.exports = (store) => { +export function config (store) { const setQueue = new Queue({ concurrency: 1 }) const configStore = { @@ -38,9 +34,7 @@ module.exports = (store) => { // level-js@5.x cannot read keys from level-js@4.x dbs so fall back to // using IndexedDB API with string keys - only necessary until we do // the migratiion to v10 or above - const encodedValue = await getWithFallback(configKey, store.get.bind(store), store.has.bind(store), store, { - signal: options.signal - }) + const encodedValue = await getWithFallback(configKey, store.get.bind(store), store.has.bind(store), store) return JSON.parse(uint8ArrayToString(encodedValue)) }, @@ -54,14 +48,14 @@ module.exports = (store) => { */ async get (key, options = {}) { if (key == null) { - throw new errors.NotFoundError(`Key ${key} does not exist in config`) + throw new NotFoundError(`Key ${key} does not exist in config`) } const config = await this.getAll(options) const value = _get(config, key) if (value === undefined) { - throw new errors.NotFoundError(`Key ${key} does not exist in config`) + throw new NotFoundError(`Key ${key} does not exist in config`) } return value @@ -116,7 +110,7 @@ module.exports = (store) => { async exists () { // eslint-disable-line require-await // level-js@5.x cannot read keys from level-js@4.x dbs so fall back to // using IndexedDB API with string keys - only necessary until we do - // the migratiion to v10 or above + // the migration to v10 or above return hasWithFallback(configKey, store.has.bind(store), store) } } diff --git a/packages/ipfs-repo/src/constants.js b/packages/ipfs-repo/src/constants.js index 05c4b03a..32bd6c1f 100644 --- a/packages/ipfs-repo/src/constants.js +++ b/packages/ipfs-repo/src/constants.js @@ -1,5 +1,2 @@ -'use strict' -module.exports = { - repoVersion: 10 -} +export const repoVersion = 11 diff --git a/packages/ipfs-repo/src/default-datastore.js b/packages/ipfs-repo/src/default-datastore.js index b1043ddb..86ac1bc1 
100644 --- a/packages/ipfs-repo/src/default-datastore.js +++ b/packages/ipfs-repo/src/default-datastore.js @@ -1,7 +1,6 @@ -'use strict' // Default configuration for the datastore spec in node.js -module.exports = { +export default { Spec: { type: 'mount', mounts: [ diff --git a/packages/ipfs-repo/src/default-options.browser.js b/packages/ipfs-repo/src/default-options.browser.js index fcf8fdaf..41d071c7 100644 --- a/packages/ipfs-repo/src/default-options.browser.js +++ b/packages/ipfs-repo/src/default-options.browser.js @@ -1,9 +1,9 @@ -'use strict' +import * as MemoryLock from './locks/memory.js' // Default configuration for a repo in the browser -module.exports = { +export default { autoMigrate: true, onMigrationProgress: () => {}, repoOwner: true, - repoLock: require('./locks/memory') + repoLock: MemoryLock } diff --git a/packages/ipfs-repo/src/default-options.js b/packages/ipfs-repo/src/default-options.js index a09146d7..c1788456 100644 --- a/packages/ipfs-repo/src/default-options.js +++ b/packages/ipfs-repo/src/default-options.js @@ -1,15 +1,13 @@ -'use strict' +import * as FsLock from './locks/fs.js' // Default configuration for a repo in node.js /** * @type {Partial} */ -const defaultOptions = { +export default { autoMigrate: true, onMigrationProgress: () => {}, repoOwner: true, - repoLock: require('./locks/fs') + repoLock: FsLock } - -module.exports = defaultOptions diff --git a/packages/ipfs-repo/src/errors/index.js b/packages/ipfs-repo/src/errors/index.js index 4e7b6ce0..832f58d3 100644 --- a/packages/ipfs-repo/src/errors/index.js +++ b/packages/ipfs-repo/src/errors/index.js @@ -1,9 +1,7 @@ -'use strict' - /** * Error raised when there is lock already in place when repo is being opened. 
*/ -class LockExistsError extends Error { +export class LockExistsError extends Error { /** * @param {string} [message] */ @@ -13,14 +11,12 @@ class LockExistsError extends Error { this.code = LockExistsError.code } } - LockExistsError.code = 'ERR_LOCK_EXISTS' -exports.LockExistsError = LockExistsError /** * Error raised when requested item is not found. */ -class NotFoundError extends Error { +export class NotFoundError extends Error { /** * @param {string} [message] */ @@ -30,14 +26,12 @@ class NotFoundError extends Error { this.code = NotFoundError.code } } - NotFoundError.code = 'ERR_NOT_FOUND' -exports.NotFoundError = NotFoundError /** * Error raised when version of the stored repo is not compatible with version of this package. */ -class InvalidRepoVersionError extends Error { +export class InvalidRepoVersionError extends Error { /** * @param {string} [message] */ @@ -47,10 +41,8 @@ class InvalidRepoVersionError extends Error { this.code = InvalidRepoVersionError.code } } - InvalidRepoVersionError.code = 'ERR_INVALID_REPO_VERSION' -exports.InvalidRepoVersionError = InvalidRepoVersionError -exports.ERR_REPO_NOT_INITIALIZED = 'ERR_REPO_NOT_INITIALIZED' -exports.ERR_REPO_ALREADY_OPEN = 'ERR_REPO_ALREADY_OPEN' -exports.ERR_REPO_ALREADY_CLOSED = 'ERR_REPO_ALREADY_CLOSED' +export const ERR_REPO_NOT_INITIALIZED = 'ERR_REPO_NOT_INITIALIZED' +export const ERR_REPO_ALREADY_OPEN = 'ERR_REPO_ALREADY_OPEN' +export const ERR_REPO_ALREADY_CLOSED = 'ERR_REPO_ALREADY_CLOSED' diff --git a/packages/ipfs-repo/src/gc.js b/packages/ipfs-repo/src/gc.js index d39aa526..4a6067fb 100644 --- a/packages/ipfs-repo/src/gc.js +++ b/packages/ipfs-repo/src/gc.js @@ -1,17 +1,18 @@ -'use strict' - -const { CID } = require('multiformats/cid') -const log = require('debug')('ipfs:repo:gc') -const Errors = require('datastore-core/errors') -const ERR_NOT_FOUND = Errors.notFoundError().code -const parallelBatch = require('it-parallel-batch') -const { pipe } = require('it-pipe') -const merge = 
require('it-merge') -const map = require('it-map') -const filter = require('it-filter') -const { Key } = require('interface-datastore') -const { base32 } = require('multiformats/bases/base32') -const walkDag = require('./utils/walk-dag') + +import { CID } from 'multiformats/cid' +import debug from 'debug' +import { notFoundError } from 'datastore-core/errors' +import parallelBatch from 'it-parallel-batch' +import { pipe } from 'it-pipe' +import merge from 'it-merge' +import map from 'it-map' +import filter from 'it-filter' +import { Key } from 'interface-datastore/key' +import { base32 } from 'multiformats/bases/base32' +import { walkDag } from './utils/walk-dag.js' + +const log = debug('ipfs:repo:gc') +const ERR_NOT_FOUND = notFoundError().code // Limit on the number of parallel block remove operations const BLOCK_RM_CONCURRENCY = 256 @@ -35,7 +36,7 @@ const MFS_ROOT_KEY = new Key('/local/filesroot') * @param {import('interface-datastore').Datastore} config.root * @param {loadCodec} config.loadCodec */ -module.exports = ({ gcLock, pins, blockstore, root, loadCodec }) => { +export function gc ({ gcLock, pins, blockstore, root, loadCodec }) { /** * @returns {AsyncGenerator} */ @@ -78,7 +79,7 @@ async function createMarkedSet ({ pins, blockstore, loadCodec, root }) { let mh try { mh = await root.get(MFS_ROOT_KEY) - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code === ERR_NOT_FOUND) { log('No blocks in MFS') return @@ -139,14 +140,14 @@ async function * deleteUnmarkedBlocks ({ blockstore }, markedSet, blockKeys) { try { await blockstore.delete(cid) removedBlocksCount++ - } catch (err) { + } catch (/** @type {any} */ err) { return { err: new Error(`Could not delete block with CID ${cid}: ${err.message}`) } } return { cid } - } catch (err) { + } catch (/** @type {any} */ err) { const msg = `Could delete block with CID ${cid}` log(msg, err) return { err: new Error(msg + `: ${err.message}`) } diff --git a/packages/ipfs-repo/src/idstore.js 
b/packages/ipfs-repo/src/idstore.js index 60cd387e..854f9755 100644 --- a/packages/ipfs-repo/src/idstore.js +++ b/packages/ipfs-repo/src/idstore.js @@ -1,11 +1,10 @@ -'use strict' -const filter = require('it-filter') -const pushable = require('it-pushable') -const drain = require('it-drain') -const { CID } = require('multiformats/cid') -const errCode = require('err-code') -const { identity } = require('multiformats/hashes/identity') +import filter from 'it-filter' +import pushable from 'it-pushable' +import drain from 'it-drain' +import { CID } from 'multiformats/cid' +import errCode from 'err-code' +import { identity } from 'multiformats/hashes/identity' /** * @typedef {import('interface-datastore').Query} Query @@ -14,17 +13,11 @@ const { identity } = require('multiformats/hashes/identity') * @typedef {import('interface-blockstore').Blockstore} Blockstore */ -/** - * - * @param {Blockstore} blockstore - */ -module.exports = createIdStore - /** * @param {Blockstore} store * @returns {Blockstore} */ -function createIdStore (store) { +export function createIdStore (store) { return { open () { return store.open() @@ -90,7 +83,7 @@ function createIdStore (store) { }())) output.end() - } catch (err) { + } catch (/** @type {any} */ err) { output.end(err) } }) diff --git a/packages/ipfs-repo/src/index.js b/packages/ipfs-repo/src/index.js index 2207def6..6a3f0aee 100644 --- a/packages/ipfs-repo/src/index.js +++ b/packages/ipfs-repo/src/index.js @@ -1,27 +1,26 @@ -'use strict' - -const _get = require('just-safe-get') -const debug = require('debug') -const errCode = require('err-code') -const migrator = require('ipfs-repo-migrations') -const bytes = require('bytes') -const merge = require('merge-options') -const constants = require('./constants') -const version = require('./version') -const config = require('./config') -const spec = require('./spec') -const apiAddr = require('./api-addr') -const createIdstore = require('./idstore') -const defaultOptions = 
require('./default-options') -const defaultDatastore = require('./default-datastore') -const ERRORS = require('./errors') -const { PinManager, PinTypes } = require('./pins') -const createPinnedBlockstore = require('./pinned-blockstore') +import _get from 'just-safe-get' +import debug from 'debug' +import errCode from 'err-code' +import * as migrator from 'ipfs-repo-migrations' +import bytes from 'bytes' +import merge from 'merge-options' +import * as CONSTANTS from './constants.js' +import { version } from './version.js' +import { config } from './config.js' +import { spec } from './spec.js' +import { apiAddr } from './api-addr.js' +import { createIdStore } from './idstore.js' +import defaultOptions from './default-options.js' +import defaultDatastore from './default-datastore.js' +import * as ERRORS from './errors/index.js' +import { PinManager, PinTypes as PinTypesImport } from './pins.js' +import { createPinnedBlockstore } from './pinned-blockstore.js' // @ts-ignore - no types -const mortice = require('mortice') -const gc = require('./gc') -const MemoryLock = require('./locks/memory') -const FSLock = require('./locks/fs') +import mortice from 'mortice' +import { gc } from './gc.js' +import * as MemoryLock from './locks/memory.js' +import * as FSLock from './locks/fs.js' +import * as BlockstoreUtils from './utils/blockstore.js' const log = debug('ipfs:repo') @@ -77,7 +76,7 @@ class Repo { const pinnedBlockstore = createPinnedBlockstore(this.pins, blockstore) // this blockstore will extract blocks from multihashes with the identity codec - this.blocks = createIdstore(pinnedBlockstore) + this.blocks = createIdStore(pinnedBlockstore) this.version = version(this.root) this.config = config(this.root) @@ -103,7 +102,7 @@ class Repo { await this._openRoot() await this.config.replace(buildConfig(config)) await this.spec.set(buildDatastoreSpec(config)) - await this.version.set(constants.repoVersion) + await this.version.set(CONSTANTS.repoVersion) } /** @@ -125,7 +124,7 @@ 
class Repo { await this.root.close() return true - } catch (err) { + } catch (/** @type {any} */ err) { // FIXME: do not use exceptions for flow control return false } @@ -151,11 +150,11 @@ class Repo { this._lockfile = await this._openLock() log('acquired repo.lock') - const isCompatible = await this.version.check(constants.repoVersion) + const isCompatible = await this.version.check(CONSTANTS.repoVersion) if (!isCompatible) { if (await this._isAutoMigrationEnabled()) { - await this._migrate(constants.repoVersion, { + await this._migrate(CONSTANTS.repoVersion, { root: this.root, datastore: this.datastore, pins: this.pins.pinstore, @@ -181,7 +180,7 @@ class Repo { this.closed = false log('all opened') - } catch (err) { + } catch (/** @type {any} */ err) { if (this._lockfile) { try { await this._closeLock() @@ -203,7 +202,7 @@ class Repo { async _openRoot () { try { await this.root.open() - } catch (err) { + } catch (/** @type {any} */ err) { if (err.message !== 'Already open') { throw err } @@ -250,7 +249,7 @@ class Repo { this.spec.exists(), this.version.exists() ]) - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code === 'ERR_NOT_FOUND') { throw errCode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { path: this.path @@ -281,7 +280,7 @@ class Repo { try { // Delete api, ignoring irrelevant errors await this.apiAddr.delete() - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code !== ERRORS.ERR_REPO_NOT_INITIALIZED && !err.message.startsWith('ENOENT')) { throw err } @@ -350,7 +349,7 @@ class Repo { let autoMigrateConfig try { autoMigrateConfig = await this.config.get(AUTO_MIGRATE_CONFIG_KEY) - } catch (e) { + } catch (/** @type {any} */ e) { if (e.code === ERRORS.NotFoundError.code) { autoMigrateConfig = true // Config's default value is True } else { @@ -393,7 +392,7 @@ class Repo { try { const max = /** @type {number} */(await this.config.get('Datastore.StorageMax')) return BigInt(bytes(max)) - } catch (err) 
{ + } catch (/** @type {any} */ err) { return BigInt(noLimit) } } @@ -436,22 +435,25 @@ async function getSize (datastore) { * @param {Partial} [options] - Configuration * @returns {import('./types').IPFSRepo} */ -function createRepo (path, loadCodec, backends, options) { +export function createRepo (path, loadCodec, backends, options) { return new Repo(path, loadCodec, backends, options) } -module.exports = { - createRepo, - repoVersion: constants.repoVersion, - errors: ERRORS, - utils: { blockstore: require('./utils/blockstore') }, - locks: { - memory: MemoryLock, - fs: FSLock - }, - PinTypes +export const repoVersion = CONSTANTS.repoVersion + +export const errors = ERRORS + +export const utils = { + blockstore: BlockstoreUtils } +export const locks = { + memory: MemoryLock, + fs: FSLock +} + +export const PinTypes = PinTypesImport + /** * @param {import('./types').Config} _config */ diff --git a/packages/ipfs-repo/src/locks/fs.js b/packages/ipfs-repo/src/locks/fs.js index 039a6b1d..003dc3d2 100644 --- a/packages/ipfs-repo/src/locks/fs.js +++ b/packages/ipfs-repo/src/locks/fs.js @@ -1,9 +1,7 @@ -'use strict' - -const { LockExistsError } = require('../errors') -const path = require('path') -const debug = require('debug') -const { lock: properLock, check } = require('proper-lockfile') +import { LockExistsError } from '../errors/index.js' +import path from 'path' +import debug from 'debug' +import { lock as properLock, check } from 'proper-lockfile' const log = debug('ipfs:repo:lock:fs') const lockFile = 'repo.lock' @@ -30,13 +28,13 @@ const STALE_TIME = 20000 * @param {string} dir * @returns {Promise} */ -const lock = async (dir) => { +export const lock = async (dir) => { const file = path.join(dir, lockFile) log('locking %s', file) let release try { release = await properLock(dir, { lockfilePath: file, stale: STALE_TIME }) - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code === 'ELOCKED') { throw new LockExistsError(`Lock already being held for 
file: ${file}`) } else { @@ -54,13 +52,8 @@ const lock = async (dir) => { * @param {string} dir * @returns {Promise} */ -const locked = (dir) => { +export const locked = (dir) => { const file = path.join(dir, lockFile) return check(dir, { lockfilePath: file, stale: STALE_TIME }) } - -module.exports = { - locked, - lock -} diff --git a/packages/ipfs-repo/src/locks/memory.js b/packages/ipfs-repo/src/locks/memory.js index e28c2c75..f39faed4 100644 --- a/packages/ipfs-repo/src/locks/memory.js +++ b/packages/ipfs-repo/src/locks/memory.js @@ -1,10 +1,8 @@ -'use strict' -const errors = require('../errors') -const debug = require('debug') +import { LockExistsError } from '../errors/index.js' +import debug from 'debug' const log = debug('ipfs:repo:lock:memory') - const lockFile = 'repo.lock' /** @type {Record} */ @@ -20,12 +18,12 @@ const LOCKS = {} * @param {string} dir * @returns {Promise} */ -exports.lock = async (dir) => { +export async function lock (dir) { const file = dir + '/' + lockFile log('locking %s', file) if (LOCKS[file] === true) { - throw new errors.LockExistsError(`Lock already being held for file: ${file}`) + throw new LockExistsError(`Lock already being held for file: ${file}`) } LOCKS[file] = true @@ -45,7 +43,7 @@ exports.lock = async (dir) => { * @param {string} dir * @returns {Promise} */ -exports.locked = async (dir) => { +export async function locked (dir) { const file = dir + '/' + lockFile log(`checking lock: ${file}`) diff --git a/packages/ipfs-repo/src/pinned-blockstore.js b/packages/ipfs-repo/src/pinned-blockstore.js index 8ac95c4d..e59b499f 100644 --- a/packages/ipfs-repo/src/pinned-blockstore.js +++ b/packages/ipfs-repo/src/pinned-blockstore.js @@ -1,8 +1,7 @@ -'use strict' -const map = require('it-map') -const errCode = require('err-code') -const { PinTypes } = require('./pins') +import map from 'it-map' +import errCode from 'err-code' +import { PinTypes } from './pins.js' /** * @typedef {import('interface-datastore').Query} Query @@ -13,18 
+12,12 @@ const { PinTypes } = require('./pins') * @typedef {import('./pins').Pins} Pins */ -/** - * - * @param {Blockstore} blockstore - */ -module.exports = createPinnedBlockstore - /** * @param {Pins} pins * @param {Blockstore} store * @returns {Blockstore} */ -function createPinnedBlockstore (pins, store) { +export function createPinnedBlockstore (pins, store) { return { open () { return store.open() diff --git a/packages/ipfs-repo/src/pins.js b/packages/ipfs-repo/src/pins.js index ec127cdf..c461331e 100644 --- a/packages/ipfs-repo/src/pins.js +++ b/packages/ipfs-repo/src/pins.js @@ -1,18 +1,17 @@ /* eslint max-nested-callbacks: ["error", 8] */ -'use strict' - -const { CID } = require('multiformats/cid') -const errCode = require('err-code') -const debug = require('debug') -const first = require('it-first') -const Block = require('multiformats/block') -const cborg = require('cborg') -const dagPb = require('@ipld/dag-pb') -const { + +import { CID } from 'multiformats/cid' +import errCode from 'err-code' +import debug from 'debug' +import first from 'it-first' +import { createUnsafe } from 'multiformats/block' +import * as cborg from 'cborg' +import * as dagPb from '@ipld/dag-pb' +import { cidToKey, keyToMultihash -} = require('./utils/blockstore') -const walkDag = require('./utils/walk-dag') +} from './utils/blockstore.js' +import { walkDag } from './utils/walk-dag.js' /** * @typedef {object} PinInternal @@ -39,7 +38,7 @@ function invalidPinTypeErr (type) { return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') } -const PinTypes = { +export const PinTypes = { /** @type {'direct'} */ direct: ('direct'), /** @type {'recursive'} */ @@ -53,7 +52,7 @@ const PinTypes = { /** * @implements {Pins} */ -class PinManager { +export class PinManager { /** * @param {Object} config * @param {import('interface-datastore').Datastore} config.pinstore @@ -300,7 +299,7 @@ class PinManager { const bytes = await this.blockstore.get(cid, options) const codec = await 
this.loadCodec(cid.code) - const block = Block.createUnsafe({ bytes, cid, codec }) + const block = createUnsafe({ bytes, cid, codec }) await Promise.all( [...block.links()].map(([, childCid]) => walkDag(childCid, options)) @@ -323,8 +322,3 @@ class PinManager { return true } } - -module.exports = { - PinManager, - PinTypes -} diff --git a/packages/ipfs-repo/src/spec.js b/packages/ipfs-repo/src/spec.js index a9d603aa..f7c6c22f 100644 --- a/packages/ipfs-repo/src/spec.js +++ b/packages/ipfs-repo/src/spec.js @@ -1,9 +1,8 @@ -'use strict' -const Key = require('interface-datastore').Key -const sortKeys = require('sort-keys') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +import { Key } from 'interface-datastore' +import sortKeys from 'sort-keys' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' const specKey = new Key('datastore_spec') @@ -11,7 +10,7 @@ const specKey = new Key('datastore_spec') * * @param {import('interface-datastore').Datastore} store */ -module.exports = (store) => { +export function spec (store) { return { /** * Check if a datastore spec file exists. 
diff --git a/packages/ipfs-repo/src/utils/blockstore.js b/packages/ipfs-repo/src/utils/blockstore.js index 680a8944..3da09624 100644 --- a/packages/ipfs-repo/src/utils/blockstore.js +++ b/packages/ipfs-repo/src/utils/blockstore.js @@ -1,18 +1,16 @@ -'use strict' - -const { Key } = require('interface-datastore') -const { CID } = require('multiformats') -const raw = require('multiformats/codecs/raw') -const errCode = require('err-code') -const { base32 } = require('multiformats/bases/base32') -const Digest = require('multiformats/hashes/digest') +import { Key } from 'interface-datastore/key' +import { CID } from 'multiformats' +import * as raw from 'multiformats/codecs/raw' +import errCode from 'err-code' +import { base32 } from 'multiformats/bases/base32' +import * as Digest from 'multiformats/hashes/digest' /** * Transform a cid to the appropriate datastore key. * * @param {CID} c */ -function cidToKey (c) { +export function cidToKey (c) { const cid = CID.asCID(c) if (cid == null) { @@ -31,7 +29,7 @@ function cidToKey (c) { * * @param {Key} key */ -function keyToCid (key) { +export function keyToCid (key) { // Block key is of the form / return CID.createV1(raw.code, keyToMultihash(key)) } @@ -39,12 +37,6 @@ function keyToCid (key) { /** * @param {Key | string} key */ -function keyToMultihash (key) { +export function keyToMultihash (key) { return Digest.decode(base32.decode(`b${key.toString().toLowerCase().substring(1)}`)) } - -module.exports = { - cidToKey, - keyToCid, - keyToMultihash -} diff --git a/packages/ipfs-repo/src/utils/level.js b/packages/ipfs-repo/src/utils/level.js new file mode 100644 index 00000000..93ce21d4 --- /dev/null +++ b/packages/ipfs-repo/src/utils/level.js @@ -0,0 +1,103 @@ + +import { NotFoundError } from '../errors/index.js' + +/** + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-datastore').Key} Key + */ + +/** + * @param {Key} key + * @param {function (Key): Promise} has + * @param 
{Datastore} store + * @returns {Promise} + */ +export async function hasWithFallback (key, has, store) { + const result = await has(key) + + if (result) { + return result + } + + // Newer versions of level.js changed the key type from Uint8Array|string + // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js + // and the string version of the key did not work + const levelJs = findLevelJs(store) + + if (!levelJs) { + return false + } + + return new Promise((resolve, reject) => { + // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore + const req = levelJs.store('readonly').get(key.toString()) + req.transaction.onabort = () => { + reject(req.transaction.error) + } + req.transaction.oncomplete = () => { + resolve(Boolean(req.result)) + } + }) +} + +/** + * @param {import('interface-datastore').Key} key + * @param {function (Key): Promise} get + * @param {function (Key): Promise} has + * @param {import('interface-datastore').Datastore} store + * @returns {Promise} + */ +export async function getWithFallback (key, get, has, store) { + if (await has(key)) { + return get(key) + } + + // Newer versions of level.js changed the key type from Uint8Array|string + // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js + // and the string version of the key did not work + const levelJs = findLevelJs(store) + + if (!levelJs) { + throw new NotFoundError() + } + + return new Promise((resolve, reject) => { + // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore + const req = levelJs.store('readonly').get(key.toString()) + req.transaction.onabort = () => { + reject(req.transaction.error) + } + req.transaction.oncomplete = () => { + if (req.result) { + return resolve(req.result) + } + + reject(new NotFoundError()) + } + }) +} + +/** + * Level dbs wrap level dbs that wrap level dbs. Find a level-js + * instance in the chain if one exists. 
+ * + * @param {Datastore} store + * @returns {Datastore | undefined} + */ +function findLevelJs (store) { + let db = store + + // @ts-ignore + while (db.db || db.child) { + // @ts-ignore + db = db.db || db.child + + // `Level` is only present in the browser, in node it is LevelDOWN + // @ts-ignore + if (db.type === 'level-js' || db.constructor.name === 'Level') { + return db + } + } +} diff --git a/packages/ipfs-repo/src/utils/walk-dag.js b/packages/ipfs-repo/src/utils/walk-dag.js index 9ac6bff3..31a899cb 100644 --- a/packages/ipfs-repo/src/utils/walk-dag.js +++ b/packages/ipfs-repo/src/utils/walk-dag.js @@ -1,7 +1,7 @@ -'use strict' +import debug from 'debug' +import { createUnsafe } from 'multiformats/block' -const log = require('debug')('ipfs:repo:utils:walk-dag') -const Block = require('multiformats/block') +const log = debug('ipfs:repo:utils:walk-dag') /** * @typedef {import('multiformats/cid').CID} CID @@ -17,21 +17,19 @@ const Block = require('multiformats/block') * @param {AbortOptions} [options] * @returns {AsyncGenerator} */ -async function * walkDag (cid, blockstore, loadCodec, options) { +export async function * walkDag (cid, blockstore, loadCodec, options) { try { const bytes = await blockstore.get(cid, options) const codec = await loadCodec(cid.code) - const block = Block.createUnsafe({ bytes, cid, codec }) + const block = createUnsafe({ bytes, cid, codec }) for (const [, childCid] of block.links()) { yield childCid yield * walkDag(childCid, blockstore, loadCodec, options) } - } catch (err) { + } catch (/** @type {any} */ err) { log('Could not walk DAG for CID', cid.toString(), err) throw err } } - -module.exports = walkDag diff --git a/packages/ipfs-repo/src/version.js b/packages/ipfs-repo/src/version.js index 5e6e39e6..58a0b45c 100644 --- a/packages/ipfs-repo/src/version.js +++ b/packages/ipfs-repo/src/version.js @@ -1,23 +1,18 @@ -'use strict' -const Key = require('interface-datastore').Key -const debug = require('debug') -const log = 
debug('ipfs:repo:version') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const { - hasWithFallback, - getWithFallback -// @ts-ignore -} = require('ipfs-repo-migrations/src/utils') +import { Key } from 'interface-datastore/key' +import debug from 'debug' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { getWithFallback, hasWithFallback } from './utils/level.js' +const log = debug('ipfs:repo:version') const versionKey = new Key('version') /** * * @param {import('interface-datastore').Datastore} store */ -module.exports = (store) => { +export function version (store) { return { /** * Check if a version file exists. diff --git a/packages/ipfs-repo/test/api-addr-test.js b/packages/ipfs-repo/test/api-addr-test.js index 492843f3..3154b936 100644 --- a/packages/ipfs-repo/test/api-addr-test.js +++ b/packages/ipfs-repo/test/api-addr-test.js @@ -1,12 +1,11 @@ /* eslint-env mocha */ -'use strict' -// const { expect } = require('aegir/utils/chai') -// const apiAddr = require('../src/api-addr') -// const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +// import { expect } from 'aegir/utils/chai.js' +// const apiAddr from '../src/api-addr') +// import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' // TODO this should all be refactor -module.exports = () => { +export default () => { describe('api-addr', () => { // describe('.get', () => { // it('should get a value from the store', async () => { diff --git a/packages/ipfs-repo/test/blockstore-test.js b/packages/ipfs-repo/test/blockstore-test.js index 144e47cb..503ad2ee 100644 --- a/packages/ipfs-repo/test/blockstore-test.js +++ b/packages/ipfs-repo/test/blockstore-test.js @@ -1,27 +1,26 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use 
strict' - -const { expect } = require('aegir/utils/chai') -const { CID } = require('multiformats') -const range = require('just-range') -const tempDir = require('ipfs-utils/src/temp-dir') -const { createRepo } = require('../src') -const drain = require('it-drain') -const all = require('it-all') -const first = require('it-first') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') -const { toString: uint8ArrayToString } = require('uint8arrays/to-string') -const { equals: uint8ArrayEquals } = require('uint8arrays/equals') -const { BaseBlockstore } = require('blockstore-core/base') -const { sha256 } = require('multiformats/hashes/sha2') -const { identity } = require('multiformats/hashes/identity') -const raw = require('multiformats/codecs/raw') -const dagCbor = require('@ipld/dag-cbor') -const dagPb = require('@ipld/dag-pb') -const loadCodec = require('./fixtures/load-codec') -const createBackend = require('./fixtures/create-backend') -const MemoryLock = require('../src/locks/memory') + +import { expect } from 'aegir/utils/chai.js' +import { CID } from 'multiformats/cid' +import range from 'just-range' +import tempDir from 'ipfs-utils/src/temp-dir.js' +import { createRepo } from '../src/index.js' +import drain from 'it-drain' +import all from 'it-all' +import first from 'it-first' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { BaseBlockstore } from 'blockstore-core/base' +import { sha256 } from 'multiformats/hashes/sha2' +import { identity } from 'multiformats/hashes/identity' +import * as raw from 'multiformats/codecs/raw' +import * as dagCbor from '@ipld/dag-cbor' +import * as dagPb from '@ipld/dag-pb' +import { loadCodec } from './fixtures/load-codec.js' +import { createBackend } from './fixtures/create-backend.js' +import * as MemoryLock from 
'../src/locks/memory.js' async function makePair () { const data = new TextEncoder().encode(`hello-${Math.random()}`) @@ -41,7 +40,7 @@ async function makePair () { * * @param {IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('blockstore', () => { const blockData = range(100).map((i) => uint8ArrayFromString(`hello-${i}-${Math.random()}`)) const bData = uint8ArrayFromString('hello world') diff --git a/packages/ipfs-repo/test/blockstore-utils-test.js b/packages/ipfs-repo/test/blockstore-utils-test.js index 7ddad0fe..e73e75b5 100644 --- a/packages/ipfs-repo/test/blockstore-utils-test.js +++ b/packages/ipfs-repo/test/blockstore-utils-test.js @@ -1,20 +1,19 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { Key } = require('interface-datastore') -const { CID } = require('multiformats/cid') -const Repo = require('../src') -const raw = require('multiformats/codecs/raw') +import { expect } from 'aegir/utils/chai.js' +import { Key } from 'interface-datastore/key' +import { CID } from 'multiformats/cid' +import { utils } from '../src/index.js' +import * as raw from 'multiformats/codecs/raw' -module.exports = () => { +export default () => { describe('blockstore utils', () => { it('converts a CID to a datastore Key and back', () => { // CIDv1 in base32 with IPLD raw codec const originalCid = CID.parse('bafkreihkb3vrxxex5zvzkr3s3a6noe223r7jka4ofjy2nkzu27kueg76ii') - const key = Repo.utils.blockstore.cidToKey(originalCid) + const key = utils.blockstore.cidToKey(originalCid) expect(key instanceof Key).to.be.true() - const cid = Repo.utils.blockstore.keyToCid(key) + const cid = utils.blockstore.keyToCid(key) expect(cid instanceof CID).to.be.true() expect(originalCid.toString()).to.equal(cid.toString()) }) @@ -22,10 +21,10 @@ module.exports = () => { it('converts a CID to base32 encoded key', () => { // CIDv0 in base58btc with implicit dag-pb codec const originalCid = 
CID.parse('QmQPeNsJPyVWPFDVHb77w8G42Fvo15z4bG2X8D2GhfbSXc') - const key = Repo.utils.blockstore.cidToKey(originalCid) + const key = utils.blockstore.cidToKey(originalCid) expect(key instanceof Key).to.be.true() expect(key.toString()).to.equal('/CIQB4655YD5GLBB7WWEUAHCO6QONU5ICBONAA5JEPBIOEIVZ5RXTIYY') - const cid = Repo.utils.blockstore.keyToCid(key) + const cid = utils.blockstore.keyToCid(key) expect(cid instanceof CID).to.be.true() expect('bafkreia6po64b6tfqq73lckadrhpihg2oubaxgqaoushquhcek46y3zumm').to.equal(cid.toString()) expect(cid.code).to.equal(raw.code) diff --git a/packages/ipfs-repo/test/browser.js b/packages/ipfs-repo/test/browser.js index 82faa277..4878bd55 100644 --- a/packages/ipfs-repo/test/browser.js +++ b/packages/ipfs-repo/test/browser.js @@ -1,11 +1,21 @@ /* eslint-env mocha */ -'use strict' - -const { createRepo } = require('../src') -const loadCodec = require('./fixtures/load-codec') -const { MemoryDatastore } = require('datastore-core/memory') -const { MemoryBlockstore } = require('blockstore-core/memory') +import { createRepo } from '../src/index.js' +import { loadCodec } from './fixtures/load-codec.js' +import { MemoryDatastore } from 'datastore-core/memory' +import { MemoryBlockstore } from 'blockstore-core/memory' +import optionsTests from './options-test.js' +import migrationsTests from './migrations-test.js' +import repoTests from './repo-test.js' +import blockstoreTests from './blockstore-test.js' +import datastoreTests from './datastore-test.js' +import keystoreTests from './keystore-test.js' +import lockTests from './lock-test.js' +import configTests from './config-test.js' +import apiAddrTests from './api-addr-test.js' +import pinsTests from './pins-test.js' +import isInitializedTests from './is-initialized.js' +import blockstoreUtilsTests from './blockstore-utils-test.js' async function createTempRepo (options = {}) { const date = Date.now().toString() @@ -25,8 +35,8 @@ async function createTempRepo (options = {}) { } 
describe('IPFS Repo Tests on the Browser', () => { - require('./options-test') - require('./migrations-test')(createTempRepo) + optionsTests() + migrationsTests(createTempRepo) const repo = createRepo('myrepo', loadCodec, { blocks: new MemoryBlockstore(), @@ -45,14 +55,14 @@ describe('IPFS Repo Tests on the Browser', () => { await repo.close() }) - require('./repo-test')(repo) - require('./blockstore-test')(repo) - require('./blockstore-utils-test')() - require('./datastore-test')(repo) - require('./keystore-test')(repo) - require('./config-test')(repo) - require('./api-addr-test')() - require('./lock-test')(repo) - require('./pins-test')(repo) - require('./is-initialized') + repoTests(repo) + blockstoreTests(repo) + blockstoreUtilsTests() + datastoreTests(repo) + keystoreTests(repo) + configTests(repo) + apiAddrTests() + lockTests(repo) + pinsTests(repo) + isInitializedTests() }) diff --git a/packages/ipfs-repo/test/config-test.js b/packages/ipfs-repo/test/config-test.js index 1ef1bbd9..a38335c9 100644 --- a/packages/ipfs-repo/test/config-test.js +++ b/packages/ipfs-repo/test/config-test.js @@ -1,12 +1,11 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') +import { expect } from 'aegir/utils/chai.js' /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('config', () => { describe('.set', () => { it('should throw when invalid key is passed', () => { diff --git a/packages/ipfs-repo/test/datastore-test.js b/packages/ipfs-repo/test/datastore-test.js index bca5ca60..838c62be 100644 --- a/packages/ipfs-repo/test/datastore-test.js +++ b/packages/ipfs-repo/test/datastore-test.js @@ -1,16 +1,15 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const range = require('just-range') -const Key = require('interface-datastore').Key -const { fromString: uint8ArrayFromString } = 
require('uint8arrays/from-string') +import { expect } from 'aegir/utils/chai.js' +import range from 'just-range' +import { Key } from 'interface-datastore/key' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('datastore', () => { const dataList = range(100).map((i) => uint8ArrayFromString(`hello-${i}-${Math.random()}`)) const data = uint8ArrayFromString('hello world') diff --git a/packages/ipfs-repo/test/fixtures/create-backend.js b/packages/ipfs-repo/test/fixtures/create-backend.js index 474ee351..01752aa8 100644 --- a/packages/ipfs-repo/test/fixtures/create-backend.js +++ b/packages/ipfs-repo/test/fixtures/create-backend.js @@ -1,9 +1,8 @@ -'use strict' -const { MemoryDatastore } = require('datastore-core/memory') -const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') +import { MemoryDatastore } from 'datastore-core/memory' +import { BlockstoreDatastoreAdapter } from 'blockstore-datastore-adapter' -function createBackend (overrides = {}) { +export function createBackend (overrides = {}) { return { datastore: new MemoryDatastore(), blocks: new BlockstoreDatastoreAdapter( @@ -15,5 +14,3 @@ function createBackend (overrides = {}) { ...overrides } } - -module.exports = createBackend diff --git a/packages/ipfs-repo/test/fixtures/load-codec.js b/packages/ipfs-repo/test/fixtures/load-codec.js index 4ed5c587..4861e433 100644 --- a/packages/ipfs-repo/test/fixtures/load-codec.js +++ b/packages/ipfs-repo/test/fixtures/load-codec.js @@ -1,8 +1,5 @@ -/* eslint-env mocha */ -'use strict' - -const dagPb = require('@ipld/dag-pb') -const dagCbor = require('@ipld/dag-cbor') +import * as dagPb from '@ipld/dag-pb' +import * as dagCbor from '@ipld/dag-cbor' /** * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec @@ -11,7 +8,7 @@ const dagCbor = require('@ipld/dag-cbor') /** * @type 
{import('../../src/types').loadCodec} */ -const loadCodec = (codeOrName) => { +export function loadCodec (codeOrName) { /** @type {Record} */ const lookup = { [dagPb.code]: dagPb, @@ -22,5 +19,3 @@ const loadCodec = (codeOrName) => { return Promise.resolve(lookup[codeOrName]) } - -module.exports = loadCodec diff --git a/packages/ipfs-repo/test/is-initialized.js b/packages/ipfs-repo/test/is-initialized.js index f957626e..ba2edf5f 100644 --- a/packages/ipfs-repo/test/is-initialized.js +++ b/packages/ipfs-repo/test/is-initialized.js @@ -1,47 +1,44 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const tempDir = require('ipfs-utils/src/temp-dir') -const { createRepo } = require('../src') -const loadCodec = require('./fixtures/load-codec') -const createBackend = require('./fixtures/create-backend') -const MemoryLock = require('../src/locks/memory') - -/** - * @typedef {import('../src/types').IPFSRepo} IPFSRepo - */ - -describe('isInitialized', () => { - /** @type {IPFSRepo} */ - let repo - - beforeEach(() => { - repo = createRepo(tempDir(b => 'test-repo-for-' + b), loadCodec, createBackend(), { - repoLock: MemoryLock +import { expect } from 'aegir/utils/chai.js' +import tempDir from 'ipfs-utils/src/temp-dir.js' +import { createRepo } from '../src/index.js' +import { loadCodec } from './fixtures/load-codec.js' +import { createBackend } from './fixtures/create-backend.js' +import * as MemoryLock from '../src/locks/memory.js' + +export default () => { + describe('isInitialized', () => { + /** @type {import('../src/types').IPFSRepo} */ + let repo + + beforeEach(() => { + repo = createRepo(tempDir(b => 'test-repo-for-' + b), loadCodec, createBackend(), { + repoLock: MemoryLock + }) }) - }) - it('should be false before initialization', async () => { - expect(await repo.isInitialized()).to.be.false() - }) + it('should be false before initialization', async () => { + expect(await 
repo.isInitialized()).to.be.false() + }) - it('should be true after initialization', async () => { - await repo.init({}) - expect(await repo.isInitialized()).to.be.true() - }) + it('should be true after initialization', async () => { + await repo.init({}) + expect(await repo.isInitialized()).to.be.true() + }) - it('should be true after initialization and opening', async () => { - await repo.init({}) - await repo.open() - expect(await repo.isInitialized()).to.be.true() - }) + it('should be true after initialization and opening', async () => { + await repo.init({}) + await repo.open() + expect(await repo.isInitialized()).to.be.true() + }) - it('should be true after initialization, opening and closing', async () => { - await repo.init({}) - await repo.open() - await repo.close() - expect(await repo.isInitialized()).to.be.true() + it('should be true after initialization, opening and closing', async () => { + await repo.init({}) + await repo.open() + await repo.close() + expect(await repo.isInitialized()).to.be.true() + }) }) -}) +} diff --git a/packages/ipfs-repo/test/keystore-test.js b/packages/ipfs-repo/test/keystore-test.js index f45cdf2d..41bc4208 100644 --- a/packages/ipfs-repo/test/keystore-test.js +++ b/packages/ipfs-repo/test/keystore-test.js @@ -1,13 +1,12 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') +import { expect } from 'aegir/utils/chai.js' /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('keystore', () => { it('exists', () => { expect(repo).to.have.property('keys') diff --git a/packages/ipfs-repo/test/lock-test.js b/packages/ipfs-repo/test/lock-test.js index 6220564d..0c4b8ab1 100644 --- a/packages/ipfs-repo/test/lock-test.js +++ b/packages/ipfs-repo/test/lock-test.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { createRepo } = 
require('../') -const lockMemory = require('../src/locks/memory') -const { LockExistsError } = require('./../src/errors') -const loadCodec = require('./fixtures/load-codec') +import { expect } from 'aegir/utils/chai.js' +import { createRepo } from '../src/index.js' +import * as lockMemory from '../src/locks/memory.js' +import { LockExistsError } from './../src/errors/index.js' +import { loadCodec } from './fixtures/load-codec.js' /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('Repo lock tests', () => { it('should handle locking for a repo lifecycle', async () => { // @ts-expect-error lockfile is not part of the interface @@ -33,7 +32,7 @@ module.exports = (repo) => { try { await repoClone.init({}) await repoClone.open() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.code) .to.equal(LockExistsError.code) } diff --git a/packages/ipfs-repo/test/migrations-test.js b/packages/ipfs-repo/test/migrations-test.js index 2ee3c4f4..867dd864 100644 --- a/packages/ipfs-repo/test/migrations-test.js +++ b/packages/ipfs-repo/test/migrations-test.js @@ -1,14 +1,12 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const sinon = require('sinon') - -const migrator = require('ipfs-repo-migrations') -const constants = require('../src/constants') -const errors = require('../src/errors') -const { createRepo } = require('../src') +import { expect } from 'aegir/utils/chai.js' +import sinon from 'sinon' +import { InvalidRepoVersionError } from '../src/errors/index.js' +import { createRepo } from '../src/index.js' +import { repoVersion } from '../src/constants.js' +import { migrations } from 'ipfs-repo-migrations' /** * @typedef {import('../src/types').IPFSRepo} IPFSRepo @@ -18,36 +16,13 @@ const { createRepo } = require('../src') /** * @param {(options?: Partial)=> Promise} createTempRepo */ -module.exports = 
(createTempRepo) => { +export default (createTempRepo) => { describe('Migrations tests', () => { /** @type {IPFSRepo} */ let repo - /** @type {sinon.SinonStub} */ - let migrateStub - /** @type {sinon.SinonStub} */ - let revertStub - /** @type {sinon.SinonStub} */ - let repoVersionStub - /** @type {sinon.SinonStub} */ - let getLatestMigrationVersionStub - - before(() => { - repoVersionStub = sinon.stub(constants, 'repoVersion') - migrateStub = sinon.stub(migrator, 'migrate') - revertStub = sinon.stub(migrator, 'revert') - getLatestMigrationVersionStub = sinon.stub(migrator, 'getLatestMigrationVersion') - }) - - after(() => { - repoVersionStub.restore() - migrateStub.restore() - revertStub.restore() - getLatestMigrationVersionStub.restore() - }) beforeEach(async () => { repo = await createTempRepo() - sinon.reset() }) // Testing migration logic @@ -65,14 +40,10 @@ module.exports = (createTempRepo) => { migrationLogic.forEach(({ config, option, result }) => { it(`should ${result ? '' : 'not '}migrate when config=${config} and option=${option}`, async () => { - migrateStub.resolves() - repoVersionStub.value(8) - getLatestMigrationVersionStub.returns(9) - if (config !== undefined) { await repo.config.set('repoAutoMigrate', config) } - await repo.version.set(7) + await repo.version.set(repoVersion - 1) await repo.close() // @ts-expect-error options is a private field @@ -89,103 +60,97 @@ module.exports = (createTempRepo) => { pins: repo.pins.pinstore }, newOpts) - expect(migrateStub.called).to.be.false() + const p = newRepo.open() - try { - await newRepo.open() - if (!result) expect.fail('should have thrown error') - } catch (err) { - expect(err.code).to.equal(errors.InvalidRepoVersionError.code) + if (!result) { + await expect(p).to.eventually.be.rejected().with.property('code', InvalidRepoVersionError.code) + } else { + await p } - expect(migrateStub.called).to.eq(result) + await expect(repo.version.get()).to.eventually.equal(result ? 
repoVersion : repoVersion - 1) }) }) it('should migrate by default', async () => { - migrateStub.resolves() - repoVersionStub.value(8) - getLatestMigrationVersionStub.returns(9) - - await repo.version.set(7) + await repo.version.set(repoVersion - 1) await repo.close() - expect(migrateStub.called).to.be.false() - await repo.open() - expect(migrateStub.called).to.be.true() + await expect(repo.version.get()).to.eventually.equal(repoVersion) }) it('should migrate with progress', async () => { - migrateStub.resolves() - repoVersionStub.value(8) - getLatestMigrationVersionStub.returns(9) - - await repo.version.set(7) + await repo.version.set(repoVersion - 1) await repo.close() - expect(migrateStub.called).to.be.false() - // @ts-expect-error options is a private field repo.options.onMigrationProgress = sinon.stub() await repo.open() - expect(migrateStub.called).to.be.true() // @ts-expect-error options is a private field - expect(migrateStub.getCall(0).args[4]).to.have.property('onProgress', repo.options.onMigrationProgress) + expect(repo.options.onMigrationProgress.called).to.be.true() + + await expect(repo.version.get()).to.eventually.equal(repoVersion) }) it('should not migrate when versions matches', async () => { - migrateStub.resolves() - repoVersionStub.value(8) + await repo.version.set(repoVersion) - await repo.version.set(8) await repo.close() - - expect(migrateStub.called).to.be.false() - await repo.open() - expect(migrateStub.called).to.be.false() + await expect(repo.version.get()).to.eventually.equal(repoVersion) }) it('should revert when current repo versions is higher then expected', async () => { - revertStub.resolves() - repoVersionStub.value(8) - - expect(revertStub.called).to.be.false() + migrations.push({ + version: repoVersion + 1, + description: '', + migrate: async (backends, progress) => { + progress(100, 'done') + }, + revert: async (backends, progress) => { + progress(100, 'done') + } + }) - await repo.version.set(9) + await 
repo.version.set(repoVersion + 1) await repo.close() - expect(migrateStub.called).to.be.false() - expect(revertStub.called).to.be.false() - await repo.open() - expect(revertStub.called).to.be.true() - expect(migrateStub.called).to.be.false() + await expect(repo.version.get()).to.eventually.equal(repoVersion) + + migrations.pop() }) it('should revert with progress', async () => { - revertStub.resolves() - repoVersionStub.value(8) + migrations.push({ + version: repoVersion + 1, + description: '', + migrate: async (backends, progress) => { + progress(100, 'done') + }, + revert: async (backends, progress) => { + progress(100, 'done') + } + }) - await repo.version.set(9) + await repo.version.set(repoVersion + 1) await repo.close() - expect(revertStub.called).to.be.false() - // @ts-expect-error options is a private field repo.options.onMigrationProgress = sinon.stub() await repo.open() - expect(revertStub.called).to.be.true() // @ts-expect-error options is a private field - expect(revertStub.getCall(0).args[4]).to.have.property('onProgress', repo.options.onMigrationProgress) + expect(repo.options.onMigrationProgress.called).to.be.true() + + migrations.pop() }) }) } diff --git a/packages/ipfs-repo/test/node.js b/packages/ipfs-repo/test/node.js index 12d4fc8a..f7a121bb 100644 --- a/packages/ipfs-repo/test/node.js +++ b/packages/ipfs-repo/test/node.js @@ -1,16 +1,28 @@ /* eslint-env mocha */ -'use strict' -const loadCodec = require('./fixtures/load-codec') -const MemoryLock = require('../src/locks/memory') -const createBackend = require('./fixtures/create-backend') +import { loadCodec } from './fixtures/load-codec.js' +import * as MemoryLock from '../src/locks/memory.js' +import { createBackend } from './fixtures/create-backend.js' +import optionsTests from './options-test.js' +import migrationsTests from './migrations-test.js' +import repoTests from './repo-test.js' +import blockstoreTests from './blockstore-test.js' +import datastoreTests from './datastore-test.js' 
+import keystoreTests from './keystore-test.js' +import statTests from './stat-test.js' +import lockTests from './lock-test.js' +import configTests from './config-test.js' +import apiAddrTests from './api-addr-test.js' +import pinsTests from './pins-test.js' +import isInitializedTests from './is-initialized.js' +import blockstoreUtilsTests from './blockstore-utils-test.js' /** * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec * @typedef {import('../src/types').Options} Options */ -const { createRepo } = require('../src') +import { createRepo } from '../src/index.js' /** * @param {Partial} [options] @@ -26,8 +38,8 @@ async function createTempRepo (options = {}) { } describe('IPFS Repo Tests onNode.js', () => { - require('./options-test') - require('./migrations-test')(createTempRepo) + optionsTests() + migrationsTests(createTempRepo) /** * @type {Array<{name: string, opts?: Options}>} @@ -62,17 +74,17 @@ describe('IPFS Repo Tests onNode.js', () => { await repo.close() }) - require('./repo-test')(repo) - require('./blockstore-test')(repo) - require('./datastore-test')(repo) - require('./keystore-test')(repo) - require('./stat-test')(repo) - require('./lock-test')(repo) - require('./config-test')(repo) - require('./api-addr-test')() - require('./pins-test')(repo) - require('./is-initialized') + repoTests(repo) + blockstoreTests(repo) + datastoreTests(repo) + keystoreTests(repo) + statTests(repo) + lockTests(repo) + configTests(repo) + apiAddrTests() + pinsTests(repo) + isInitializedTests() })) - require('./blockstore-utils-test')() + blockstoreUtilsTests() }) diff --git a/packages/ipfs-repo/test/options-test.js b/packages/ipfs-repo/test/options-test.js index d5949b66..ec61454b 100644 --- a/packages/ipfs-repo/test/options-test.js +++ b/packages/ipfs-repo/test/options-test.js @@ -1,111 +1,115 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const sinon = require('sinon') -const tempDir = 
require('ipfs-utils/src/temp-dir') -const { isNode } = require('ipfs-utils/src/env') -const rimraf = require('rimraf') +import { expect } from 'aegir/utils/chai.js' +import sinon from 'sinon' +import tempDir from 'ipfs-utils/src/temp-dir.js' +import { isNode } from 'ipfs-utils/src/env.js' +import rimraf from 'rimraf' +import { createRepo } from '../src/index.js' +import { loadCodec } from './fixtures/load-codec.js' +import { createBackend } from './fixtures/create-backend.js' +import defaultOptions from '../src/default-options.js' +import defaultOptionsBrowser from '../src/default-options.browser.js' + if (!rimraf.sync) { // browser rimraf.sync = noop } -const { createRepo } = require('../') -const loadCodec = require('./fixtures/load-codec') -const createBackend = require('./fixtures/create-backend') -describe('custom options tests', () => { - const repoPath = tempDir() - after(() => { - rimraf.sync(repoPath) - }) +export default () => { + describe('custom options tests', () => { + const repoPath = tempDir() + after(() => { + rimraf.sync(repoPath) + }) - it('missing repoPath', () => { - expect( - // @ts-expect-error - () => createRepo() - ).to.throw('missing repo path') - }) + it('missing repoPath', () => { + expect( + // @ts-expect-error + () => createRepo() + ).to.throw('missing repo path') + }) - it('default options', () => { - const repo = createRepo(repoPath, loadCodec, createBackend()) - // @ts-expect-error options is a private field - expect(repo.options).to.deep.equal(expectedRepoOptions()) - }) + it('default options', () => { + const repo = createRepo(repoPath, loadCodec, createBackend()) + // @ts-expect-error options is a private field + expect(repo.options).to.deep.equal(expectedRepoOptions()) + }) - it('allows for a custom lock', async () => { - const release = { - close () { return Promise.resolve() } - } + it('allows for a custom lock', async () => { + const release = { + close () { return Promise.resolve() } + } - const lock = { - /** - * @param 
{string} path - */ - lock: (path) => { - return Promise.resolve(release) - }, - /** - * @param {string} path - */ - locked: (path) => { - return Promise.resolve(true) + const lock = { + /** + * @param {string} path + */ + lock: (path) => { + return Promise.resolve(release) + }, + /** + * @param {string} path + */ + locked: (path) => { + return Promise.resolve(true) + } } - } - const lockSpy = sinon.spy(lock, 'lock') - const releaseSpy = sinon.spy(release, 'close') + const lockSpy = sinon.spy(lock, 'lock') + const releaseSpy = sinon.spy(release, 'close') - const repo = createRepo(repoPath, loadCodec, createBackend(), { - repoLock: lock - }) + const repo = createRepo(repoPath, loadCodec, createBackend(), { + repoLock: lock + }) - await repo.init({}) - await repo.open() - await repo.close() + await repo.init({}) + await repo.open() + await repo.close() - expect(lockSpy.callCount).to.equal(1) - expect(releaseSpy.callCount).to.equal(1) - }) + expect(lockSpy.callCount).to.equal(1) + expect(releaseSpy.callCount).to.equal(1) + }) - it('ensures a custom lock has a .close method', async () => { - const lock = { - /** - * @param {any} path - */ - lock: async (path) => { - return Promise.resolve({ - shouldBeCalledClose () { return Promise.resolve() } - }) - }, - /** - * @param {any} path - */ - locked: async (path) => { - return Promise.resolve(true) + it('ensures a custom lock has a .close method', async () => { + const lock = { + /** + * @param {any} path + */ + lock: async (path) => { + return Promise.resolve({ + shouldBeCalledClose () { return Promise.resolve() } + }) + }, + /** + * @param {any} path + */ + locked: async (path) => { + return Promise.resolve(true) + } } - } - const repo = createRepo(repoPath, loadCodec, createBackend(), { - // @ts-expect-error lock closer types are wrong - repoLock: lock + const repo = createRepo(repoPath, loadCodec, createBackend(), { + // @ts-expect-error lock closer types are wrong + repoLock: lock + }) + let error + try { + // 
@ts-ignore we should not be using private methods + await repo._openLock(repo.path) + } catch (/** @type {any} */ err) { + error = err + } + expect(error.code).to.equal('ERR_NO_CLOSE_FUNCTION') }) - let error - try { - // @ts-ignore we should not be using private methods - await repo._openLock(repo.path) - } catch (err) { - error = err - } - expect(error.code).to.equal('ERR_NO_CLOSE_FUNCTION') }) -}) +} function noop () {} function expectedRepoOptions () { if (isNode) { - return require('../src/default-options') + return defaultOptions } - return require('../src/default-options.browser') + return defaultOptionsBrowser } diff --git a/packages/ipfs-repo/test/pins-test.js b/packages/ipfs-repo/test/pins-test.js index 9cd3e929..7a8f5455 100644 --- a/packages/ipfs-repo/test/pins-test.js +++ b/packages/ipfs-repo/test/pins-test.js @@ -1,17 +1,14 @@ /* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ -'use strict' - -const { expect } = require('aegir/utils/chai') -const dagPb = require('@ipld/dag-pb') -const dagCbor = require('@ipld/dag-cbor') -const { sha256 } = require('multiformats/hashes/sha2') -const { CID } = require('multiformats/cid') -const all = require('it-all') -const { - PinTypes -} = require('../src/pins') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') + +import { expect } from 'aegir/utils/chai.js' +import * as dagPb from '@ipld/dag-pb' +import * as dagCbor from '@ipld/dag-cbor' +import { sha256 } from 'multiformats/hashes/sha2' +import { CID } from 'multiformats/cid' +import all from 'it-all' +import { PinTypes } from '../src/pins.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @param {import('@ipld/dag-pb').PBNode} node @@ -46,7 +43,7 @@ async function createDagCborNode (node = { Data: uint8ArrayFromString(`data-${Ma /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('pins', () => { it('exists', 
() => { expect(repo).to.have.property('pins') diff --git a/packages/ipfs-repo/test/repo-test.js b/packages/ipfs-repo/test/repo-test.js index 350bb194..358ca6e5 100644 --- a/packages/ipfs-repo/test/repo-test.js +++ b/packages/ipfs-repo/test/repo-test.js @@ -1,17 +1,18 @@ /* eslint-env mocha */ -'use strict' - -const { expect } = require('aegir/utils/chai') -const sinon = require('sinon') -const tempDir = require('ipfs-utils/src/temp-dir') -const { createRepo } = require('../') -const Errors = require('../src/errors') -const bytes = require('bytes') -const { BaseDatastore, MemoryDatastore } = require('datastore-core') -const { MemoryBlockstore } = require('blockstore-core') -const loadCodec = require('./fixtures/load-codec') -const MemoryLock = require('../src/locks/memory') -const createBackend = require('./fixtures/create-backend') + +import { expect } from 'aegir/utils/chai.js' +import sinon from 'sinon' +import tempDir from 'ipfs-utils/src/temp-dir.js' +import { createRepo } from '../src/index.js' +import * as Errors from '../src/errors/index.js' +import bytes from 'bytes' +import { BaseDatastore } from 'datastore-core/base' +import { MemoryDatastore } from 'datastore-core/memory' +import { MemoryBlockstore } from 'blockstore-core/memory' +import { loadCodec } from './fixtures/load-codec.js' +import * as MemoryLock from '../src/locks/memory.js' +import { createBackend } from './fixtures/create-backend.js' +import { repoVersion } from '../src/constants.js' /** * @typedef {import('../src/types').IPFSRepo} IPFSRepo @@ -20,7 +21,7 @@ const createBackend = require('./fixtures/create-backend') /** * @param {IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('IPFS Repo Tests', () => { it('check if Repo exists', async () => { const exists = await repo.exists() @@ -70,12 +71,12 @@ module.exports = (repo) => { describe('version', () => { afterEach(async () => { - await repo.version.set(10) + await repo.version.set(repoVersion) }) it('get 
version', async () => { const version = await repo.version.get() - expect(version).to.equal(10) + expect(version).to.equal(repoVersion) }) it('set version', async () => { @@ -124,7 +125,7 @@ module.exports = (repo) => { await repo.close() try { await repo.close() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.code).to.eql(Errors.ERR_REPO_ALREADY_CLOSED) return } @@ -157,7 +158,7 @@ module.exports = (repo) => { await repo.open() try { await repo.open() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.code).to.eql(Errors.ERR_REPO_ALREADY_OPEN) return } @@ -244,7 +245,7 @@ module.exports = (repo) => { try { await otherRepo.init({}) await otherRepo.open() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.message).to.equal('wat') } }) @@ -256,7 +257,7 @@ module.exports = (repo) => { try { await otherRepo.open() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.code).to.equal(Errors.ERR_REPO_NOT_INITIALIZED) } }) @@ -271,7 +272,7 @@ module.exports = (repo) => { try { await otherRepo.open() - } catch (err) { + } catch (/** @type {any} */ err) { expect(err.code).to.equal(Errors.ERR_REPO_NOT_INITIALIZED) } }) diff --git a/packages/ipfs-repo/test/stat-test.js b/packages/ipfs-repo/test/stat-test.js index 3bcb05aa..9195387d 100644 --- a/packages/ipfs-repo/test/stat-test.js +++ b/packages/ipfs-repo/test/stat-test.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ -'use strict' -const { expect } = require('aegir/utils/chai') -const { CID } = require('multiformats/cid') -const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +import { expect } from 'aegir/utils/chai.js' +import { CID } from 'multiformats/cid' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' /** * @param {import('../src/types').IPFSRepo} repo */ -module.exports = (repo) => { +export default (repo) => { describe('stat', () => { before(async () => { await repo.blocks.put( diff --git 
a/packages/ipfs-repo/tsconfig.json b/packages/ipfs-repo/tsconfig.json index 30f8bc4a..12bc73a3 100644 --- a/packages/ipfs-repo/tsconfig.json +++ b/packages/ipfs-repo/tsconfig.json @@ -1,14 +1,9 @@ { "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist", - "baseUrl": "./", - "paths": { - "*": ["./types/*"] - } + "outDir": "types" }, "include": [ - "types", "src", "test" ]