diff --git a/.travis.yml b/.travis.yml index 982bde53d0..50882ff412 100644 --- a/.travis.yml +++ b/.travis.yml @@ -84,6 +84,7 @@ jobs: - stage: test name: lint script: + - npm run build - npm run lint -- $RUN_SINCE --concurrency 1 - stage: test diff --git a/docs/core-api/FILES.md b/docs/core-api/FILES.md index 2c6ad68622..e3f517ef48 100644 --- a/docs/core-api/FILES.md +++ b/docs/core-api/FILES.md @@ -447,6 +447,9 @@ An optional object which may have the following keys: | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | +| archive | `boolean` | `undefined` | Return the file/directory in a tarball | +| compress | `boolean` | `false` | Gzip the returned stream | +| compressionLevel | `Number` | `undefined` | How much compression to apply (1-9) | | timeout | `Number` | `undefined` | A timeout in ms | | signal | [AbortSignal][] | `undefined` | Can be used to cancel any long running requests started as a result of this call | @@ -454,46 +457,29 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `AsyncIterable` | An async iterable that yields objects representing the files | +| `AsyncIterable` | An async iterable that yields bytes | -Each yielded object is of the form: +What is streamed as a response depends on the options passed and what the `ipfsPath` resolves to. -```js -{ - type: string, // 'file' or 'dir' - path: string, // a deeply nested path within the directory structure - content?: >, // only present if `type` is 'file' - mode: Number, // implicit if not provided - 0644 for files, 0755 for directories - mtime?: { secs: Number, nsecs: Number } -} -``` - -Here, each `path` corresponds to the name of a file, and `content` is an async iterable with the file contents. +1. 
If `ipfsPath` resolves to a file: + * By default you will get a tarball containing the file + * Pass `compress: true` (and an optional `compressionLevel`) to instead get the gzipped file contents + * Pass `compress: true` (and an optional `compressionLevel`) AND `archive: true` to get a gzipped tarball containing the file +2. If `ipfsPath` resolves to a directory: + * By default you will get a tarball containing the contents of the directory + * Passing `compress: true` will cause an error + * Pass `compress: true` (and an optional `compressionLevel`) AND `archive: true` to get a gzipped tarball containing the contents of the directory #### Example ```JavaScript const cid = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -for await (const file of ipfs.get(cid)) { - console.log(file.type, file.path) - - if (!file.content) continue; - - const content = [] - - for await (const chunk of file.content) { - content.push(chunk) - } - - console.log(content) +for await (const buf of ipfs.get(cid)) { + // do something with buf } ``` -When invoking this method via the HTTP API client, the response arrives as a stream containing either the entire contents of the file (if the passed [CID][] resolves to a file) or recursive directory tree and all files contained therein (if the passed [CID][] resolves to a directory). - -If you are iterating over a directory, in order to proceed to the next entry in the stream, you must consume the `content` field of the current entry if it is present. - A great source of [examples](https://github.com/ipfs/js-ipfs/blob/master/packages/interface-ipfs-core/src/get.js) can be found in the tests for this API. 
### `ipfs.ls(ipfsPath)` diff --git a/examples/browser-ipns-publish/package.json b/examples/browser-ipns-publish/package.json index 713857753e..d94152be83 100644 --- a/examples/browser-ipns-publish/package.json +++ b/examples/browser-ipns-publish/package.json @@ -28,7 +28,7 @@ "devDependencies": { "delay": "^5.0.0", "execa": "^5.0.0", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "go-ipfs": "0.8.0", "parcel": "2.0.0-beta.2", "path": "^0.12.7", diff --git a/examples/http-client-browser-pubsub/package.json b/examples/http-client-browser-pubsub/package.json index 73994c65af..96edce5b95 100644 --- a/examples/http-client-browser-pubsub/package.json +++ b/examples/http-client-browser-pubsub/package.json @@ -22,7 +22,7 @@ "execa": "^5.0.0", "go-ipfs": "0.8.0", "ipfs": "^0.56.1", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "parcel": "2.0.0-beta.2", "test-ipfs-example": "^3.0.0" } diff --git a/examples/http-client-bundle-webpack/package.json b/examples/http-client-bundle-webpack/package.json index 1383674159..c9ba097aa2 100644 --- a/examples/http-client-bundle-webpack/package.json +++ b/examples/http-client-bundle-webpack/package.json @@ -25,7 +25,7 @@ "copy-webpack-plugin": "^8.1.0", "execa": "^5.0.0", "ipfs": "^0.56.1", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "react-hot-loader": "^4.12.21", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0", diff --git a/examples/http-client-name-api/package.json b/examples/http-client-name-api/package.json index e825ff0727..6168a47241 100644 --- a/examples/http-client-name-api/package.json +++ b/examples/http-client-name-api/package.json @@ -18,7 +18,7 @@ "devDependencies": { "execa": "^5.0.0", "go-ipfs": "0.8.0", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "parcel": "2.0.0-beta.2", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0" diff --git a/examples/ipfs-client-add-files/package.json b/examples/ipfs-client-add-files/package.json index 086e97d12d..e0da947817 100644 --- 
a/examples/ipfs-client-add-files/package.json +++ b/examples/ipfs-client-add-files/package.json @@ -16,7 +16,7 @@ "devDependencies": { "execa": "^5.0.0", "ipfs": "^0.56.1", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "parcel": "2.0.0-beta.2", "rimraf": "^3.0.2", "test-ipfs-example": "^3.0.0" diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index f90c0b844f..e971f1eb1a 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -13,9 +13,10 @@ "ipfs-utils/src/files/glob-source": false }, "scripts": { + "build": "aegir build", "lint": "aegir lint", "test": "echo 'No tests here'", - "dep-check": "aegir dep-check -i abort-controller" + "dep-check": "aegir dep-check -i abort-controller -i ipfs-core-types" }, "files": [ "src/", @@ -35,17 +36,21 @@ "test/fixtures/*" ] }, + "types": "dist/src/index.d.ts", "dependencies": { "@ipld/car": "^3.1.6", "@ipld/dag-cbor": "^6.0.5", "@ipld/dag-pb": "^2.1.3", + "@types/readable-stream": "^2.3.11", + "@types/pako": "^1.0.2", "abort-controller": "^3.0.0", "aegir": "^34.0.2", "delay": "^5.0.0", "err-code": "^3.0.1", "interface-blockstore": "^1.0.0", + "ipfs-core-types": "^0.6.0", "ipfs-unixfs": "^5.0.0", - "ipfs-unixfs-importer": "^8.0.0", + "ipfs-unixfs-importer": "^8.0.2", "ipfs-utils": "^8.1.4", "ipns": "^0.13.2", "is-ipfs": "^6.0.1", @@ -57,7 +62,9 @@ "it-first": "^1.0.4", "it-last": "^1.0.4", "it-map": "^1.0.4", + "it-pipe": "^1.1.0", "it-pushable": "^1.4.2", + "it-tar": "^4.0.0", "it-to-buffer": "^2.0.0", "libp2p-crypto": "^0.19.6", "libp2p-websockets": "^0.16.1", @@ -67,6 +74,7 @@ "native-abort-controller": "^1.0.3", "p-map": "^4.0.0", "p-retry": "^4.5.0", + "pako": "^1.0.2", "peer-id": "^0.15.1", "readable-stream": "^3.4.0", "uint8arrays": "^2.1.6" diff --git a/packages/interface-ipfs-core/src/add-all.js b/packages/interface-ipfs-core/src/add-all.js index 552c5a5120..a992f89d69 100644 --- 
a/packages/interface-ipfs-core/src/add-all.js +++ b/packages/interface-ipfs-core/src/add-all.js @@ -18,20 +18,29 @@ const bufferStream = require('it-buffer-stream') const raw = require('multiformats/codecs/raw') const dagPb = require('@ipld/dag-pb') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.addAll', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {string | number} mode + * @param {number} expectedMode + */ async function testMode (mode, expectedMode) { const content = String(Math.random() + Date.now()) const files = await all(ipfs.addAll([{ @@ -45,12 +54,16 @@ module.exports = (common, options) => { expect(stats).to.have.property('mode', expectedMode) } + /** + * @param {MtimeLike} mtime + * @param {MtimeLike} expectedMtime + */ async function testMtime (mtime, expectedMtime) { const content = String(Math.random() + Date.now()) - const files = await all(ipfs.addAll({ + const files = await all(ipfs.addAll([{ content: uint8ArrayFromString(content), mtime - })) + }])) expect(files).to.have.length(1) expect(files).to.have.deep.nested.property('[0].mtime', expectedMtime) @@ -58,12 +71,15 @@ module.exports = (common, options) => { expect(stats).to.have.deep.property('mtime', expectedMtime) } - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should add a File as array of tuples', async function () { - if (!supportsFileReader) return this.skip('skip in node') + if (!supportsFileReader) { + // 
@ts-ignore this is mocha + return this.skip('skip in node') + } const tuple = { path: 'filename.txt', @@ -86,7 +102,11 @@ module.exports = (common, options) => { }) it('should add array of objects with readable stream content', async function () { - if (!isNode) this.skip() + if (!isNode) { + // @ts-ignore this is mocha + this.skip('Only node supports readable streams') + } + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() @@ -105,11 +125,17 @@ module.exports = (common, options) => { }) it('should add a nested directory as array of tupples', async function () { + /** + * @param {string} name + */ const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] }) + /** + * @param {string} name + */ const emptyDir = (name) => ({ path: `test-folder/${name}` }) const dirs = [ @@ -125,17 +151,29 @@ module.exports = (common, options) => { const root = await last(ipfs.addAll(dirs)) + if (!root) { + throw new Error('Dirs were not loaded') + } + expect(root.path).to.equal('test-folder') expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) }) it('should add a nested directory as array of tupples with progress', async function () { + /** + * @param {string} name + */ const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] }) - const emptyDir = (name) => ({ path: `test-folder/${name}` }) + /** + * @param {string} name + */ + const emptyDir = (name) => ({ path: `test-folder/${name}`, content: undefined }) + + /** @type {Record} */ const progressSizes = {} const dirs = [ @@ -149,7 +187,7 @@ module.exports = (common, options) => { emptyDir('files/empty') ] - const total = dirs.reduce((acc, curr) => { + const total = dirs.reduce((/** @type {Record} */ acc, curr) => { if (curr.content) { acc[curr.path] = curr.content.length } @@ -157,18 +195,28 @@ module.exports = (common, options) => { return acc }, {}) + /** + * @type 
{import('ipfs-core-types/src/root').AddProgressFn} + */ const handler = (bytes, path) => { - progressSizes[path] = bytes + if (path) { + progressSizes[path] = bytes + } } const root = await last(ipfs.addAll(dirs, { progress: handler })) expect(progressSizes).to.deep.equal(total) - expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) + expect(root).to.have.property('path', 'test-folder') + expect(root).to.have.deep.property('cid', fixtures.directory.cid) }) it('should receive progress path as empty string when adding content without paths', async function () { + /** + * @param {string} name + */ const content = (name) => fixtures.directory.files[name] + + /** @type {Record} */ const progressSizes = {} const dirs = [ @@ -181,8 +229,11 @@ module.exports = (common, options) => { '': dirs.reduce((acc, curr) => acc + curr.length, 0) } + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ const handler = (bytes, path) => { - progressSizes[path] = bytes + progressSizes[`${path}`] = bytes } await drain(ipfs.addAll(dirs, { progress: handler })) @@ -190,9 +241,16 @@ module.exports = (common, options) => { }) it('should receive file name from progress event', async () => { + /** @type {string[]} */ const receivedNames = [] + + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ function handler (p, name) { - receivedNames.push(name) + if (name) { + receivedNames.push(name) + } } await drain(ipfs.addAll([{ @@ -210,9 +268,12 @@ module.exports = (common, options) => { }) it('should add files to a directory non sequentially', async function () { + /** + * @param {string} path + */ const content = path => ({ path: `test-dir/${path}`, - content: fixtures.directory.files[path.split('/').pop()] + content: fixtures.directory.files[path.split('/').pop() || ''] }) const input = [ @@ -224,6 +285,10 @@ module.exports = (common, options) => { const filesAdded = await all(ipfs.addAll(input)) + 
/** + * @param {object} arg + * @param {string} arg.path + */ const toPath = ({ path }) => path const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/')) const filesAddedPaths = filesAdded.map(toPath) @@ -234,13 +299,14 @@ module.exports = (common, options) => { it('should fail when passed invalid input', async () => { const nonValid = 138 + // @ts-expect-error nonValid is the wrong type await expect(all(ipfs.addAll(nonValid))).to.eventually.be.rejected() }) it('should wrap content in a directory', async () => { const data = { path: 'testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await all(ipfs.addAll(data, { wrapWithDirectory: true })) + const filesAdded = await all(ipfs.addAll([data], { wrapWithDirectory: true })) expect(filesAdded).to.have.length(2) const file = filesAdded[0] @@ -251,6 +317,7 @@ module.exports = (common, options) => { }) it('should add a directory with only-hash=true', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const content = String(Math.random() + Date.now()) @@ -272,18 +339,21 @@ module.exports = (common, options) => { }) it('should add with mode as string', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mode = '0777' await testMode(mode, parseInt(mode, 8)) }) it('should add with mode as number', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mode = parseInt('0777', 8) await testMode(mode, mode) }) it('should add with mtime as Date', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = new Date(5000) await testMtime(mtime, { @@ -293,6 +363,7 @@ module.exports = (common, options) => { }) it('should add with mtime as { nsecs, secs }', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = { secs: 5, @@ -302,6 +373,7 @@ module.exports = (common, options) => { }) it('should add with mtime as timespec', async function () { + // @ts-ignore this is mocha 
this.slow(10 * 1000) await testMtime({ Seconds: 5, @@ -313,6 +385,7 @@ module.exports = (common, options) => { }) it('should add with mtime as hrtime', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = process.hrtime() await testMtime(mtime, { @@ -322,6 +395,7 @@ module.exports = (common, options) => { }) it('should add a directory from the file system', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() const filesPath = path.join(__dirname, '..', 'test', 'fixtures', 'test-folder') @@ -330,6 +404,7 @@ module.exports = (common, options) => { }) it('should add a directory from the file system with an odd name', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() const filesPath = path.join(__dirname, '..', 'test', 'fixtures', 'weird name folder [v0]') @@ -339,6 +414,7 @@ module.exports = (common, options) => { }) it('should ignore a directory from the file system', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() const filesPath = path.join(__dirname, '..', 'test', 'fixtures', 'test-folder') @@ -348,6 +424,7 @@ module.exports = (common, options) => { }) it('should add a file from the file system', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() const filePath = path.join(__dirname, 'add-all.js') @@ -358,6 +435,7 @@ module.exports = (common, options) => { }) it('should add a hidden file in a directory from the file system', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() const filesPath = path.join(__dirname, '..', 'test', 'fixtures', 'hidden-files-folder') @@ -369,8 +447,10 @@ module.exports = (common, options) => { }) it('should add a file from the file system with only-hash=true', async function () { + // @ts-ignore this is mocha if (!isNode) this.skip() + // @ts-ignore this is mocha this.slow(10 * 1000) const content = String(Math.random() + Date.now()) @@ -387,7 +467,7 @@ module.exports = (common, 
options) => { }) it('should respect raw leaves when file is smaller than one block and no metadata is present', async () => { - const files = await all(ipfs.addAll(Uint8Array.from([0, 1, 2]), { + const files = await all(ipfs.addAll([Uint8Array.from([0, 1, 2])], { cidVersion: 1, rawLeaves: true })) @@ -399,14 +479,14 @@ module.exports = (common, options) => { }) it('should override raw leaves when file is smaller than one block and metadata is present', async () => { - const files = await all(ipfs.addAll({ + const files = await all(ipfs.addAll([{ content: Uint8Array.from([0, 1, 2]), mode: 0o123, mtime: { secs: 1000, nsecs: 0 } - }, { + }], { cidVersion: 1, rawLeaves: true })) @@ -434,8 +514,11 @@ module.exports = (common, options) => { }) it('should support bidirectional streaming', async function () { - let progressInvoked + let progressInvoked = false + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ const handler = (bytes, path) => { progressInvoked = true } @@ -453,7 +536,7 @@ module.exports = (common, options) => { // so the streaming is bidirectional and we can finish up if (progressInvoked) { clearInterval(interval) - resolve() + resolve(null) } }, 10) }) diff --git a/packages/interface-ipfs-core/src/add.js b/packages/interface-ipfs-core/src/add.js index 86f3de4e0b..c64fcc1e2b 100644 --- a/packages/interface-ipfs-core/src/add.js +++ b/packages/interface-ipfs-core/src/add.js @@ -7,27 +7,36 @@ const { supportsFileReader } = require('ipfs-utils/src/supports') const urlSource = require('ipfs-utils/src/files/url-source') const { isNode } = require('ipfs-utils/src/env') const { getDescribe, getIt, expect } = require('./utils/mocha') -const echoUrl = (text) => `${process.env.ECHO_SERVER}/download?data=${encodeURIComponent(text)}` -const redirectUrl = (url) => `${process.env.ECHO_SERVER}/redirect?to=${encodeURI(url)}` +const echoUrl = (/** @type {string} */ text) => `${process.env.ECHO_SERVER}/download?data=${encodeURIComponent(text)}` +const 
redirectUrl = (/** @type {string} */ url) => `${process.env.ECHO_SERVER}/redirect?to=${encodeURI(url)}` const uint8ArrayFromString = require('uint8arrays/from-string') const last = require('it-last') const raw = require('multiformats/codecs/raw') const dagPb = require('@ipld/dag-pb') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + * @typedef {import('ipfs-unixfs').MtimeLike} MtimeLike + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.add', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {string | number} mode + * @param {number} expectedMode + */ async function testMode (mode, expectedMode) { const content = String(Math.random() + Date.now()) const file = await ipfs.add({ @@ -40,6 +49,10 @@ module.exports = (common, options) => { expect(stats).to.have.property('mode', expectedMode) } + /** + * @param {MtimeLike} mtime + * @param {MtimeLike} expectedMtime + */ async function testMtime (mtime, expectedMtime) { const content = String(Math.random() + Date.now()) const file = await ipfs.add({ @@ -52,19 +65,25 @@ module.exports = (common, options) => { expect(stats).to.have.deep.property('mtime', expectedMtime) } - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should add a File', async function () { - if (!supportsFileReader) return this.skip('skip in node') + if (!supportsFileReader) { + // @ts-ignore this is mocha + return this.skip('skip in node') + } const fileAdded = await ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })) 
expect(fileAdded.cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) it('should add a File as tuple', async function () { - if (!supportsFileReader) return this.skip('skip in node') + if (!supportsFileReader) { + // @ts-ignore this is mocha + return this.skip('skip in node') + } const tuple = { path: 'filename.txt', @@ -96,6 +115,10 @@ module.exports = (common, options) => { it('should add a BIG Uint8Array with progress enabled', async () => { let progCalled = false let accumProgress = 0 + + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ function handler (p) { progCalled = true accumProgress = p @@ -112,6 +135,10 @@ module.exports = (common, options) => { it('should add an empty file with progress enabled', async () => { let progCalled = false let accumProgress = 0 + + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ function handler (p) { progCalled = true accumProgress = p @@ -127,6 +154,10 @@ module.exports = (common, options) => { it('should receive file name from progress event', async () => { let receivedName + + /** + * @type {import('ipfs-core-types/src/root').AddProgressFn} + */ function handler (p, name) { receivedName = name } @@ -178,7 +209,10 @@ module.exports = (common, options) => { }) it('should add readable stream', async function () { - if (!isNode) this.skip() + if (!isNode) { + // @ts-ignore this is mocha + this.skip() + } const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() @@ -195,6 +229,7 @@ module.exports = (common, options) => { it('should fail when passed invalid input', async () => { const nonValid = 138 + // @ts-expect-error nonValid is non valid await expect(ipfs.add(nonValid)).to.eventually.be.rejected() }) @@ -210,6 +245,7 @@ module.exports = (common, options) => { }) it('should add with only-hash=true', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const content = String(Math.random() + Date.now()) 
@@ -221,18 +257,21 @@ module.exports = (common, options) => { }) it('should add with mode as string', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mode = '0777' await testMode(mode, parseInt(mode, 8)) }) it('should add with mode as number', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mode = parseInt('0777', 8) await testMode(mode, mode) }) it('should add with mtime as Date', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = new Date(5000) await testMtime(mtime, { @@ -242,6 +281,7 @@ module.exports = (common, options) => { }) it('should add with mtime as { nsecs, secs }', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = { secs: 5, @@ -251,6 +291,7 @@ module.exports = (common, options) => { }) it('should add with mtime as timespec', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) await testMtime({ Seconds: 5, @@ -262,6 +303,7 @@ module.exports = (common, options) => { }) it('should add with mtime as hrtime', async function () { + // @ts-ignore this is mocha this.slow(10 * 1000) const mtime = process.hrtime() await testMtime(mtime, { @@ -279,8 +321,6 @@ module.exports = (common, options) => { ipfs.add(text) ]) - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() expect(result.cid.toString()).to.equal(expectedResult.cid.toString()) expect(result.size).to.equal(expectedResult.size) }) @@ -294,8 +334,6 @@ module.exports = (common, options) => { ipfs.add(text) ]) - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() expect(result.cid.toString()).to.equal(expectedResult.cid.toString()) expect(result.size).to.equal(expectedResult.size) }) @@ -320,8 +358,6 @@ module.exports = (common, options) => { ipfs.add(urlSource(url), addOpts), ipfs.add({ path: 'download', content: filename }, addOpts) ]) - expect(result.err).to.not.exist() - 
expect(expectedResult.err).to.not.exist() expect(result).to.deep.equal(expectedResult) }) @@ -332,11 +368,9 @@ module.exports = (common, options) => { const [result, expectedResult] = await Promise.all([ ipfs.add(urlSource(url), addOpts), - ipfs.add([{ path: 'download', content: filename }], addOpts) + ipfs.add({ path: 'download', content: filename }, addOpts) ]) - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() expect(result).to.deep.equal(expectedResult) }) @@ -388,16 +422,23 @@ module.exports = (common, options) => { })) it('should be able to add dir without sharding', async () => { - const { path, cid } = await last(ipfs.addAll(testFiles)) + const result = await last(ipfs.addAll(testFiles)) + + if (!result) { + throw new Error('No addAll result received') + } + + const { path, cid } = result expect(path).to.eql('test-folder') expect(cid.toString()).to.eql('QmWWM8ZV6GPhqJ46WtKcUaBPNHN5yQaFsKDSQ1RE73w94Q') }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js @@ -415,7 +456,13 @@ module.exports = (common, options) => { }) it('should be able to add dir with sharding', async () => { - const { path, cid } = await last(ipfs.addAll(testFiles)) + const result = await last(ipfs.addAll(testFiles)) + + if (!result) { + throw new Error('No addAll result received') + } + + const { path, cid } = result expect(path).to.eql('test-folder') expect(cid.toString()).to.eql('Qmb3JNLq2KcvDTSGT23qNQkMrr4Y4fYMktHh6DtC7YatLa') }) diff --git a/packages/interface-ipfs-core/src/bitswap/stat.js b/packages/interface-ipfs-core/src/bitswap/stat.js index 7dd1264238..a385e6dd2b 100644 --- a/packages/interface-ipfs-core/src/bitswap/stat.js +++ b/packages/interface-ipfs-core/src/bitswap/stat.js @@ -4,24 +4,29 @@ const { getDescribe, getIt, expect } = 
require('../utils/mocha') const { expectIsBitswap } = require('../stats/utils') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.bitswap.stat', function () { this.timeout(60 * 1000) + + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get bitswap stats', async () => { const res = await ipfs.bitswap.stat() @@ -29,7 +34,7 @@ module.exports = (common, options) => { }) it('should not get bitswap stats when offline', async () => { - const node = await common.spawn() + const node = await factory.spawn() await node.stop() return expect(node.api.bitswap.stat()).to.eventually.be.rejected() diff --git a/packages/interface-ipfs-core/src/bitswap/transfer.js b/packages/interface-ipfs-core/src/bitswap/transfer.js index 346017e60d..761a664cc8 100644 --- a/packages/interface-ipfs-core/src/bitswap/transfer.js +++ b/packages/interface-ipfs-core/src/bitswap/transfer.js @@ -10,7 +10,11 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const pmap = require('p-map') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + * @typedef {import('multiformats').CID} CID + */ + /** * @param {Factory} factory * @param {Object} options @@ -29,8 +33,9 @@ module.exports = (factory, options) => { it('2 peers', async function () { // webworkers are not dialable because webrtc is not available const remote = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api + const remoteId = await remote.id() const local = (await factory.spawn({ type: 'proc', ipfsOptions })).api - await local.swarm.connect(remote.peerId.addresses[0]) + await local.swarm.connect(remoteId.addresses[0]) const data = uint8ArrayFromString(`IPFS is awesome ${nanoid()}`) const cid = await local.block.put(data) @@ -42,13 +47,16 @@ module.exports = (factory, options) => { it('3 peers', async () => { const blocks = Array(6).fill(0).map(() => uint8ArrayFromString(`IPFS is awesome ${nanoid()}`)) const remote1 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + const remote1Id = await remote1.id() const remote2 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + const remote2Id = await remote2.id() const local = (await factory.spawn({ type: 'proc', ipfsOptions })).api - await local.swarm.connect(remote1.peerId.addresses[0]) - await local.swarm.connect(remote2.peerId.addresses[0]) - await remote1.swarm.connect(remote2.peerId.addresses[0]) + await local.swarm.connect(remote1Id.addresses[0]) + await local.swarm.connect(remote2Id.addresses[0]) + await remote1.swarm.connect(remote2Id.addresses[0]) // order is important + /** @type {CID[]} */ const cids = [] cids.push(await remote1.block.put(blocks[0])) cids.push(await remote1.block.put(blocks[1])) @@ -69,8 +77,9 @@ module.exports = (factory, options) => { it('2 peers', async () => { const content = randomBytes(1024) const remote = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api + const remoteId = await remote.id() const local = (await factory.spawn({ type: 'proc', ipfsOptions })).api - local.swarm.connect(remote.peerId.addresses[0]) + local.swarm.connect(remoteId.addresses[0]) const file = await remote.add({ path: 'awesome.txt', content }) const data = await concat(local.cat(file.cid)) diff --git a/packages/interface-ipfs-core/src/bitswap/unwant.js b/packages/interface-ipfs-core/src/bitswap/unwant.js index 1ee57cb804..7a4f39407e 100644 --- a/packages/interface-ipfs-core/src/bitswap/unwant.js +++ b/packages/interface-ipfs-core/src/bitswap/unwant.js @@ -3,7 +3,10 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + /** * @param {Factory} factory * @param {Object} options @@ -15,6 +18,7 @@ module.exports = (factory, options) => { describe('.bitswap.unwant', function () { this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { @@ -24,6 +28,7 @@ module.exports = (factory, options) => { after(() => factory.clean()) it('should throw error for invalid CID input', async () => { + // @ts-expect-error input is invalid await expect(ipfs.bitswap.unwant('INVALID CID')).to.eventually.be.rejected() }) }) diff --git a/packages/interface-ipfs-core/src/bitswap/utils.js b/packages/interface-ipfs-core/src/bitswap/utils.js index b0ee8fb62b..e3ca6d6fbb 100644 --- a/packages/interface-ipfs-core/src/bitswap/utils.js +++ b/packages/interface-ipfs-core/src/bitswap/utils.js @@ -2,6 +2,11 @@ const delay = require('delay') +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {string} key + * @param {{ timeout?: number, interval?: number, peerId?: string }} [opts] + */ async function waitForWantlistKey (ipfs, key, opts = {}) { opts.timeout = opts.timeout || 10000 opts.interval = opts.interval || 100 @@ -27,6 +32,11 @@ async function waitForWantlistKey 
(ipfs, key, opts = {}) { throw new Error(`Timed out waiting for ${key} in wantlist`) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {string} key + * @param {{ timeout?: number, interval?: number, peerId?: string }} [opts] + */ async function waitForWantlistKeyToBeRemoved (ipfs, key, opts = {}) { opts.timeout = opts.timeout || 10000 opts.interval = opts.interval || 100 diff --git a/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js b/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js index 72ee450d07..6a6d3b4a5a 100644 --- a/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js +++ b/packages/interface-ipfs-core/src/bitswap/wantlist-for-peer.js @@ -7,12 +7,15 @@ const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') const { CID } = require('multiformats/cid') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -20,25 +23,31 @@ module.exports = (common, options) => { describe('.bitswap.wantlistForPeer', function () { this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsB = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api // Add key to the wantlist for ipfsB ipfsB.block.get(CID.parse(key)).catch(() => { /* is ok, expected on teardown */ }) - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + const ipfsBId = await ipfsB.id() + + await ipfsA.swarm.connect(ipfsBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) + + it('should get the wantlist by peer ID for a different node', async () => { + const ipfsBId = await ipfsB.id() - it('should get the wantlist by peer ID for a different node', function () { return waitForWantlistKey(ipfsA, key, { - peerId: ipfsB.peerId.id, + peerId: ipfsBId.id, timeout: 60 * 1000 }) }) diff --git a/packages/interface-ipfs-core/src/bitswap/wantlist.js b/packages/interface-ipfs-core/src/bitswap/wantlist.js index 0fb961a5d1..9affe9f434 100644 --- a/packages/interface-ipfs-core/src/bitswap/wantlist.js +++ b/packages/interface-ipfs-core/src/bitswap/wantlist.js @@ -10,12 +10,15 @@ const { CID } = require('multiformats/cid') const delay = require('delay') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -23,21 +26,25 @@ module.exports = (common, options) => { describe('.bitswap.wantlist', function () { this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable 
because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api // Add key to the wantlist for ipfsB ipfsB.block.get(CID.parse(key)).catch(() => { /* is ok, expected on teardown */ }) - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + const ipfsBId = await ipfsB.id() + + await ipfsA.swarm.connect(ipfsBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when getting bitswap wantlist', () => { return testTimeout(() => ipfsA.bitswap.wantlist({ @@ -50,7 +57,7 @@ module.exports = (common, options) => { }) it('should not get the wantlist when offline', async () => { - const node = await common.spawn() + const node = await factory.spawn() await node.stop() return expect(node.api.bitswap.stat()).to.eventually.be.rejected() diff --git a/packages/interface-ipfs-core/src/block/get.js b/packages/interface-ipfs-core/src/block/get.js index bbb74ceb37..fdb61150f2 100644 --- a/packages/interface-ipfs-core/src/block/get.js +++ b/packages/interface-ipfs-core/src/block/get.js @@ -7,25 +7,31 @@ const { CID } = require('multiformats/cid') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.block.get', () => { const data = uint8ArrayFromString('blorb') - let ipfs, cid + /** @type {import('ipfs-core-types').IPFS} */ + let ipfs + /** @type {CID} */ + let cid before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api cid = await 
ipfs.block.put(data) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when getting a block', () => { return testTimeout(() => ipfs.block.get(CID.parse('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rA3'), { @@ -86,6 +92,7 @@ module.exports = (common, options) => { }) it('should return an error for an invalid CID', () => { + // @ts-expect-error invalid input return expect(ipfs.block.get('Non-base58 character')).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) diff --git a/packages/interface-ipfs-core/src/block/put.js b/packages/interface-ipfs-core/src/block/put.js index a25d4e5e86..e1767b4671 100644 --- a/packages/interface-ipfs-core/src/block/put.js +++ b/packages/interface-ipfs-core/src/block/put.js @@ -9,23 +9,27 @@ const all = require('it-all') const raw = require('multiformats/codecs/raw') const { sha512 } = require('multiformats/hashes/sha2') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.block.put', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should put a buffer, using defaults', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' diff --git a/packages/interface-ipfs-core/src/block/rm.js b/packages/interface-ipfs-core/src/block/rm.js index 1331b7d35e..73671c944b 100644 --- a/packages/interface-ipfs-core/src/block/rm.js +++ b/packages/interface-ipfs-core/src/block/rm.js @@ -11,21 +11,25 @@ const { CID } = require('multiformats/cid') const raw = 
require('multiformats/codecs/raw') const testTimeout = require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.block.rm', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when removing a block', () => { return testTimeout(() => drain(ipfs.block.rm(CID.parse('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { @@ -93,8 +97,7 @@ module.exports = (common, options) => { const result = await all(ipfs.block.rm(cid)) expect(result).to.be.an('array').and.to.have.lengthOf(1) - expect(result[0]).to.have.property('error') - expect(result[0].error.message).to.include('block not found') + expect(result).to.have.nested.property('[0].error.message').that.includes('block not found') }) it('should not error when force removing non-existent blocks', async () => { @@ -138,6 +141,7 @@ module.exports = (common, options) => { }) it('should throw error for invalid CID input', () => { + // @ts-expect-error invalid input return expect(all(ipfs.block.rm('INVALID CID'))) .to.eventually.be.rejected() }) diff --git a/packages/interface-ipfs-core/src/block/stat.js b/packages/interface-ipfs-core/src/block/stat.js index 4b08e489d2..01494308ae 100644 --- a/packages/interface-ipfs-core/src/block/stat.js +++ b/packages/interface-ipfs-core/src/block/stat.js @@ -6,25 +6,31 @@ const { CID } = require('multiformats/cid') const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = 
require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.block.stat', () => { const data = uint8ArrayFromString('blorb') - let ipfs, cid + /** @type {import('ipfs-core-types').IPFS} */ + let ipfs + /** @type {CID} */ + let cid before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api cid = await ipfs.block.put(data) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when statting a block', () => { return testTimeout(() => ipfs.block.stat(CID.parse('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'), { @@ -39,11 +45,13 @@ module.exports = (common, options) => { }) it('should return error for missing argument', () => { + // @ts-expect-error invalid input return expect(ipfs.block.stat(null)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) it('should return error for invalid argument', () => { + // @ts-expect-error invalid input return expect(ipfs.block.stat('invalid')).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) diff --git a/packages/interface-ipfs-core/src/bootstrap/add.js b/packages/interface-ipfs-core/src/bootstrap/add.js index 2b931bd98e..6ee1d30856 100644 --- a/packages/interface-ipfs-core/src/bootstrap/add.js +++ b/packages/interface-ipfs-core/src/bootstrap/add.js @@ -7,27 +7,32 @@ const { Multiaddr } = require('multiaddr') const invalidArg = 'this/Is/So/Invalid/' const validIp4 = new Multiaddr('/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory 
+ */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.bootstrap.add', function () { this.timeout(100 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should return an error when called with an invalid arg', () => { + // @ts-expect-error invalid input return expect(ipfs.bootstrap.add(invalidArg)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) diff --git a/packages/interface-ipfs-core/src/bootstrap/clear.js b/packages/interface-ipfs-core/src/bootstrap/clear.js index 6fd49162d6..eeee125d00 100644 --- a/packages/interface-ipfs-core/src/bootstrap/clear.js +++ b/packages/interface-ipfs-core/src/bootstrap/clear.js @@ -4,12 +4,15 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { Multiaddr } = require('multiaddr') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) @@ -18,11 +21,12 @@ module.exports = (common, options) => { describe('.bootstrap.clear', function () { this.timeout(100 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should return a list containing the peer removed when called with a valid arg (ip4)', async () => { await ipfs.bootstrap.clear() diff --git 
a/packages/interface-ipfs-core/src/bootstrap/list.js b/packages/interface-ipfs-core/src/bootstrap/list.js index 53d30b88d1..4e21f87576 100644 --- a/packages/interface-ipfs-core/src/bootstrap/list.js +++ b/packages/interface-ipfs-core/src/bootstrap/list.js @@ -4,23 +4,27 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { Multiaddr } = require('multiaddr') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.bootstrap.list', function () { this.timeout(100 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should return a list of peers', async () => { const res = await ipfs.bootstrap.list() diff --git a/packages/interface-ipfs-core/src/bootstrap/reset.js b/packages/interface-ipfs-core/src/bootstrap/reset.js index 334c6922c4..3b35606b56 100644 --- a/packages/interface-ipfs-core/src/bootstrap/reset.js +++ b/packages/interface-ipfs-core/src/bootstrap/reset.js @@ -4,25 +4,29 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { Multiaddr } = require('multiaddr') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.bootstrap.reset', function () { this.timeout(100 * 1000) + /** @type 
{import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should return a list of bootstrap peers when resetting the bootstrap nodes', async () => { const res = await ipfs.bootstrap.reset() diff --git a/packages/interface-ipfs-core/src/bootstrap/rm.js b/packages/interface-ipfs-core/src/bootstrap/rm.js index cb8e22af0f..8f31edf3fb 100644 --- a/packages/interface-ipfs-core/src/bootstrap/rm.js +++ b/packages/interface-ipfs-core/src/bootstrap/rm.js @@ -4,12 +4,15 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { Multiaddr } = require('multiaddr') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) @@ -19,13 +22,15 @@ module.exports = (common, options) => { describe('.bootstrap.rm', function () { this.timeout(100 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should return an error when called with an invalid arg', () => { + // @ts-expect-error invalid input return expect(ipfs.bootstrap.rm(invalidArg)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) diff --git a/packages/interface-ipfs-core/src/cat.js b/packages/interface-ipfs-core/src/cat.js index 293afc3012..b5b0880b8f 100644 --- a/packages/interface-ipfs-core/src/cat.js +++ b/packages/interface-ipfs-core/src/cat.js @@ -13,27 +13,31 @@ const testTimeout = require('./utils/test-timeout') const { importer } = 
require('ipfs-unixfs-importer') const blockstore = require('./utils/blockstore-adapter') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.cat', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) before(() => Promise.all([ - all(importer([{ content: fixtures.smallFile.data }], blockstore(ipfs))), - all(importer([{ content: fixtures.bigFile.data }], blockstore(ipfs))) + all(importer({ content: fixtures.smallFile.data }, blockstore(ipfs))), + all(importer({ content: fixtures.bigFile.data }, blockstore(ipfs))) ])) it('should respect timeout option when catting files', () => { @@ -64,7 +68,7 @@ module.exports = (common, options) => { it('should cat a file added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], blockstore(ipfs))) + const res = await all(importer([{ content: (async function * () { yield input }()) }], blockstore(ipfs))) expect(res).to.have.nested.property('[0].cid.version', 0) @@ -77,7 +81,7 @@ module.exports = (common, options) => { it('should cat a file added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - const res = await all(importer([{ content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) + const res = await all(importer([{ content: (async function * () { yield input }()) }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) 
expect(res).to.have.nested.property('[0].cid.version', 1) @@ -103,11 +107,15 @@ module.exports = (common, options) => { it('should cat with IPFS path, nested value', async () => { const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await all(importer([fileToAdd], blockstore(ipfs))) + const filesAdded = await all(importer(fileToAdd, blockstore(ipfs))) const file = await filesAdded.find((f) => f.path === 'a') expect(file).to.exist() + if (!file) { + throw new Error('No file added') + } + const data = uint8ArrayConcat(await all(ipfs.cat(`/ipfs/${file.cid}/testfile.txt`))) expect(uint8ArrayToString(data)).to.contain('Plz add me!') @@ -121,6 +129,10 @@ module.exports = (common, options) => { const file = filesAdded.find((f) => f.path === 'a') expect(file).to.exist() + if (!file) { + throw new Error('No file added') + } + const data = uint8ArrayConcat(await all(ipfs.cat(`/ipfs/${file.cid}/b/testfile.txt`))) expect(uint8ArrayToString(data)).to.contain('Plz add me!') }) diff --git a/packages/interface-ipfs-core/src/config/get.js b/packages/interface-ipfs-core/src/config/get.js index f65521af91..dc20ad8853 100644 --- a/packages/interface-ipfs-core/src/config/get.js +++ b/packages/interface-ipfs-core/src/config/get.js @@ -3,24 +3,29 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.config.get', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + 
after(() => factory.clean()) it('should fail with error', async () => { + // @ts-expect-error missing arg await expect(ipfs.config.get()).to.eventually.rejectedWith('key argument is required') }) @@ -35,6 +40,7 @@ module.exports = (common, options) => { }) it('should fail on non valid key', () => { + // @ts-expect-error invalid arg return expect(ipfs.config.get(1234)).to.eventually.be.rejected() }) @@ -45,11 +51,12 @@ module.exports = (common, options) => { describe('.config.getAll', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should retrieve the whole config', async () => { const config = await ipfs.config.getAll() diff --git a/packages/interface-ipfs-core/src/config/profiles/apply.js b/packages/interface-ipfs-core/src/config/profiles/apply.js index 2e05dfa4b9..a1592764c6 100644 --- a/packages/interface-ipfs-core/src/config/profiles/apply.js +++ b/packages/interface-ipfs-core/src/config/profiles/apply.js @@ -3,31 +3,35 @@ const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.config.profiles.apply', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should apply a config profile', async () => { const diff = await ipfs.config.profiles.apply('lowpower') 
- expect(diff.original.Swarm.ConnMgr.LowWater).to.not.equal(diff.updated.Swarm.ConnMgr.LowWater) + expect(diff.original.Swarm?.ConnMgr?.LowWater).to.not.equal(diff.updated.Swarm?.ConnMgr?.LowWater) const newConfig = await ipfs.config.getAll() - expect(newConfig.Swarm.ConnMgr.LowWater).to.equal(diff.updated.Swarm.ConnMgr.LowWater) + expect(newConfig.Swarm?.ConnMgr?.LowWater).to.equal(diff.updated.Swarm?.ConnMgr?.LowWater) }) it('should strip private key from diff output', async () => { diff --git a/packages/interface-ipfs-core/src/config/profiles/list.js b/packages/interface-ipfs-core/src/config/profiles/list.js index 9cbe56219e..a59ed14051 100644 --- a/packages/interface-ipfs-core/src/config/profiles/list.js +++ b/packages/interface-ipfs-core/src/config/profiles/list.js @@ -3,24 +3,28 @@ const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.config.profiles.list', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should list config profiles', async () => { const profiles = await ipfs.config.profiles.list() diff --git a/packages/interface-ipfs-core/src/config/replace.js b/packages/interface-ipfs-core/src/config/replace.js index 847e9f0a83..5644c5c6a6 100644 --- a/packages/interface-ipfs-core/src/config/replace.js +++ b/packages/interface-ipfs-core/src/config/replace.js @@ -3,27 +3,33 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { 
import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.config.replace', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) const config = { - Fruit: 'Bananas' + Addresses: { + API: '' + } } it('should replace the whole config', async () => { diff --git a/packages/interface-ipfs-core/src/config/set.js b/packages/interface-ipfs-core/src/config/set.js index 38ad9ae339..03ad0ba9f4 100644 --- a/packages/interface-ipfs-core/src/config/set.js +++ b/packages/interface-ipfs-core/src/config/set.js @@ -4,24 +4,28 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.config.set', function () { this.timeout(30 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should set a new key', async () => { await ipfs.config.set('Fruit', 'banana') @@ -74,6 +78,7 @@ module.exports = (common, options) => { }) it('should fail on non valid key', () => { + // @ts-expect-error invalid 
arg return expect(ipfs.config.set(uint8ArrayFromString('heeey'), '')).to.eventually.be.rejected() }) diff --git a/packages/interface-ipfs-core/src/dag/export.js b/packages/interface-ipfs-core/src/dag/export.js index b1ff5fb299..7a701f4d0a 100644 --- a/packages/interface-ipfs-core/src/dag/export.js +++ b/packages/interface-ipfs-core/src/dag/export.js @@ -10,22 +10,26 @@ const dagCbor = require('@ipld/dag-cbor') const loadFixture = require('aegir/utils/fixtures') const toBuffer = require('it-to-buffer') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dag.export', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should export a car file', async () => { const child = dagPb.encode({ @@ -68,6 +72,7 @@ module.exports = (common, options) => { }) it('export of shuffled devnet export identical to canonical original', async function () { + // @ts-ignore this is mocha this.timeout(360000) const input = loadFixture('test/fixtures/car/lotus_devnet_genesis.car', 'interface-ipfs-core') @@ -78,6 +83,7 @@ module.exports = (common, options) => { }) it('export of shuffled testnet export identical to canonical original', async function () { + // @ts-ignore this is mocha this.timeout(360000) const input = loadFixture('test/fixtures/car/lotus_testnet_export_128.car', 'interface-ipfs-core') diff --git a/packages/interface-ipfs-core/src/dag/get.js b/packages/interface-ipfs-core/src/dag/get.js index f2fa5f5106..248aa4f992 100644 --- a/packages/interface-ipfs-core/src/dag/get.js +++ 
b/packages/interface-ipfs-core/src/dag/get.js @@ -16,26 +16,48 @@ const { identity } = require('multiformats/hashes/identity') const dagCbor = require('@ipld/dag-cbor') const blockstore = require('../utils/blockstore-adapter') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dag.get', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) + /** + * @type {dagPB.PBNode} + */ let pbNode + /** + * @type {any} + */ let cborNode + /** + * @type {dagPB.PBNode} + */ let nodePb + /** + * @type {any} + */ let nodeCbor + /** + * @type {CID} + */ let cidPb + /** + * @type {CID} + */ let cidCbor before(async () => { @@ -252,11 +274,13 @@ module.exports = (common, options) => { }) it('should throw error for invalid string CID input', () => { + // @ts-expect-error invalid arg return expect(ipfs.dag.get('INVALID CID')) .to.eventually.be.rejected() }) it('should throw error for invalid buffer CID input', () => { + // @ts-expect-error invalid arg return expect(ipfs.dag.get(uint8ArrayFromString('INVALID CID'))) .to.eventually.be.rejected() }) diff --git a/packages/interface-ipfs-core/src/dag/import.js b/packages/interface-ipfs-core/src/dag/import.js index 3c13bd3b5f..1187d17c01 100644 --- a/packages/interface-ipfs-core/src/dag/import.js +++ b/packages/interface-ipfs-core/src/dag/import.js @@ -31,7 +31,7 @@ async function createBlocks (num) { /** * @param {{ cid: CID, bytes: Uint8Array }[]} blocks - * @returns {AsyncIterable} + * @returns {Promise>} */ async function createCar 
(blocks) { const rootBlock = blocks[0] @@ -49,22 +49,26 @@ async function createCar (blocks) { return out } -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dag.import', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should import a car file', async () => { const blocks = await createBlocks(5) @@ -72,6 +76,7 @@ module.exports = (common, options) => { const result = await all(ipfs.dag.import(car)) expect(result).to.have.lengthOf(1) + // @ts-ignore chai types are messed up expect(result).to.have.nested.deep.property('[0].root.cid', blocks[0].cid) for (const { cid } of blocks) { @@ -150,6 +155,7 @@ module.exports = (common, options) => { expect(cids[0].toString()).to.equal('bafkqaaa') const result = await all(ipfs.dag.import(async function * () { yield input }())) + // @ts-ignore chai types are messed up expect(result).to.have.nested.deep.property('[0].root.cid', cids[0]) }) }) diff --git a/packages/interface-ipfs-core/src/dag/put.js b/packages/interface-ipfs-core/src/dag/put.js index e391b4c39f..da8c819024 100644 --- a/packages/interface-ipfs-core/src/dag/put.js +++ b/packages/interface-ipfs-core/src/dag/put.js @@ -7,21 +7,25 @@ const { CID } = require('multiformats/cid') const { sha256, sha512 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} 
factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dag.put', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) const pbNode = { Data: uint8ArrayFromString('some data'), diff --git a/packages/interface-ipfs-core/src/dag/resolve.js b/packages/interface-ipfs-core/src/dag/resolve.js index 230ddda5b7..89af36306e 100644 --- a/packages/interface-ipfs-core/src/dag/resolve.js +++ b/packages/interface-ipfs-core/src/dag/resolve.js @@ -6,20 +6,24 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') const uint8ArrayFromString = require('uint8arrays/from-string') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dag.resolve', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when resolving a path within a DAG node', async () => { const cid = await ipfs.dag.put({}, { format: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -104,7 +108,7 @@ module.exports = (common, options) => { }) it('should resolve a raw node', async () => { - const node = Uint8Array.from(['hello world']) + const node = uint8ArrayFromString('hello world') const cid = await 
ipfs.dag.put(node, { format: 'raw', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(cid, { path: '/' }) diff --git a/packages/interface-ipfs-core/src/dht/disabled.js b/packages/interface-ipfs-core/src/dht/disabled.js index 1d95fa07de..06c461553c 100644 --- a/packages/interface-ipfs-core/src/dht/disabled.js +++ b/packages/interface-ipfs-core/src/dht/disabled.js @@ -2,8 +2,12 @@ 'use strict' const { getDescribe, getIt, expect } = require('../utils/mocha') +const uint8ArrayFromString = require('uint8arrays/from-string') + +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** * @param {Factory} factory * @param {Object} options @@ -15,7 +19,9 @@ module.exports = (factory, options) => { describe('disabled', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB before(async () => { @@ -29,13 +35,14 @@ module.exports = (factory, options) => { } })).api nodeB = (await factory.spawn()).api - await nodeA.swarm.connect(nodeB.peerId.addresses[0]) + const nodeBId = await nodeB.id() + await nodeA.swarm.connect(nodeBId.addresses[0]) }) after(() => factory.clean()) it('should error when DHT not available', async () => { - await expect(nodeA.dht.get('/ipns/Qme6KJdKcp85TYbLxuLV7oQzMiLremD7HMoXLZEmgo6Rnh')) + await expect(nodeA.dht.get(uint8ArrayFromString('/ipns/Qme6KJdKcp85TYbLxuLV7oQzMiLremD7HMoXLZEmgo6Rnh'))) .to.eventually.be.rejected() }) }) diff --git a/packages/interface-ipfs-core/src/dht/find-peer.js b/packages/interface-ipfs-core/src/dht/find-peer.js index 38fa494a90..e52c7eab51 100644 --- a/packages/interface-ipfs-core/src/dht/find-peer.js +++ b/packages/interface-ipfs-core/src/dht/find-peer.js @@ -4,28 +4,37 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** 
- * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.findPeer', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId before(async () => { - nodeA = (await common.spawn()).api - nodeB = (await common.spawn()).api - await nodeB.swarm.connect(nodeA.peerId.addresses[0]) + nodeA = (await factory.spawn()).api + nodeB = (await factory.spawn()).api + nodeBId = await nodeB.id() + + await nodeA.swarm.connect(nodeBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when finding a peer on the DHT', async () => { const nodeBId = await nodeB.id() @@ -43,7 +52,7 @@ module.exports = (common, options) => { const nodeAddresses = nodeBId.addresses.map((addr) => addr.nodeAddress()) const peerAddresses = res.addrs.map(ma => ma.nodeAddress()) - expect(id).to.be.eql(nodeB.peerId.id) + expect(id).to.be.eql(nodeBId.id) expect(peerAddresses).to.deep.include(nodeAddresses[0]) }) diff --git a/packages/interface-ipfs-core/src/dht/find-provs.js b/packages/interface-ipfs-core/src/dht/find-provs.js index a7b2a1b7d8..45de780a5b 100644 --- a/packages/interface-ipfs-core/src/dht/find-provs.js +++ b/packages/interface-ipfs-core/src/dht/find-provs.js @@ -7,33 +7,54 @@ const drain = require('it-drain') const { fakeCid } = require('./utils') const testTimeout = require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ 
-module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.findProvs', function () { this.timeout(20000) + + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types').IPFS} */ let nodeC + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeAId + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeCId before(async () => { - nodeA = (await common.spawn()).api - nodeB = (await common.spawn()).api - nodeC = (await common.spawn()).api + nodeA = (await factory.spawn()).api + nodeB = (await factory.spawn()).api + nodeC = (await factory.spawn()).api + + nodeAId = await nodeA.id() + nodeBId = await nodeB.id() + nodeCId = await nodeC.id() + await Promise.all([ - nodeB.swarm.connect(nodeA.peerId.addresses[0]), - nodeC.swarm.connect(nodeB.peerId.addresses[0]) + nodeB.swarm.connect(nodeAId.addresses[0]), + nodeC.swarm.connect(nodeBId.addresses[0]) ]) }) - after(() => common.clean()) + after(() => factory.clean()) + /** + * @type {import('multiformats/cid').CID} + */ let providedCid before('add providers for the same cid', async function () { this.timeout(10 * 1000) @@ -58,14 +79,15 @@ module.exports = (common, options) => { }) it('should be able to find providers', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const provs = await all(nodeA.dht.findProvs(providedCid, { numProviders: 2 })) const providerIds = provs.map((p) => p.id.toString()) expect(providerIds).to.have.members([ - nodeB.peerId.id, - nodeC.peerId.id + nodeBId.id, + nodeCId.id ]) }) diff --git a/packages/interface-ipfs-core/src/dht/get.js b/packages/interface-ipfs-core/src/dht/get.js index 7ff2f1284b..6489a9c263 100644 --- a/packages/interface-ipfs-core/src/dht/get.js +++ 
b/packages/interface-ipfs-core/src/dht/get.js @@ -4,46 +4,57 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.get', function () { + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId before(async () => { - nodeA = (await common.spawn()).api - nodeB = (await common.spawn()).api - await nodeA.swarm.connect(nodeB.peerId.addresses[0]) + nodeA = (await factory.spawn()).api + nodeB = (await factory.spawn()).api + nodeBId = await nodeB.id() + + await nodeA.swarm.connect(nodeBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when getting a value from the DHT', async () => { const data = await nodeA.add('should put a value to the DHT') const publish = await nodeA.name.publish(data.cid) - await testTimeout(() => nodeB.dht.get(`/ipns/${publish.name}`, { + await testTimeout(() => nodeB.dht.get(uint8ArrayFromString(`/ipns/${publish.name}`), { timeout: 1 })) }) it('should error when getting a non-existent key from the DHT', () => { - return expect(nodeA.dht.get('non-existing', { timeout: 100 })).to.eventually.be.rejected + return expect(nodeA.dht.get(uint8ArrayFromString('non-existing'), { timeout: 100 })) + .to.eventually.be.rejected .and.be.an.instanceOf(Error) }) it('should get a value after it was put on another 
node', async () => { const data = await nodeA.add('should put a value to the DHT') const publish = await nodeA.name.publish(data.cid) - const record = await nodeA.dht.get(`/ipns/${publish.name}`) + const record = await nodeA.dht.get(uint8ArrayFromString(`/ipns/${publish.name}`)) expect(uint8ArrayToString(record)).to.contain(data.cid.toString()) }) diff --git a/packages/interface-ipfs-core/src/dht/provide.js b/packages/interface-ipfs-core/src/dht/provide.js index b75082ee4d..72373d2fb4 100644 --- a/packages/interface-ipfs-core/src/dht/provide.js +++ b/packages/interface-ipfs-core/src/dht/provide.js @@ -6,27 +6,32 @@ const { CID } = require('multiformats/cid') const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.provide', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api - const nodeB = (await common.spawn()).api - await ipfs.swarm.connect(nodeB.peerId.addresses[0]) + ipfs = (await factory.spawn()).api + const nodeB = (await factory.spawn()).api + const nodeBId = await nodeB.id() + await ipfs.swarm.connect(nodeBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should provide local CID', async () => { const res = await ipfs.add(uint8ArrayFromString('test')) @@ -58,10 +63,12 @@ module.exports = (common, options) => { }) it('should error on non CID arg', () => { + // @ts-expect-error invalid arg return expect(all(ipfs.dht.provide({}))).to.eventually.be.rejected() }) it('should error on array containing non CID arg', () => { + 
// @ts-expect-error invalid arg return expect(all(ipfs.dht.provide([{}]))).to.eventually.be.rejected() }) }) diff --git a/packages/interface-ipfs-core/src/dht/put.js b/packages/interface-ipfs-core/src/dht/put.js index d059e8950c..4120ed0120 100644 --- a/packages/interface-ipfs-core/src/dht/put.js +++ b/packages/interface-ipfs-core/src/dht/put.js @@ -3,35 +3,45 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') +const uint8ArrayFromString = require('uint8arrays/from-string') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.put', function () { + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId before(async () => { - nodeA = (await common.spawn()).api - nodeB = (await common.spawn()).api - await nodeA.swarm.connect(nodeB.peerId.addresses[0]) + nodeA = (await factory.spawn()).api + nodeB = (await factory.spawn()).api + nodeBId = await nodeB.id() + + await nodeA.swarm.connect(nodeBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should put a value to the DHT', async function () { const { cid } = await nodeA.add('should put a value to the DHT') const publish = await nodeA.name.publish(cid) - const record = await nodeA.dht.get(`/ipns/${publish.name}`) - const value = await all(nodeA.dht.put(`/ipns/${publish.name}`, record, { verbose: true })) + const record = await nodeA.dht.get(uint8ArrayFromString(`/ipns/${publish.name}`)) + const value = await all(nodeA.dht.put(uint8ArrayFromString(`/ipns/${publish.name}`), record, { verbose: 
true })) expect(value).to.has.length(3) - expect(value[2].id.toString()).to.be.equal(nodeB.peerId.id) + expect(value[2].id.toString()).to.be.equal(nodeBId.id) expect(value[2].type).to.be.equal(5) }) }) diff --git a/packages/interface-ipfs-core/src/dht/query.js b/packages/interface-ipfs-core/src/dht/query.js index 6df7cf2b4c..034498aa19 100644 --- a/packages/interface-ipfs-core/src/dht/query.js +++ b/packages/interface-ipfs-core/src/dht/query.js @@ -6,47 +6,58 @@ const all = require('it-all') const drain = require('it-drain') const testTimeout = require('../utils/test-timeout') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dht.query', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId before(async () => { - nodeA = (await common.spawn()).api - nodeB = (await common.spawn()).api - await nodeB.swarm.connect(nodeA.peerId.addresses[0]) + nodeA = (await factory.spawn()).api + nodeB = (await factory.spawn()).api + const nodeAId = await nodeA.id() + nodeBId = await nodeB.id() + await nodeB.swarm.connect(nodeAId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when querying the DHT', () => { - return testTimeout(() => drain(nodeA.dht.query(nodeB.peerId.id, { + return testTimeout(() => drain(nodeA.dht.query(nodeBId.id, { timeout: 1 }))) }) it('should return the other node in the query', async function () { const timeout = 150 * 1000 + // @ts-ignore this is mocha this.timeout(timeout) try { - const peers = await 
all(nodeA.dht.query(nodeB.peerId.id, { timeout: timeout - 1000 })) - expect(peers.map(p => p.id.toString())).to.include(nodeB.peerId.id) + const peers = await all(nodeA.dht.query(nodeBId.id, { timeout: timeout - 1000 })) + expect(peers.map(p => p.id.toString())).to.include(nodeBId.id) } catch (err) { if (err.name === 'TimeoutError') { // This test is meh. DHT works best with >= 20 nodes. Therefore a // failure might happen, but we don't want to report it as such. // Hence skip the test before the timeout is reached + // @ts-ignore this is mocha this.skip() } else { throw err diff --git a/packages/interface-ipfs-core/src/dht/utils.js b/packages/interface-ipfs-core/src/dht/utils.js index 42d934021d..2f2339da8f 100644 --- a/packages/interface-ipfs-core/src/dht/utils.js +++ b/packages/interface-ipfs-core/src/dht/utils.js @@ -4,6 +4,10 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') +/** + * @param {Uint8Array} [data] + * @returns + */ exports.fakeCid = async (data) => { const bytes = data || uint8ArrayFromString(`TEST${Math.random()}`) const mh = await sha256.digest(bytes) diff --git a/packages/interface-ipfs-core/src/files/chmod.js b/packages/interface-ipfs-core/src/files/chmod.js index 84ae5837ec..f52e8dec59 100644 --- a/packages/interface-ipfs-core/src/files/chmod.js +++ b/packages/interface-ipfs-core/src/files/chmod.js @@ -6,15 +6,29 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const isShardAtPath = require('../utils/is-shard-at-path') -module.exports = (common, options) => { +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory + * @param {Object} options + */ +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.chmod', function () { this.timeout(120 * 1000) + /** @type 
{import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {string} initialMode + * @param {string} modification + * @param {string} expectedFinalMode + */ async function testChmod (initialMode, modification, expectedFinalMode) { const path = `/test-${nanoid()}` @@ -32,10 +46,10 @@ module.exports = (common, options) => { } before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should update the mode for a file', async () => { const path = `/foo-${Math.random()}` diff --git a/packages/interface-ipfs-core/src/files/cp.js b/packages/interface-ipfs-core/src/files/cp.js index c3a9682948..8ef63feaa4 100644 --- a/packages/interface-ipfs-core/src/files/cp.js +++ b/packages/interface-ipfs-core/src/files/cp.js @@ -13,37 +13,45 @@ const { randomBytes } = require('iso-random-stream') const createShardedDirectory = require('../utils/create-sharded-directory') const isShardAtPath = require('../utils/is-shard-at-path') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.cp', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('refuses to copy files without a source', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.cp()).to.eventually.be.rejected.with('Please supply at least one source') }) it('refuses to copy files without a source, even with options', async () => { + // @ts-expect-error invalid args 
await expect(ipfs.files.cp({})).to.eventually.be.rejected.with('Please supply at least one source') }) it('refuses to copy files without a destination', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.cp('/source')).to.eventually.be.rejected.with('Please supply at least one source') }) it('refuses to copy files without a destination, even with options', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.cp('/source', {})).to.eventually.be.rejected.with('Please supply at least one source') }) @@ -275,7 +283,7 @@ module.exports = (common, options) => { const seconds = Math.floor(mtime.getTime() / 1000) const expectedMtime = { secs: seconds, - nsecs: (mtime - (seconds * 1000)) * 1000 + nsecs: (mtime.getTime() - (seconds * 1000)) * 1000 } await ipfs.files.write(testSrcPath, uint8ArrayFromString('TEST'), { @@ -298,7 +306,7 @@ module.exports = (common, options) => { const seconds = Math.floor(mtime.getTime() / 1000) const expectedMtime = { secs: seconds, - nsecs: (mtime - (seconds * 1000)) * 1000 + nsecs: (mtime.getTime() - (seconds * 1000)) * 1000 } await ipfs.files.mkdir(testSrcPath, { @@ -321,7 +329,7 @@ module.exports = (common, options) => { const seconds = Math.floor(mtime.getTime() / 1000) const expectedMtime = { secs: seconds, - nsecs: (mtime - (seconds * 1000)) * 1000 + nsecs: (mtime.getTime() - (seconds * 1000)) * 1000 } const { @@ -339,10 +347,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/flush.js b/packages/interface-ipfs-core/src/files/flush.js index c1471a8ffe..420250517d 100644 --- a/packages/interface-ipfs-core/src/files/flush.js +++ b/packages/interface-ipfs-core/src/files/flush.js @@ -4,23 +4,27 @@ const { 
nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.flush', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should not flush not found file/dir, expect error', async () => { const testDir = `/test-${nanoid()}` @@ -33,6 +37,7 @@ module.exports = (common, options) => { }) it('should require a path', () => { + // @ts-expect-error invalid args expect(ipfs.files.flush()).to.eventually.be.rejected() }) diff --git a/packages/interface-ipfs-core/src/files/ls.js b/packages/interface-ipfs-core/src/files/ls.js index 3daa550435..cc7a64a84c 100644 --- a/packages/interface-ipfs-core/src/files/ls.js +++ b/packages/interface-ipfs-core/src/files/ls.js @@ -9,12 +9,15 @@ const all = require('it-all') const { randomBytes } = require('iso-random-stream') const raw = require('multiformats/codecs/raw') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const largeFile = randomBytes(490668) @@ -22,13 +25,15 @@ module.exports = (common, options) => { describe('.files.ls', function () { this.timeout(120 * 1000) + /** @type 
{import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should require a path', () => { + // @ts-expect-error invalid args expect(all(ipfs.files.ls())).to.eventually.be.rejected() }) @@ -158,10 +163,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/mkdir.js b/packages/interface-ipfs-core/src/files/mkdir.js index d71c1793bd..f9f57d90ac 100644 --- a/packages/interface-ipfs-core/src/files/mkdir.js +++ b/packages/interface-ipfs-core/src/files/mkdir.js @@ -8,20 +8,28 @@ const createShardedDirectory = require('../utils/create-sharded-directory') const all = require('it-all') const isShardAtPath = require('../utils/is-shard-at-path') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.mkdir', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {number | string | undefined} mode + * @param {number} expectedMode + */ async function testMode (mode, expectedMode) { const testPath = `/test-${nanoid()}` await ipfs.files.mkdir(testPath, { @@ -32,6 +40,10 @@ module.exports = (common, options) => { expect(stats).to.have.property('mode', expectedMode) } + /** + * @param {import('ipfs-unixfs').MtimeLike} mtime + * @param 
{import('ipfs-unixfs').MtimeLike} expectedMtime + */ async function testMtime (mtime, expectedMtime) { const testPath = `/test-${nanoid()}` await ipfs.files.mkdir(testPath, { @@ -42,9 +54,9 @@ module.exports = (common, options) => { expect(stats).to.have.deep.property('mtime', expectedMtime) } - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('requires a directory', async () => { await expect(ipfs.files.mkdir('')).to.eventually.be.rejected() @@ -212,10 +224,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/mv.js b/packages/interface-ipfs-core/src/files/mv.js index 027b2319a3..cf60afecc1 100644 --- a/packages/interface-ipfs-core/src/files/mv.js +++ b/packages/interface-ipfs-core/src/files/mv.js @@ -9,33 +9,39 @@ const { randomBytes } = require('iso-random-stream') const isShardAtPath = require('../utils/is-shard-at-path') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.mv', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) before(async () => { await ipfs.files.mkdir('/test/lv1/lv2', { parents: true }) await 
ipfs.files.write('/test/a', uint8ArrayFromString('Hello, world!'), { create: true }) }) - after(() => common.clean()) + after(() => factory.clean()) it('refuses to move files without arguments', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.mv()).to.eventually.be.rejected() }) it('refuses to move files without enough arguments', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.mv()).to.eventually.be.rejected() }) @@ -103,10 +109,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/read.js b/packages/interface-ipfs-core/src/files/read.js index 9de211bfe4..9c1a8c8b90 100644 --- a/packages/interface-ipfs-core/src/files/read.js +++ b/packages/interface-ipfs-core/src/files/read.js @@ -9,12 +9,15 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const createShardedDirectory = require('../utils/create-sharded-directory') const { randomBytes } = require('iso-random-stream') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const smallFile = randomBytes(13) @@ -22,11 +25,12 @@ module.exports = (common, options) => { describe('.files.read', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => 
factory.clean()) it('reads a small file', async () => { const filePath = '/small-file.txt' @@ -109,10 +113,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/rm.js b/packages/interface-ipfs-core/src/files/rm.js index 2b9749a2fd..3bb7fbbb96 100644 --- a/packages/interface-ipfs-core/src/files/rm.js +++ b/packages/interface-ipfs-core/src/files/rm.js @@ -8,23 +8,27 @@ const createTwoShards = require('../utils/create-two-shards') const { randomBytes } = require('iso-random-stream') const isShardAtPath = require('../utils/is-shard-at-path') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.rm', function () { this.timeout(300 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should not remove not found file/dir, expect error', () => { const testDir = `/test-${nanoid()}` @@ -33,6 +37,7 @@ module.exports = (common, options) => { }) it('refuses to remove files without arguments', async () => { + // @ts-expect-error invalid args await expect(ipfs.files.rm()).to.eventually.be.rejected() }) @@ -127,10 +132,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function 
() { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/stat.js b/packages/interface-ipfs-core/src/files/stat.js index d46a912d64..327624fa1d 100644 --- a/packages/interface-ipfs-core/src/files/stat.js +++ b/packages/interface-ipfs-core/src/files/stat.js @@ -12,12 +12,15 @@ const { randomBytes } = require('iso-random-stream') const isShardAtPath = require('../utils/is-shard-at-path') const raw = require('multiformats/codecs/raw') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const smallFile = randomBytes(13) @@ -26,17 +29,18 @@ module.exports = (common, options) => { describe('.files.stat', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn({ - args: common.opts.type === 'go' ? [] : ['--enable-sharding-experiment'] + ipfs = (await factory.spawn({ + args: factory.opts.type === 'go' ? 
[] : ['--enable-sharding-experiment'] })).api }) before(async () => { await ipfs.add(fixtures.smallFile.data) }) - after(() => common.clean()) + after(() => factory.clean()) it('refuses to stat files with an empty path', async () => { await expect(ipfs.files.stat('')).to.eventually.be.rejected() @@ -222,7 +226,6 @@ module.exports = (common, options) => { await ipfs.files.write(`${testDir}/a`, uint8ArrayFromString('Hello, world!'), { create: true }) const stat = await ipfs.files.stat(testDir) - stat.cid = stat.cid.toString() expect(stat).to.include({ type: 'directory', @@ -314,9 +317,11 @@ module.exports = (common, options) => { // TODO enable this test when this feature gets released on go-ipfs it.skip('should stat withLocal file', async function () { const stat = await ipfs.files.stat('/test/b', { withLocal: true }) - stat.cid = stat.cid.toString() - expect(stat).to.eql({ + expect({ + ...stat, + cid: stat.cid.toString() + }).to.eql({ type: 'file', blocks: 1, size: 13, @@ -331,9 +336,11 @@ module.exports = (common, options) => { // TODO enable this test when this feature gets released on go-ipfs it.skip('should stat withLocal dir', async function () { const stat = await ipfs.files.stat('/test', { withLocal: true }) - stat.cid = stat.cid.toString() - expect(stat).to.eql({ + expect({ + ...stat, + cid: stat.cid.toString() + }).to.eql({ type: 'directory', blocks: 2, size: 0, @@ -347,9 +354,11 @@ module.exports = (common, options) => { it('should stat outside of mfs', async () => { const stat = await ipfs.files.stat(`/ipfs/${fixtures.smallFile.cid}`) - stat.cid = stat.cid.toString() - expect(stat).to.include({ + expect({ + ...stat, + cid: stat.cid.toString() + }).to.include({ type: 'file', blocks: 0, size: 12, @@ -362,10 +371,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ 
ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/files/touch.js b/packages/interface-ipfs-core/src/files/touch.js index e8a71900f7..816a733949 100644 --- a/packages/interface-ipfs-core/src/files/touch.js +++ b/packages/interface-ipfs-core/src/files/touch.js @@ -8,15 +8,28 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const delay = require('delay') const all = require('it-all') -module.exports = (common, options) => { +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory + * @param {Object} options + */ +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.files.touch', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {import('ipfs-unixfs').MtimeLike} mtime + * @param {import('ipfs-unixfs').MtimeLike} expectedMtime + */ async function testMtime (mtime, expectedMtime) { const testPath = `/test-${nanoid()}` @@ -35,11 +48,12 @@ module.exports = (common, options) => { expect(stat2).to.have.deep.nested.property('mtime', expectedMtime) } - before(async () => { ipfs = (await common.spawn()).api }) + before(async () => { ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should have default mtime', async function () { + // @ts-ignore this is mocha this.slow(5 * 1000) const testPath = `/test-${nanoid()}` @@ -60,6 +74,7 @@ module.exports = (common, options) => { }) it('should update file mtime', async function () { + // @ts-ignore this is mocha this.slow(5 * 1000) const testPath = `/test-${nanoid()}` const mtime = new Date() @@ -77,6 +92,7 @@ module.exports = (common, options) => { }) it('should update directory mtime', async function () { + // @ts-ignore this is mocha this.slow(5 * 1000) const testPath = `/test-${nanoid()}` const mtime = new Date() @@ -104,12 +120,22 @@ 
module.exports = (common, options) => { shardSplitThreshold: 0 }) const originalMtime = (await ipfs.files.stat(path)).mtime + + if (!originalMtime) { + throw new Error('No originalMtime found') + } + await delay(1000) await ipfs.files.touch(path, { flush: true }) const updatedMtime = (await ipfs.files.stat(path)).mtime + + if (!updatedMtime) { + throw new Error('No updatedMtime found') + } + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) }) diff --git a/packages/interface-ipfs-core/src/files/write.js b/packages/interface-ipfs-core/src/files/write.js index 4befe577b3..fd31c9a200 100644 --- a/packages/interface-ipfs-core/src/files/write.js +++ b/packages/interface-ipfs-core/src/files/write.js @@ -16,17 +16,23 @@ const all = require('it-all') const isShardAtPath = require('../utils/is-shard-at-path') const raw = require('multiformats/codecs/raw') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const smallFile = randomBytes(13) const largeFile = randomBytes(490668) + /** + * @param {(arg: { type: string, path: string, content: Uint8Array | AsyncIterable, contentSize: number }) => void} fn + */ const runTest = (fn) => { const iterations = 5 const files = [{ @@ -60,8 +66,13 @@ module.exports = (common, options) => { describe('.files.write', function () { this.timeout(300 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {number | string} mode + * @param {number} expectedMode + */ async function testMode (mode, expectedMode) { const testPath = `/test-${nanoid()}` @@ -75,6 +86,10 @@ module.exports = (common, options) => { expect(stats).to.have.property('mode', expectedMode) } + /** + * @param 
{import('ipfs-unixfs').MtimeLike} mtime + * @param {import('ipfs-unixfs').MtimeLike} expectedMtime + */ async function testMtime (mtime, expectedMtime) { const testPath = `/test-${nanoid()}` @@ -89,18 +104,20 @@ module.exports = (common, options) => { } before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('explodes if it cannot convert content to a source', async () => { + // @ts-expect-error invalid arg await expect(ipfs.files.write('/foo-bad-source', -1, { create: true })).to.eventually.be.rejected() }) it('explodes if given an invalid path', async () => { + // @ts-expect-error invalid arg await expect(ipfs.files.write('foo-no-slash', null, { create: true })).to.eventually.be.rejected() @@ -126,7 +143,7 @@ module.exports = (common, options) => { }) await expect(all(ipfs.files.ls(path))).to.eventually.have.lengthOf(1) - .and.to.have.nested.property('[0]').that.includes({ + .and.to.have.nested.property('[0]').that.include({ name: 'foo-zero-length', size: 0 }) @@ -171,6 +188,7 @@ module.exports = (common, options) => { it('writes a small file using a Node stream (Node only)', async function () { if (!isNode) { + // @ts-ignore this is mocha this.skip() } const filePath = `/small-file-${Math.random()}.txt` @@ -187,6 +205,7 @@ module.exports = (common, options) => { it('writes a small file using an HTML5 Blob (Browser only)', async function () { if (!global.Blob) { + // @ts-ignore this is mocha return this.skip() } @@ -325,7 +344,7 @@ module.exports = (common, options) => { length: offset }))) - expect(buffer).to.deep.equal(new Uint8Array(offset, 0)) + expect(buffer).to.deep.equal(new Uint8Array(offset)) }) }) @@ -368,7 +387,7 @@ module.exports = (common, options) => { const buffer = uint8ArrayConcat(await all(ipfs.files.read(path))) - if (content[Symbol.asyncIterator]) { + if (!(content instanceof Uint8Array)) { content = uint8ArrayConcat(await all(content)) } 
@@ -416,6 +435,7 @@ module.exports = (common, options) => { }) it('supports concurrent writes', async function () { + /** @type {{ name: string, source: ReturnType}[]} */ const files = [] for (let i = 0; i < 10; i++) { @@ -630,10 +650,11 @@ module.exports = (common, options) => { }) describe('with sharding', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async function () { - const ipfsd = await common.spawn({ + const ipfsd = await factory.spawn({ ipfsOptions: { EXPERIMENTAL: { // enable sharding for js diff --git a/packages/interface-ipfs-core/src/get.js b/packages/interface-ipfs-core/src/get.js index 81a623fcfd..678d020597 100644 --- a/packages/interface-ipfs-core/src/get.js +++ b/packages/interface-ipfs-core/src/get.js @@ -14,28 +14,108 @@ const { getDescribe, getIt, expect } = require('./utils/mocha') const testTimeout = require('./utils/test-timeout') const { importer } = require('ipfs-unixfs-importer') const blockstore = require('./utils/blockstore-adapter') +const { Inflate } = require('pako') +const { extract } = require('it-tar') +const { pipe } = require('it-pipe') +const toBuffer = require('it-to-buffer') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @param {string} name + * @param {string} [path] + */ +const content = (name, path) => { + if (!path) { + path = name + } + + return { + path: `test-folder/${path}`, + content: fixtures.directory.files[name] + } +} + +/** + * @param {string} name + */ +const emptyDir = (name) => ({ path: `test-folder/${name}` }) + +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.get', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** + * @param {AsyncIterable} source + 
*/ + async function * gzipped (source) { + const inflator = new Inflate() + + for await (const buf of source) { + inflator.push(buf, false) + } + + inflator.push(new Uint8Array(0), true) + + if (inflator.err) { + throw new Error(`Error ungzipping - message: "${inflator.msg}" code: ${inflator.err}`) + } + + if (inflator.result instanceof Uint8Array) { + yield inflator.result + } else { + throw new Error('Unexpected gzip data type') + } + } + + /** + * @param {AsyncIterable} source + */ + async function * tarballed (source) { + yield * pipe( + source, + extract(), + async function * (source) { + for await (const entry of source) { + yield { + ...entry, + body: await toBuffer(map(entry.body, (buf) => buf.slice())) + } + } + } + ) + } + + /** + * @template T + * @param {AsyncIterable} source + */ + async function collect (source) { + return all(source) + } + before(async () => { - ipfs = (await common.spawn()).api - await drain(importer([{ content: fixtures.smallFile.data }], blockstore(ipfs))) - await drain(importer([{ content: fixtures.bigFile.data }], blockstore(ipfs))) + ipfs = (await factory.spawn()).api + + await Promise.all([ + all(importer({ content: fixtures.smallFile.data }, blockstore(ipfs))), + all(importer({ content: fixtures.bigFile.data }, blockstore(ipfs))) + ]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when getting files', () => { return testTimeout(() => drain(ipfs.get(CID.parse('QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbS1'), { @@ -44,70 +124,83 @@ module.exports = (common, options) => { }) it('should get with a base58 encoded multihash', async () => { - const files = await all(ipfs.get(fixtures.smallFile.cid)) - expect(files).to.be.length(1) - expect(files[0].path).to.eql(fixtures.smallFile.cid.toString()) - expect(uint8ArrayToString(uint8ArrayConcat(await all(files[0].content)))).to.contain('Plz add me!') + const output = await pipe( + ipfs.get(fixtures.smallFile.cid), + tarballed, + collect 
+ ) + expect(output).to.have.lengthOf(1) + expect(output).to.have.nested.property('[0].header.name', fixtures.smallFile.cid.toString()) + expect(output).to.have.nested.property('[0].body').that.equalBytes(fixtures.smallFile.data) }) it('should get a file added as CIDv0 with a CIDv1', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - - const res = await all(importer([{ content: input }], blockstore(ipfs))) + const res = await all(importer({ content: input }, blockstore(ipfs))) const cidv0 = res[0].cid expect(cidv0.version).to.equal(0) const cidv1 = cidv0.toV1() - const output = await all(ipfs.get(cidv1)) - expect(uint8ArrayConcat(await all(output[0].content))).to.eql(input) + const output = await pipe( + ipfs.get(cidv1), + tarballed, + collect + ) + expect(output).to.have.lengthOf(1) + expect(output).to.have.nested.property('[0].header.name', cidv1.toString()) + expect(output).to.have.nested.property('[0].body').that.equalBytes(input) }) it('should get a file added as CIDv1 with a CIDv0', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - - const res = await all(importer([{ content: input }], blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) + const res = await all(importer({ content: input }, blockstore(ipfs), { cidVersion: 1, rawLeaves: false })) const cidv1 = res[0].cid expect(cidv1.version).to.equal(1) const cidv0 = cidv1.toV0() - const output = await all(ipfs.get(cidv0)) - expect(uint8ArrayConcat(await all(output[0].content))).to.eql(input) + const output = await pipe( + ipfs.get(cidv0), + tarballed, + collect + ) + expect(output).to.have.lengthOf(1) + expect(output).to.have.nested.property('[0].header.name', cidv0.toString()) + expect(output).to.have.nested.property('[0].body').that.equalBytes(input) }) it('should get a file added as CIDv1 with rawLeaves', async () => { const input = uint8ArrayFromString(`TEST${Math.random()}`) - - const res = await all(importer([{ content: input }], blockstore(ipfs), { 
cidVersion: 1, rawLeaves: true })) + const res = await all(importer({ content: input }, blockstore(ipfs), { cidVersion: 1, rawLeaves: true })) const cidv1 = res[0].cid expect(cidv1.version).to.equal(1) - const output = await all(ipfs.get(cidv1)) - expect(output[0].type).to.eql('file') - expect(uint8ArrayConcat(await all(output[0].content))).to.eql(input) + const output = await pipe( + ipfs.get(cidv1), + tarballed, + collect + ) + expect(output).to.have.lengthOf(1) + expect(output).to.have.nested.property('[0].header.name', cidv1.toString()) + expect(output).to.have.nested.property('[0].body').that.equalBytes(input) }) it('should get a BIG file', async () => { - for await (const file of ipfs.get(fixtures.bigFile.cid)) { - expect(file.path).to.equal(fixtures.bigFile.cid.toString()) - const content = uint8ArrayConcat(await all(file.content)) - expect(content.length).to.eql(fixtures.bigFile.data.length) - expect(content.slice()).to.eql(fixtures.bigFile.data) - } + const output = await pipe( + ipfs.get(fixtures.bigFile.cid), + tarballed, + collect + ) + expect(output).to.have.lengthOf(1) + expect(output).to.have.nested.property('[0].header.name', fixtures.bigFile.cid.toString()) + expect(output).to.have.nested.property('[0].body').that.equalBytes(fixtures.bigFile.data) }) it('should get a directory', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - const dirs = [ content('pp.txt'), content('holmes.txt'), @@ -120,26 +213,16 @@ module.exports = (common, options) => { ] const res = await all(importer(dirs, blockstore(ipfs))) - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal(fixtures.directory.cid.toString()) - - let files = await all((async function * () { - for await (let { path, content } of ipfs.get(fixtures.directory.cid)) { - content = content ? 
uint8ArrayToString(uint8ArrayConcat(await all(content))) : null - yield { path, content } - } - })()) - - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) + const { cid } = res[res.length - 1] + expect(`${cid}`).to.equal(fixtures.directory.cid.toString()) + const output = await pipe( + ipfs.get(cid), + tarballed, + collect + ) // Check paths - const paths = files.map((file) => { return file.path }) + const paths = output.map((file) => { return file.header.name }) expect(paths).to.include.members([ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', @@ -154,7 +237,7 @@ module.exports = (common, options) => { ]) // Check contents - expect(files.map(f => f.content)).to.include.members([ + expect(output.map(f => uint8ArrayToString(f.body))).to.include.members([ fixtures.directory.files['alice.txt'].toString(), fixtures.directory.files['files/hello.txt'].toString(), fixtures.directory.files['files/ipfs.txt'].toString(), @@ -165,11 +248,6 @@ module.exports = (common, options) => { }) it('should get a nested directory', async function () { - const content = (name, path) => ({ - path: `test-folder/${path}`, - content: fixtures.directory.files[name] - }) - const dirs = [ content('pp.txt', 'pp.txt'), content('holmes.txt', 'foo/holmes.txt'), @@ -177,26 +255,16 @@ module.exports = (common, options) => { ] const res = await all(importer(dirs, blockstore(ipfs))) - const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - expect(root.cid.toString()).to.equal('QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g') - - let files = await all( - map(ipfs.get(root.cid), async ({ path, content }) => { - content = content ? 
uint8ArrayToString(uint8ArrayConcat(await all(content))) : null - return { path, content } - }) + const { cid } = res[res.length - 1] + expect(`${cid}`).to.equal('QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g') + const output = await pipe( + ipfs.get(cid), + tarballed, + collect ) - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) - // Check paths - const paths = files.map((file) => { return file.path }) - expect(paths).to.include.members([ + expect(output.map((file) => { return file.header.name })).to.include.members([ 'QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g', 'QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g/pp.txt', 'QmVMXXo3c2bDPH9ayy2VKoXpykfYJHwAcU5YCJjPf7jg3g/foo/holmes.txt', @@ -204,7 +272,7 @@ module.exports = (common, options) => { ]) // Check contents - expect(files.map(f => f.content)).to.include.members([ + expect(output.map(f => uint8ArrayToString(f.body))).to.include.members([ fixtures.directory.files['pp.txt'].toString(), fixtures.directory.files['holmes.txt'].toString(), fixtures.directory.files['jungle.txt'].toString() @@ -218,28 +286,138 @@ module.exports = (common, options) => { } const fileAdded = await last(importer([file], blockstore(ipfs))) + + if (!fileAdded) { + throw new Error('No file was added') + } + expect(fileAdded).to.have.property('path', 'a') - const files = await all(ipfs.get(`/ipfs/${fileAdded.cid}/testfile.txt`)) - expect(files).to.be.length(1) - expect(uint8ArrayToString(uint8ArrayConcat(await all(files[0].content)))).to.contain('Plz add me!') + const output = await pipe( + ipfs.get(`/ipfs/${fileAdded.cid}/testfile.txt`), + tarballed, + collect + ) + expect(output).to.be.length(1) + + expect(uint8ArrayToString(output[0].body)).to.equal('Plz add me!\n') + }) + + it('should compress a file directly', async () => { + const output = await pipe( + ipfs.get(fixtures.smallFile.cid, { + compress: true, + compressionLevel: 5 + }), + gzipped, + collect + ) + 
expect(uint8ArrayConcat(output)).to.equalBytes(fixtures.smallFile.data) + }) + + it('should compress a file as a tarball', async () => { + const output = await pipe( + ipfs.get(fixtures.smallFile.cid, { + archive: true, + compress: true, + compressionLevel: 5 + }), + gzipped, + tarballed, + collect + ) + expect(output).to.have.nested.property('[0].body').that.equalBytes(fixtures.smallFile.data) + }) + + it('should not compress a directory', async () => { + const dirs = [ + content('pp.txt'), + emptyDir('empty-folder'), + content('files/hello.txt') + ] + + const res = await all(importer(dirs, blockstore(ipfs))) + const { cid } = res[res.length - 1] + + await expect(drain(ipfs.get(cid, { + compress: true, + compressionLevel: 5 + }))).to.eventually.be.rejectedWith(/file is not regular/) + }) + + it('should compress a file with invalid compression level', async () => { + await expect(drain(ipfs.get(fixtures.smallFile.cid, { + compress: true, + compressionLevel: 10 + }))).to.eventually.be.rejected() + }) + + it('should compress a directory as a tarball', async () => { + const dirs = [ + content('pp.txt'), + emptyDir('empty-folder'), + content('files/hello.txt') + ] + + const res = await all(importer(dirs, blockstore(ipfs))) + const { cid } = res[res.length - 1] + const output = await pipe( + ipfs.get(cid, { + archive: true, + compress: true, + compressionLevel: 5 + }), + gzipped, + tarballed, + collect + ) + + // Check paths + const paths = output.map((file) => { return file.header.name }) + expect(paths).to.include.members([ + 'QmXpbhYKheGs5sopefFjsABsjr363QkRaJT4miRsN88ABU', + 'QmXpbhYKheGs5sopefFjsABsjr363QkRaJT4miRsN88ABU/empty-folder', + 'QmXpbhYKheGs5sopefFjsABsjr363QkRaJT4miRsN88ABU/files/hello.txt', + 'QmXpbhYKheGs5sopefFjsABsjr363QkRaJT4miRsN88ABU/pp.txt' + ]) + + // Check contents + expect(output.map(f => uint8ArrayToString(f.body))).to.include.members([ + fixtures.directory.files['files/hello.txt'].toString(), + fixtures.directory.files['pp.txt'].toString() + 
]) }) it('should error on invalid key', async () => { const invalidCid = 'somethingNotMultihash' - const err = await expect(all(ipfs.get(invalidCid))).to.eventually.be.rejected() - - switch (err.toString()) { - case 'Error: invalid ipfs ref path': - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - break - case 'Error: Invalid Key': - expect(err.toString()).to.contain('Error: Invalid Key') - break - default: - break - } + await expect(all(ipfs.get(invalidCid))).to.eventually.be.rejected() + }) + + it('get path containing "+"s', async () => { + const filename = 'ti,c64x+mega++mod-pic.txt' + const subdir = 'tmp/c++files' + const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' + const path = `${subdir}/${filename}` + const files = await all(ipfs.addAll([{ + path, + content: path + }])) + + expect(files[2].cid.toString()).to.equal(expectedCid) + + const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' + + const output = await pipe( + ipfs.get(CID.parse(cid)), + tarballed, + collect + ) + + expect(output).to.be.an('array').with.lengthOf(3) + expect(output).to.have.nested.property('[0].header.name', cid) + expect(output).to.have.nested.property('[1].header.name', `${cid}/c++files`) + expect(output).to.have.nested.property('[2].header.name', `${cid}/c++files/ti,c64x+mega++mod-pic.txt`) }) }) } diff --git a/packages/interface-ipfs-core/src/key/gen.js b/packages/interface-ipfs-core/src/key/gen.js index d3241b1382..0fd94ff424 100644 --- a/packages/interface-ipfs-core/src/key/gen.js +++ b/packages/interface-ipfs-core/src/key/gen.js @@ -4,12 +4,15 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) 
=> { const describe = getDescribe(options) const it = getIt(options) @@ -18,16 +21,18 @@ module.exports = (common, options) => { { type: 'rsa', size: 2048 } ] + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) keyTypes.forEach((kt) => { it(`should generate a new ${kt.type} key`, async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const name = nanoid() const key = await ipfs.key.gen(name, kt) diff --git a/packages/interface-ipfs-core/src/key/import.js b/packages/interface-ipfs-core/src/key/import.js index 0bb9933fd1..e59ebd7162 100644 --- a/packages/interface-ipfs-core/src/key/import.js +++ b/packages/interface-ipfs-core/src/key/import.js @@ -2,31 +2,35 @@ 'use strict' const { nanoid } = require('nanoid') -const keys = require('libp2p-crypto/src/keys') +const { keys } = require('libp2p-crypto') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.key.import', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should import an exported key', async () => { const password = nanoid() - const key = await keys.generateKeyPair('ed25519') + const key = await keys.generateKeyPair('Ed25519') const exported = await key.export(password) const importedKey = await ipfs.key.import('clone', exported, password) diff --git 
a/packages/interface-ipfs-core/src/key/list.js b/packages/interface-ipfs-core/src/key/list.js index a965b80c5a..192c5fec9d 100644 --- a/packages/interface-ipfs-core/src/key/list.js +++ b/packages/interface-ipfs-core/src/key/list.js @@ -4,25 +4,30 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.key.list', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should list all the keys', async function () { + // @ts-ignore this is mocha this.timeout(60 * 1000) const keys = await Promise.all([1, 2, 3].map(() => ipfs.key.gen(nanoid(), { type: 'rsa', size: 2048 }))) diff --git a/packages/interface-ipfs-core/src/key/rename.js b/packages/interface-ipfs-core/src/key/rename.js index 9eafa9d622..b53097525a 100644 --- a/packages/interface-ipfs-core/src/key/rename.js +++ b/packages/interface-ipfs-core/src/key/rename.js @@ -4,25 +4,30 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.key.rename', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs 
before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should rename a key', async function () { + // @ts-ignore this is mocha this.timeout(30 * 1000) const oldName = nanoid() diff --git a/packages/interface-ipfs-core/src/key/rm.js b/packages/interface-ipfs-core/src/key/rm.js index df8f7ed143..88ef35c31f 100644 --- a/packages/interface-ipfs-core/src/key/rm.js +++ b/packages/interface-ipfs-core/src/key/rm.js @@ -4,25 +4,30 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.key.rm', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should rm a key', async function () { + // @ts-ignore this is mocha this.timeout(30 * 1000) const key = await ipfs.key.gen(nanoid(), { type: 'rsa', size: 2048 }) diff --git a/packages/interface-ipfs-core/src/ls.js b/packages/interface-ipfs-core/src/ls.js index af316c3cfd..1b47d43384 100644 --- a/packages/interface-ipfs-core/src/ls.js +++ b/packages/interface-ipfs-core/src/ls.js @@ -7,27 +7,34 @@ const all = require('it-all') const { CID } = require('multiformats/cid') const testTimeout = require('./utils/test-timeout') +/** + * @param {string} prefix + */ const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}` -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef 
{import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.ls', function () { this.timeout(120 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should respect timeout option when listing files', () => { return testTimeout(() => ipfs.ls(CID.parse('QmNonExistentCiD8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXg'), { @@ -36,11 +43,17 @@ module.exports = (common, options) => { }) it('should ls with a base58 encoded CID', async function () { + /** + * @param {string} name + */ const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] }) + /** + * @param {string} name + */ const emptyDir = (name) => ({ path: `test-folder/${name}` }) const dirs = [ @@ -64,42 +77,36 @@ module.exports = (common, options) => { const output = await all(ipfs.ls(cid)) expect(output).to.have.lengthOf(6) - expect(output[0].depth).to.equal(1) expect(output[0].name).to.equal('alice.txt') expect(output[0].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt') expect(output[0].size).to.equal(11685) expect(output[0].cid.toString()).to.equal('QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi') expect(output[0].type).to.equal('file') - expect(output[1].depth).to.equal(1) expect(output[1].name).to.equal('empty-folder') expect(output[1].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder') expect(output[1].size).to.equal(0) expect(output[1].cid.toString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') expect(output[1].type).to.equal('dir') - expect(output[2].depth).to.equal(1) expect(output[2].name).to.equal('files') 
expect(output[2].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files') expect(output[2].size).to.equal(0) expect(output[2].cid.toString()).to.equal('QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74') expect(output[2].type).to.equal('dir') - expect(output[3].depth).to.equal(1) expect(output[3].name).to.equal('holmes.txt') expect(output[3].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt') expect(output[3].size).to.equal(581878) expect(output[3].cid.toString()).to.equal('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr') expect(output[3].type).to.equal('file') - expect(output[4].depth).to.equal(1) expect(output[4].name).to.equal('jungle.txt') expect(output[4].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt') expect(output[4].size).to.equal(2294) expect(output[4].cid.toString()).to.equal('QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9') expect(output[4].type).to.equal('file') - expect(output[5].depth).to.equal(1) expect(output[5].name).to.equal('pp.txt') expect(output[5].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt') expect(output[5].size).to.equal(4540) diff --git a/packages/interface-ipfs-core/src/miscellaneous/dns.js b/packages/interface-ipfs-core/src/miscellaneous/dns.js index a4e54151d3..b71e057904 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/dns.js +++ b/packages/interface-ipfs-core/src/miscellaneous/dns.js @@ -3,25 +3,30 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.dns', function () { this.timeout(60 * 1000) this.retries(3) + + /** @type {import('ipfs-core-types').IPFS} 
*/ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should non-recursively resolve ipfs.io', async function () { try { @@ -31,6 +36,7 @@ module.exports = (common, options) => { expect(res).to.match(/\/ipns\/.+$/) } catch (err) { if (err.message.includes('could not resolve name')) { + // @ts-ignore this is mocha return this.skip() } @@ -46,6 +52,7 @@ module.exports = (common, options) => { expect(res).to.match(/\/ipfs\/.+$/) } catch (err) { if (err.message.includes('could not resolve name')) { + // @ts-ignore this is mocha return this.skip() } @@ -61,6 +68,7 @@ module.exports = (common, options) => { expect(res).to.match(/\/ipfs\/.+$/) } catch (err) { if (err.message.includes('could not resolve name')) { + // @ts-ignore this is mocha return this.skip() } diff --git a/packages/interface-ipfs-core/src/miscellaneous/id.js b/packages/interface-ipfs-core/src/miscellaneous/id.js index 31fbc42c9e..0cd172ee53 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/id.js +++ b/packages/interface-ipfs-core/src/miscellaneous/id.js @@ -6,24 +6,29 @@ const { Multiaddr } = require('multiaddr') const { isWebWorker } = require('ipfs-utils/src/env') const retry = require('p-retry') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.id', function () { + // @ts-ignore this is mocha this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get the node ID', 
async () => { const res = await ipfs.id() @@ -60,6 +65,7 @@ module.exports = (common, options) => { it('should return swarm ports opened after startup', async function () { if (isWebWorker) { // TODO: webworkers are not currently dialable + // @ts-ignore this is mocha return this.skip() } @@ -69,19 +75,21 @@ module.exports = (common, options) => { it('should get the id of another node in the swarm', async function () { if (isWebWorker) { // TODO: https://github.com/libp2p/js-libp2p-websockets/issues/129 + // @ts-ignore this is mocha return this.skip() } - const ipfsB = (await common.spawn()).api - await ipfs.swarm.connect(ipfsB.peerId.addresses[0]) + const ipfsB = (await factory.spawn()).api + const ipfsBId = await ipfsB.id() + await ipfs.swarm.connect(ipfsBId.addresses[0]) // have to wait for identify to complete before protocols etc are available for remote hosts await retry(async () => { const result = await ipfs.id({ - peerId: ipfsB.peerId.id + peerId: ipfsBId.id }) - expect(result).to.deep.equal(ipfsB.peerId) + expect(result).to.deep.equal(ipfsBId) }, { retries: 5 }) }) diff --git a/packages/interface-ipfs-core/src/miscellaneous/resolve.js b/packages/interface-ipfs-core/src/miscellaneous/resolve.js index 48c94e6c49..ce4368fa09 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/resolve.js +++ b/packages/interface-ipfs-core/src/miscellaneous/resolve.js @@ -10,25 +10,32 @@ const all = require('it-all') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) describe('.resolve', function () 
{ this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfsId before(async () => { - ipfs = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfs = (await factory.spawn({ type: 'proc', ipfsOptions })).api + ipfsId = await ipfs.id() }) - after(() => common.clean()) + after(() => factory.clean()) it('should resolve an IPFS hash', async () => { const content = uint8ArrayFromString('Hello world') @@ -79,6 +86,7 @@ module.exports = (common, options) => { // Test resolve turns /ipns/domain.com into /ipfs/QmHash it('should resolve an IPNS DNS link', async function () { + // @ts-ignore this is mocha this.retries(3) const resolved = await ipfs.resolve('/ipns/ipfs.io') @@ -86,33 +94,37 @@ module.exports = (common, options) => { }) it('should resolve IPNS link recursively by default', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) // webworkers are not dialable because webrtc is not available - const node = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await ipfs.swarm.connect(node.peerId.addresses[0]) + const node = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api + const nodeId = await node.id() + await ipfs.swarm.connect(nodeId.addresses[0]) const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record recursive === true')) const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 }) await ipfs.name.publish(path, { allowOffline: true }) - await ipfs.name.publish(`/ipns/${ipfs.peerId.id}`, { allowOffline: true, key: 'key-name', resolve: false }) + await ipfs.name.publish(`/ipns/${ipfsId.id}`, { allowOffline: true, key: 'key-name', resolve: false }) return expect(await ipfs.resolve(`/ipns/${keyId}`)) .to.eq(`/ipfs/${path}`) }) it('should resolve IPNS link non-recursively if recursive==false', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) // webworkers are not dialable because webrtc is not available - const node = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await ipfs.swarm.connect(node.peerId.addresses[0]) + const node = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api + const nodeId = await node.id() + await ipfs.swarm.connect(nodeId.addresses[0]) const { path } = await ipfs.add(uint8ArrayFromString('should resolve an IPNS key if recursive === false')) const { id: keyId } = await ipfs.key.gen('new-key-name', { type: 'rsa', size: 2048 }) await ipfs.name.publish(path, { allowOffline: true }) - await ipfs.name.publish(`/ipns/${ipfs.peerId.id}`, { allowOffline: true, key: 'new-key-name', resolve: false }) + await ipfs.name.publish(`/ipns/${ipfsId.id}`, { allowOffline: true, key: 'new-key-name', resolve: false }) return expect(await ipfs.resolve(`/ipns/${keyId}`, { recursive: false })) - .to.eq(`/ipns/${ipfs.peerId.id}`) + .to.eq(`/ipns/${ipfsId.id}`) }) }) } diff --git a/packages/interface-ipfs-core/src/miscellaneous/stop.js b/packages/interface-ipfs-core/src/miscellaneous/stop.js index 80ab85199b..76040cfdef 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/stop.js +++ b/packages/interface-ipfs-core/src/miscellaneous/stop.js @@ -3,40 +3,44 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.stop', function () { this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs beforeEach(async () => { - ipfs = await common.spawn() + ipfs = (await factory.spawn()).api }) afterEach(() => { // reset the list of controlled nodes - we've already shut down the // nodes started in this test but the references hang around and the - // next test will call `common.clean()` which will explode when it + // next test will call `factory.clean()` which will explode when it // can't connect to the nodes started by this 
test. - common.controllers = [] + factory.controllers = [] }) it('should stop the node', async () => { // Should succeed because node is started - await ipfs.api.swarm.peers() + await ipfs.swarm.peers() // Stop the node and try the call again - await ipfs.api.stop() + await ipfs.stop() // Trying to use an API that requires a started node should return an error - return expect(ipfs.api.swarm.peers()).to.eventually.be.rejected() + return expect(ipfs.swarm.peers()).to.eventually.be.rejected() }) }) } diff --git a/packages/interface-ipfs-core/src/miscellaneous/version.js b/packages/interface-ipfs-core/src/miscellaneous/version.js index d7b4bb874e..3b33dd6b35 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/version.js +++ b/packages/interface-ipfs-core/src/miscellaneous/version.js @@ -3,23 +3,27 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.version', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get the node version', async () => { const result = await ipfs.version() diff --git a/packages/interface-ipfs-core/src/name-pubsub/cancel.js b/packages/interface-ipfs-core/src/name-pubsub/cancel.js index d0ab76b495..9ca55c1eaa 100644 --- a/packages/interface-ipfs-core/src/name-pubsub/cancel.js +++ b/packages/interface-ipfs-core/src/name-pubsub/cancel.js @@ -5,27 +5,34 @@ const PeerId = require('peer-id') const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') 
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.name.pubsub.cancel', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {string} */ let nodeId before(async () => { - ipfs = (await common.spawn()).api - nodeId = ipfs.peerId.id + ipfs = (await factory.spawn()).api + const peerInfo = await ipfs.id() + nodeId = peerInfo.id }) - after(() => common.clean()) + after(() => factory.clean()) it('should return false when the name that is intended to cancel is not subscribed', async function () { + // @ts-ignore this is mocha this.timeout(60 * 1000) const res = await ipfs.name.pubsub.cancel(nodeId) @@ -35,6 +42,7 @@ module.exports = (common, options) => { }) it('should cancel a subscription correctly returning true', async function () { + // @ts-ignore this is mocha this.timeout(300 * 1000) const peerId = await PeerId.create({ bits: 512 }) diff --git a/packages/interface-ipfs-core/src/name-pubsub/pubsub.js b/packages/interface-ipfs-core/src/name-pubsub/pubsub.js index fc4b9f5c63..e4395bd444 100644 --- a/packages/interface-ipfs-core/src/name-pubsub/pubsub.js +++ b/packages/interface-ipfs-core/src/name-pubsub/pubsub.js @@ -22,7 +22,10 @@ const daemonsOptions = { } } -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + /** * @param {Factory} factory * @param {Object} options @@ -36,9 +39,13 @@ module.exports = (factory, options) => { if (!isNode) return let nodes + /** @type {import('ipfs-core-types').IPFS} */ let nodeA + /** @type {import('ipfs-core-types').IPFS} */ let nodeB + /** @type {import('ipfs-core-types/src/root').IDResult} */ let idA + /** @type 
{import('ipfs-core-types/src/root').IDResult} */ let idB before(async function () { @@ -66,11 +73,15 @@ module.exports = (factory, options) => { after(() => factory.clean()) it('should publish and then resolve correctly', async function () { + // @ts-ignore this is mocha this.timeout(80 * 1000) let subscribed = false - function checkMessage (msg) { + /** + * @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} + */ + function checkMessage () { subscribed = true } @@ -87,7 +98,7 @@ module.exports = (factory, options) => { await waitFor(async () => { const res = await nodeA.pubsub.peers(topic) - return res && res.length + return Boolean(res && res.length) }, { name: `node A to subscribe to ${topic}` }) await nodeB.pubsub.subscribe(topic, checkMessage) await nodeA.name.publish(ipfsRef, { resolve: false }) @@ -100,6 +111,7 @@ module.exports = (factory, options) => { }) it('should self resolve, publish and then resolve correctly', async function () { + // @ts-ignore this is mocha this.timeout(6000) const emptyDirCid = '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' const { path } = await nodeA.add(uint8ArrayFromString('pubsub records')) @@ -125,30 +137,32 @@ module.exports = (factory, options) => { }) it('should handle event on publish correctly', async function () { + // @ts-ignore this is mocha this.timeout(80 * 1000) const testAccountName = 'test-account' - let publishedMessageKey - let publishedMessage = null - let publishedMessageData = null - let publishedMessageDataValue = null + /** + * @type {import('ipfs-core-types/src/pubsub').Message} + */ + let publishedMessage + /** + * @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} + */ function checkMessage (msg) { - publishedMessageKey = msg.from publishedMessage = msg - publishedMessageData = ipns.unmarshal(msg.data) - publishedMessageDataValue = uint8ArrayToString(publishedMessageData.value) } const alreadySubscribed = () => { - return publishedMessage !== null + return 
Boolean(publishedMessage) } // Create account for publish const testAccount = await nodeA.key.gen(testAccountName, { type: 'rsa', - size: 2048 + size: 2048, + 'ipns-base': 'b58mh' }) const keys = ipns.getIdKeys(uint8ArrayFromString(testAccount.id, 'base58btc')) @@ -157,17 +171,29 @@ module.exports = (factory, options) => { await nodeB.pubsub.subscribe(topic, checkMessage) await nodeA.name.publish(ipfsRef, { resolve: false, key: testAccountName }) await waitFor(alreadySubscribed) - const messageKey = await PeerId.createFromB58String(publishedMessageKey) + + // @ts-ignore publishedMessage is set in handler + if (!publishedMessage) { + throw new Error('Pubsub handler not invoked') + } + + const publishedMessageData = ipns.unmarshal(publishedMessage.data) + + if (!publishedMessageData.pubKey) { + throw new Error('No public key found in message data') + } + + const messageKey = await PeerId.createFromB58String(publishedMessage.from) const pubKeyPeerId = await PeerId.createFromPubKey(publishedMessageData.pubKey) expect(pubKeyPeerId.toB58String()).not.to.equal(messageKey.toB58String()) expect(pubKeyPeerId.toB58String()).to.equal(testAccount.id) expect(publishedMessage.from).to.equal(idA.id) expect(messageKey.toB58String()).to.equal(idA.id) - expect(publishedMessageDataValue).to.equal(ipfsRef) + expect(uint8ArrayToString(publishedMessageData.value)).to.equal(ipfsRef) // Verify the signature - await ipns.validate(pubKeyPeerId._pubKey, publishedMessageData) + await ipns.validate(pubKeyPeerId.pubKey, publishedMessageData) }) }) } diff --git a/packages/interface-ipfs-core/src/name-pubsub/state.js b/packages/interface-ipfs-core/src/name-pubsub/state.js index 01da8dbaac..ef33cb6ff0 100644 --- a/packages/interface-ipfs-core/src/name-pubsub/state.js +++ b/packages/interface-ipfs-core/src/name-pubsub/state.js @@ -3,25 +3,30 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common 
+ * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.name.pubsub.state', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get the current state of pubsub', async function () { + // @ts-ignore this is mocha this.timeout(50 * 1000) const res = await ipfs.name.pubsub.state() diff --git a/packages/interface-ipfs-core/src/name-pubsub/subs.js b/packages/interface-ipfs-core/src/name-pubsub/subs.js index f6a555d054..26a70ee719 100644 --- a/packages/interface-ipfs-core/src/name-pubsub/subs.js +++ b/packages/interface-ipfs-core/src/name-pubsub/subs.js @@ -4,25 +4,30 @@ const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.name.pubsub.subs', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get an empty array as a result of subscriptions before any resolve', async function () { + // @ts-ignore this is mocha this.timeout(60 * 1000) const res = await ipfs.name.pubsub.subs() @@ -31,6 +36,7 @@ module.exports = (common, options) => { }) it('should get the list of subscriptions updated after a 
resolve', async function () { + // @ts-ignore this is mocha this.timeout(300 * 1000) const id = 'QmNP1ASen5ZREtiJTtVD3jhMKhoPb1zppET1tgpjHx2NGA' diff --git a/packages/interface-ipfs-core/src/name/publish.js b/packages/interface-ipfs-core/src/name/publish.js index 07f9b75664..a87ab3c7e7 100644 --- a/packages/interface-ipfs-core/src/name/publish.js +++ b/packages/interface-ipfs-core/src/name/publish.js @@ -8,35 +8,46 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const last = require('it-last') const PeerId = require('peer-id') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.name.publish offline', () => { const keyName = nanoid() + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {string} */ let nodeId before(async () => { - ipfs = (await common.spawn()).api - nodeId = ipfs.peerId.id + ipfs = (await factory.spawn()).api + const peerInfo = await ipfs.id() + nodeId = peerInfo.id await ipfs.add(fixture.data, { pin: false }) }) - after(() => common.clean()) + after(() => factory.clean()) it('should publish an IPNS record with the default params', async function () { + // @ts-ignore this is mocha this.timeout(50 * 1000) const value = fixture.cid const keys = await ipfs.key.list() const self = keys.find(key => key.name === 'self') + if (!self) { + throw new Error('No self key found') + } + const res = await ipfs.name.publish(value, { allowOffline: true }) expect(res).to.exist() @@ -51,12 +62,17 @@ module.exports = (common, options) => { }) it('should publish correctly when the file was not added but resolve is disabled', async function () { + // @ts-ignore this is mocha this.timeout(50 * 1000) const value = 
'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' const keys = await ipfs.key.list() const self = keys.find(key => key.name === 'self') + if (!self) { + throw new Error('No self key found') + } + const options = { resolve: false, lifetime: '1m', @@ -72,6 +88,7 @@ module.exports = (common, options) => { }) it('should publish with a key received as param, instead of using the key of the node', async function () { + // @ts-ignore this is mocha this.timeout(90 * 1000) const value = fixture.cid diff --git a/packages/interface-ipfs-core/src/name/resolve.js b/packages/interface-ipfs-core/src/name/resolve.js index 7a8834e2e1..e459894851 100644 --- a/packages/interface-ipfs-core/src/name/resolve.js +++ b/packages/interface-ipfs-core/src/name/resolve.js @@ -9,27 +9,34 @@ const last = require('it-last') const { CID } = require('multiformats/cid') const Digest = require('multiformats/hashes/digest') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.name.resolve offline', function () { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {string} */ let nodeId before(async () => { - ipfs = (await common.spawn()).api - nodeId = ipfs.peerId.id + ipfs = (await factory.spawn()).api + const peerInfo = await ipfs.id() + nodeId = peerInfo.id }) - after(() => common.clean()) + after(() => factory.clean()) it('should resolve a record default options', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record default options')) @@ -43,6 +50,7 @@ module.exports = (common, options) => { }) it('should resolve a record from peerid as cidv1 in base32', async function () 
{ + // @ts-ignore this is mocha this.timeout(20 * 1000) const { cid } = await ipfs.add(uint8ArrayFromString('should resolve a record from cidv1b32')) const { id: peerId } = await ipfs.id() @@ -64,6 +72,7 @@ module.exports = (common, options) => { }) it('should resolve a record recursive === true', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record recursive === true')) @@ -77,6 +86,7 @@ module.exports = (common, options) => { }) it('should resolve a record default options with remainder', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record default options with remainder')) @@ -97,6 +107,7 @@ module.exports = (common, options) => { }) it('should resolve a record recursive === true with remainder', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) const { path } = await ipfs.add(uint8ArrayFromString('should resolve a record recursive = true with remainder')) @@ -131,14 +142,15 @@ module.exports = (common, options) => { }) describe('.name.resolve dns', function () { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs this.retries(5) before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should resolve /ipns/ipfs.io', async () => { expect(await last(ipfs.name.resolve('/ipns/ipfs.io'))) diff --git a/packages/interface-ipfs-core/src/object/data.js b/packages/interface-ipfs-core/src/object/data.js index 86450961f0..6036a4f065 100644 --- a/packages/interface-ipfs-core/src/object/data.js +++ b/packages/interface-ipfs-core/src/object/data.js @@ -5,25 +5,29 @@ const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') const uint8ArrayFromString = require('uint8arrays/from-string') -/** @typedef { 
import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.data', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get data by CID', async () => { const testObj = { @@ -38,10 +42,12 @@ module.exports = (common, options) => { }) it('returns error for request without argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.data(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.data('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/object/get.js b/packages/interface-ipfs-core/src/object/get.js index 2d925e4702..9e23b06072 100644 --- a/packages/interface-ipfs-core/src/object/get.js +++ b/packages/interface-ipfs-core/src/object/get.js @@ -10,25 +10,29 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.get', function () { this.timeout(80 
* 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get object by multihash', async () => { const obj = { @@ -70,7 +74,7 @@ module.exports = (common, options) => { } const node1b = { Data: node1a.Data, - Links: node1a.Links.concat(link) + Links: [link] } const node1bCid = await ipfs.object.put(node1b) @@ -121,16 +125,23 @@ module.exports = (common, options) => { }) const node = await ipfs.object.get(result.cid) + + if (!node.Data) { + throw new Error('Node did not have data') + } + const meta = UnixFS.unmarshal(node.Data) expect(meta.fileSize()).to.equal(data.length) }) it('should error for request without argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.get(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.get('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/object/links.js b/packages/interface-ipfs-core/src/object/links.js index 64dc175e31..313fcf7e8b 100644 --- a/packages/interface-ipfs-core/src/object/links.js +++ b/packages/interface-ipfs-core/src/object/links.js @@ -8,25 +8,29 @@ const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.links', function () 
{ this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get empty links by multihash', async () => { const testObj = { @@ -58,7 +62,7 @@ module.exports = (common, options) => { } const node1b = { Data: node1a.Data, - Links: node1a.Links.concat(link) + Links: [link] } const node1bCid = await ipfs.object.put(node1b) @@ -100,10 +104,12 @@ module.exports = (common, options) => { }) it('returns error for request without argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.links(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.links('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/object/new.js b/packages/interface-ipfs-core/src/object/new.js index fa0df1eb7e..756d2f5dee 100644 --- a/packages/interface-ipfs-core/src/object/new.js +++ b/packages/interface-ipfs-core/src/object/new.js @@ -3,25 +3,29 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.new', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should create a new object with no 
template', async () => { const cid = await ipfs.object.new() diff --git a/packages/interface-ipfs-core/src/object/patch/add-link.js b/packages/interface-ipfs-core/src/object/patch/add-link.js index 3682533e04..8f162ca63f 100644 --- a/packages/interface-ipfs-core/src/object/patch/add-link.js +++ b/packages/interface-ipfs-core/src/object/patch/add-link.js @@ -7,25 +7,29 @@ const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.patch.addLink', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should add a link to an existing node', async () => { const obj = { @@ -54,7 +58,7 @@ module.exports = (common, options) => { } const node1b = { Data: node1a.Data, - Links: node1a.Links.concat(link) + Links: [link] } const node1bCid = await ipfs.object.put(node1b) @@ -87,10 +91,12 @@ module.exports = (common, options) => { }) it('returns error for request without arguments', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.addLink(null, null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with only one invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.addLink('invalid', null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git 
a/packages/interface-ipfs-core/src/object/patch/append-data.js b/packages/interface-ipfs-core/src/object/patch/append-data.js index b6672166ff..4626de1604 100644 --- a/packages/interface-ipfs-core/src/object/patch/append-data.js +++ b/packages/interface-ipfs-core/src/object/patch/append-data.js @@ -4,25 +4,29 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.patch.appendData', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should append data to an existing node', async () => { const obj = { @@ -36,12 +40,14 @@ module.exports = (common, options) => { }) it('returns error for request without key & data', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.appendData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' + // @ts-expect-error invalid arg return expect(ipfs.object.patch.appendData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/object/patch/rm-link.js b/packages/interface-ipfs-core/src/object/patch/rm-link.js index 4b5f574e15..e5e384f06d 100644 --- a/packages/interface-ipfs-core/src/object/patch/rm-link.js +++ b/packages/interface-ipfs-core/src/object/patch/rm-link.js @@ 
-7,25 +7,29 @@ const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.patch.rmLink', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should remove a link from an existing node', async () => { const obj1 = { @@ -60,11 +64,13 @@ module.exports = (common, options) => { }) it('returns error for request without arguments', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.rmLink(null, null)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) it('returns error for request only one invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.rmLink('invalid', null)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) @@ -73,6 +79,7 @@ module.exports = (common, options) => { const root = '' const link = 'foo' + // @ts-expect-error invalid arg return expect(ipfs.object.patch.rmLink(root, link)).to.eventually.be.rejected .and.be.an.instanceOf(Error) }) diff --git a/packages/interface-ipfs-core/src/object/patch/set-data.js b/packages/interface-ipfs-core/src/object/patch/set-data.js index 8f610116d7..fa61057b8a 100644 --- a/packages/interface-ipfs-core/src/object/patch/set-data.js +++ b/packages/interface-ipfs-core/src/object/patch/set-data.js @@ -4,25 +4,29 @@ const uint8ArrayFromString = 
require('uint8arrays/from-string') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.patch.setData', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should set data for an existing node', async () => { const obj = { @@ -40,12 +44,14 @@ module.exports = (common, options) => { }) it('returns error for request without key & data', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.patch.setData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' + // @ts-expect-error invalid arg return expect(ipfs.object.patch.setData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/object/put.js b/packages/interface-ipfs-core/src/object/put.js index 288d6abebf..095a395295 100644 --- a/packages/interface-ipfs-core/src/object/put.js +++ b/packages/interface-ipfs-core/src/object/put.js @@ -10,25 +10,29 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const first = require('it-first') const drain = require('it-drain') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, 
options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.put', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should put an object', async () => { const obj = { @@ -84,6 +88,7 @@ module.exports = (common, options) => { }) it('should fail if a string is passed', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.put(nanoid())).to.eventually.be.rejected() }) @@ -104,7 +109,7 @@ module.exports = (common, options) => { } const node1b = { Data: node1a.Data, - Links: node1a.Links.concat(link) + Links: [link] } const cid = await ipfs.object.put(node1b) diff --git a/packages/interface-ipfs-core/src/object/stat.js b/packages/interface-ipfs-core/src/object/stat.js index ba69a37be6..2f6bca03cd 100644 --- a/packages/interface-ipfs-core/src/object/stat.js +++ b/packages/interface-ipfs-core/src/object/stat.js @@ -8,25 +8,29 @@ const { CID } = require('multiformats/cid') const { sha256 } = require('multiformats/hashes/sha2') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.object.stat', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get stats by multihash', async () => { const testObj 
= { @@ -65,7 +69,7 @@ module.exports = (common, options) => { } const node1b = { Data: node1a.Data, - Links: node1a.Links.concat(link) + Links: [link] } const node1bCid = await ipfs.object.put(node1b) @@ -82,10 +86,12 @@ module.exports = (common, options) => { }) it('returns error for request without argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.stat(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { + // @ts-expect-error invalid arg return expect(ipfs.object.stat('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) diff --git a/packages/interface-ipfs-core/src/pin/add-all.js b/packages/interface-ipfs-core/src/pin/add-all.js index aa318358ac..72d0344456 100644 --- a/packages/interface-ipfs-core/src/pin/add-all.js +++ b/packages/interface-ipfs-core/src/pin/add-all.js @@ -6,21 +6,25 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') const drain = require('it-drain') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pin.addAll', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api await drain( ipfs.addAll( @@ -42,12 +46,16 @@ module.exports = (common, options) => { ) }) - after(() => common.clean()) + after(() => factory.clean()) beforeEach(() => { return clearPins(ipfs) }) + /** + * + * @param {Iterable | AsyncIterable} source + */ async function testAddPinInput (source) { const pinset = await all(ipfs.pin.addAll(source)) diff 
--git a/packages/interface-ipfs-core/src/pin/add.js b/packages/interface-ipfs-core/src/pin/add.js index a71f1398ec..a7608abf08 100644 --- a/packages/interface-ipfs-core/src/pin/add.js +++ b/packages/interface-ipfs-core/src/pin/add.js @@ -7,21 +7,25 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') const drain = require('it-drain') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pin.add', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api await drain( ipfs.addAll( @@ -43,7 +47,7 @@ module.exports = (common, options) => { ) }) - after(() => common.clean()) + after(() => factory.clean()) beforeEach(() => { return clearPins(ipfs) @@ -94,12 +98,13 @@ module.exports = (common, options) => { return expect(ipfs.pin.add(fixtures.directory.cid, { recursive: false })) - .to.eventually.be.rejected() - .with(/already pinned recursively/) + .to.eventually.be.rejectedWith(/already pinned recursively/) }) it('should fail to pin a hash not in datastore', async function () { + // @ts-ignore this is mocha this.slow(3 * 1000) + // @ts-ignore this is mocha this.timeout(5 * 1000) const falseHash = `${`${fixtures.directory.cid}`.slice(0, -2)}ss` @@ -108,7 +113,9 @@ module.exports = (common, options) => { }) it('needs all children in datastore to pin recursively', async function () { + // @ts-ignore this is mocha this.slow(3 * 1000) + // @ts-ignore this is mocha this.timeout(5 * 1000) await all(ipfs.block.rm(fixtures.directory.files[0].cid)) diff --git 
a/packages/interface-ipfs-core/src/pin/ls.js b/packages/interface-ipfs-core/src/pin/ls.js index 4e02b08251..3673083ddf 100644 --- a/packages/interface-ipfs-core/src/pin/ls.js +++ b/packages/interface-ipfs-core/src/pin/ls.js @@ -5,22 +5,26 @@ const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pin.ls', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api // two files wrapped in directories, only root CID pinned recursively const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) await all(ipfs.addAll(dir, { pin: false, cidVersion: 0 })) @@ -33,7 +37,7 @@ module.exports = (common, options) => { await ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) }) - after(() => common.clean()) + after(() => factory.clean()) // 1st, because ipfs.add pins automatically it('should list all recursive pins', async () => { diff --git a/packages/interface-ipfs-core/src/pin/remote/add.js b/packages/interface-ipfs-core/src/pin/remote/add.js index 3607777771..dcf63be07e 100644 --- a/packages/interface-ipfs-core/src/pin/remote/add.js +++ b/packages/interface-ipfs-core/src/pin/remote/add.js @@ -4,25 +4,29 @@ const { fixtures, clearRemotePins, clearServices } = require('../utils') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef 
{import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const ENDPOINT = new URL(process.env.PINNING_SERVICE_ENDPOINT || '') - const KEY = process.env.PINNING_SERVIEC_KEY + const KEY = `${process.env.PINNING_SERVICE_KEY}` const SERVICE = 'pinbot' describe('.pin.remote.add', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api await ipfs.pin.remote.service.add(SERVICE, { endpoint: ENDPOINT, key: KEY @@ -30,7 +34,7 @@ module.exports = (common, options) => { }) after(async () => { await clearServices(ipfs) - await common.clean() + await factory.clean() }) beforeEach(async () => { diff --git a/packages/interface-ipfs-core/src/pin/remote/ls.js b/packages/interface-ipfs-core/src/pin/remote/ls.js index 888572937f..9cd5d36a54 100644 --- a/packages/interface-ipfs-core/src/pin/remote/ls.js +++ b/packages/interface-ipfs-core/src/pin/remote/ls.js @@ -6,17 +6,20 @@ const { getDescribe, getIt, expect } = require('../../utils/mocha') const all = require('it-all') const { CID } = require('multiformats/cid') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const ENDPOINT = new URL(process.env.PINNING_SERVICE_ENDPOINT || '') - const KEY = process.env.PINNING_SERVIEC_KEY + const KEY = `${process.env.PINNING_SERVICE_KEY}` const SERVICE = 'pinbot' const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') @@ -27,9 +30,10 @@ 
module.exports = (common, options) => { describe('.pin.remote.ls', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api await ipfs.pin.remote.service.add(SERVICE, { endpoint: ENDPOINT, key: KEY @@ -37,7 +41,7 @@ module.exports = (common, options) => { }) after(async () => { await clearServices(ipfs) - await common.clean() + await factory.clean() }) beforeEach(async () => { @@ -423,4 +427,8 @@ module.exports = (common, options) => { }) } +/** + * @param {{ cid: CID }} a + * @param {{ cid: CID }} b + */ const byCID = (a, b) => a.cid.toString() > b.cid.toString() ? 1 : -1 diff --git a/packages/interface-ipfs-core/src/pin/remote/rm-all.js b/packages/interface-ipfs-core/src/pin/remote/rm-all.js index 81885afe0a..0094547a98 100644 --- a/packages/interface-ipfs-core/src/pin/remote/rm-all.js +++ b/packages/interface-ipfs-core/src/pin/remote/rm-all.js @@ -6,17 +6,20 @@ const { getDescribe, getIt, expect } = require('../../utils/mocha') const { CID } = require('multiformats/cid') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const ENDPOINT = new URL(process.env.PINNING_SERVICE_ENDPOINT || '') - const KEY = process.env.PINNING_SERVIEC_KEY + const KEY = `${process.env.PINNING_SERVICE_KEY}` const SERVICE = 'pinbot' const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') @@ -27,9 +30,10 @@ module.exports = (common, options) => { describe('.pin.remote.rmAll()', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await 
common.spawn()).api + ipfs = (await factory.spawn()).api await ipfs.pin.remote.service.add(SERVICE, { endpoint: ENDPOINT, key: KEY @@ -37,7 +41,7 @@ module.exports = (common, options) => { }) after(async () => { await clearServices(ipfs) - await common.clean() + await factory.clean() }) beforeEach(async () => { @@ -153,4 +157,8 @@ module.exports = (common, options) => { }) } +/** + * @param {{ cid: CID }} a + * @param {{ cid: CID }} b + */ const byCID = (a, b) => a.cid.toString() > b.cid.toString() ? 1 : -1 diff --git a/packages/interface-ipfs-core/src/pin/remote/rm.js b/packages/interface-ipfs-core/src/pin/remote/rm.js index a2073233c2..3d99a2e49c 100644 --- a/packages/interface-ipfs-core/src/pin/remote/rm.js +++ b/packages/interface-ipfs-core/src/pin/remote/rm.js @@ -6,17 +6,20 @@ const { getDescribe, getIt, expect } = require('../../utils/mocha') const { CID } = require('multiformats/cid') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const ENDPOINT = new URL(process.env.PINNING_SERVICE_ENDPOINT || '') - const KEY = process.env.PINNING_SERVIEC_KEY + const KEY = `${process.env.PINNING_SERVICE_KEY}` const SERVICE = 'pinbot' const cid1 = CID.parse('QmbKtKBrmeRHjNCwR4zAfCJdMVu6dgmwk9M9AE9pUM9RgG') @@ -27,9 +30,10 @@ module.exports = (common, options) => { describe('.pin.remote.rm()', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api await ipfs.pin.remote.service.add(SERVICE, { endpoint: ENDPOINT, key: KEY @@ -37,7 +41,7 @@ module.exports = (common, options) => { }) after(async () => { await 
clearServices(ipfs) - await common.clean() + await factory.clean() }) beforeEach(async () => { @@ -171,4 +175,8 @@ module.exports = (common, options) => { }) } +/** + * @param {{ cid: CID }} a + * @param {{ cid: CID }} b + */ const byCID = (a, b) => a.cid.toString() > b.cid.toString() ? 1 : -1 diff --git a/packages/interface-ipfs-core/src/pin/remote/service.js b/packages/interface-ipfs-core/src/pin/remote/service.js index b0721950a2..ceccbf1928 100644 --- a/packages/interface-ipfs-core/src/pin/remote/service.js +++ b/packages/interface-ipfs-core/src/pin/remote/service.js @@ -4,28 +4,32 @@ const { clearServices } = require('../utils') const { getDescribe, getIt, expect } = require('../../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) const ENDPOINT = new URL(process.env.PINNING_SERVICE_ENDPOINT || '') - const KEY = process.env.PINNING_SERVIEC_KEY + const KEY = `${process.env.PINNING_SERVICE_KEY}` describe('.pin.remote.service', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) after(async () => { - await common.clean() + await factory.clean() }) afterEach(() => clearServices(ipfs)) @@ -44,11 +48,13 @@ module.exports = (common, options) => { }) it('service add requires endpoint', async () => { + // @ts-expect-error missing property const result = ipfs.pin.remote.service.add('noend', { key: 'token' }) await expect(result).to.eventually.be.rejectedWith(/is required/) }) it('service add requires key', async () => { + // @ts-expect-error missing property const result = ipfs.pin.remote.service.add('nokey', { 
endpoint: ENDPOINT }) @@ -87,7 +93,7 @@ module.exports = (common, options) => { }) const result = ipfs.pin.remote.service.add('pinbot', { - endpoint: 'http://pinbot.io/', + endpoint: new URL('http://pinbot.io/'), key: KEY }) @@ -143,6 +149,7 @@ module.exports = (common, options) => { key: KEY }) await ipfs.pin.remote.service.add('boombot', { + // @ts-expect-error invalid property endpoint: 'http://127.0.0.1:5555', key: 'boom' }) @@ -198,6 +205,7 @@ module.exports = (common, options) => { }) it('expects service name', async () => { + // @ts-expect-error invalid arg const result = ipfs.pin.remote.service.rm() await expect(result).to.eventually.be.rejectedWith(/is required/) }) @@ -205,4 +213,8 @@ module.exports = (common, options) => { }) } +/** + * @param {{ service: string }} a + * @param {{ service: string }} b + */ const byName = (a, b) => a.service > b.service ? 1 : -1 diff --git a/packages/interface-ipfs-core/src/pin/rm-all.js b/packages/interface-ipfs-core/src/pin/rm-all.js index f0ad36408f..b78204072b 100644 --- a/packages/interface-ipfs-core/src/pin/rm-all.js +++ b/packages/interface-ipfs-core/src/pin/rm-all.js @@ -6,21 +6,25 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') const drain = require('it-drain') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pin.rmAll', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs beforeEach(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) await all(ipfs.addAll(dir, { pin: false, 
cidVersion: 0 })) @@ -29,7 +33,7 @@ module.exports = (common, options) => { await ipfs.add(fixtures.files[1].data, { pin: false }) }) - after(() => common.clean()) + after(() => factory.clean()) beforeEach(() => { return clearPins(ipfs) diff --git a/packages/interface-ipfs-core/src/pin/rm.js b/packages/interface-ipfs-core/src/pin/rm.js index 59f1ae2642..d5421e5452 100644 --- a/packages/interface-ipfs-core/src/pin/rm.js +++ b/packages/interface-ipfs-core/src/pin/rm.js @@ -5,21 +5,25 @@ const { fixtures, expectPinned, clearPins } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pin.rm', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs beforeEach(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) await all(ipfs.addAll(dir, { pin: false, cidVersion: 0 })) @@ -27,7 +31,7 @@ module.exports = (common, options) => { await ipfs.add(fixtures.files[1].data, { pin: false }) }) - after(() => common.clean()) + after(() => factory.clean()) beforeEach(() => { return clearPins(ipfs) @@ -57,18 +61,18 @@ module.exports = (common, options) => { }) it('should fail to remove an indirect pin', async () => { - await ipfs.pin.add(fixtures.directory.cid) + await ipfs.pin.add(fixtures.directory.cid, { + recursive: true + }) await expect(ipfs.pin.rm(fixtures.directory.files[0].cid)) - .to.eventually.be.rejected() - .with(/is pinned indirectly under/) + 
.to.eventually.be.rejectedWith(/pinned indirectly/) await expectPinned(ipfs, fixtures.directory.files[0].cid) }) it('should fail when an item is not pinned', async () => { await expect(ipfs.pin.rm(fixtures.directory.cid)) - .to.eventually.be.rejected() - .with(/is not pinned/) + .to.eventually.be.rejectedWith(/not pinned/) }) }) } diff --git a/packages/interface-ipfs-core/src/pin/utils.js b/packages/interface-ipfs-core/src/pin/utils.js index eea1d59902..524a704944 100644 --- a/packages/interface-ipfs-core/src/pin/utils.js +++ b/packages/interface-ipfs-core/src/pin/utils.js @@ -4,7 +4,6 @@ const { expect } = require('../utils/mocha') const loadFixture = require('aegir/utils/fixtures') const { CID } = require('multiformats/cid') const drain = require('it-drain') -const map = require('it-map') const fromString = require('uint8arrays/from-string') const first = require('it-first') @@ -38,11 +37,17 @@ const fixtures = Object.freeze({ })]) }) +/** + * @param {import('ipfs-core-types').IPFS} ipfs + */ const clearPins = async (ipfs) => { - await drain(ipfs.pin.rmAll(map(ipfs.pin.ls({ type: pinTypes.recursive }), ({ cid }) => cid))) - await drain(ipfs.pin.rmAll(map(ipfs.pin.ls({ type: pinTypes.direct }), ({ cid }) => cid))) + await drain(ipfs.pin.rmAll(ipfs.pin.ls({ type: pinTypes.recursive }))) + await drain(ipfs.pin.rmAll(ipfs.pin.ls({ type: pinTypes.direct }))) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + */ const clearRemotePins = async (ipfs) => { for (const { service } of await ipfs.pin.remote.service.ls()) { const cids = [] @@ -61,6 +66,11 @@ const clearRemotePins = async (ipfs) => { } } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {string} service + * @param {Record} pins + */ const addRemotePins = async (ipfs, service, pins) => { const requests = [] for (const [name, cid] of Object.entries(pins)) { @@ -73,11 +83,20 @@ const addRemotePins = async (ipfs, service, pins) => { await Promise.all(requests) } +/** + * @param 
{import('ipfs-core-types').IPFS} ipfs + */ const clearServices = async (ipfs) => { const services = await ipfs.pin.remote.service.ls() await Promise.all(services.map(({ service }) => ipfs.pin.remote.service.rm(service))) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + * @param {string} type + * @param {boolean} pinned + */ const expectPinned = async (ipfs, cid, type = pinTypes.all, pinned = true) => { if (typeof type === 'boolean') { pinned = type @@ -88,10 +107,20 @@ const expectPinned = async (ipfs, cid, type = pinTypes.all, pinned = true) => { expect(result).to.eql(pinned) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + * @param {string} type + */ const expectNotPinned = (ipfs, cid, type = pinTypes.all) => { return expectPinned(ipfs, cid, type, false) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + * @param {string} type + */ async function isPinnedWithType (ipfs, cid, type) { try { const res = await first(ipfs.pin.ls({ paths: cid, type })) diff --git a/packages/interface-ipfs-core/src/ping/ping.js b/packages/interface-ipfs-core/src/ping/ping.js index 71dbbbccd0..ef073eab22 100644 --- a/packages/interface-ipfs-core/src/ping/ping.js +++ b/packages/interface-ipfs-core/src/ping/ping.js @@ -7,12 +7,15 @@ const all = require('it-all') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -20,21 +23,26 @@ module.exports = (common, options) => { describe('.ping', function () { this.timeout(60 * 1000) + /** 
@type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let nodeBId before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + nodeBId = await ipfsB.id() + await ipfsA.swarm.connect(nodeBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should send the specified number of packets', async () => { const count = 3 - const responses = await all(ipfsA.ping(ipfsB.peerId.id, { count })) + const responses = await all(ipfsA.ping(nodeBId.id, { count })) responses.forEach(expectIsPingResponse) const pongs = responses.filter(isPong) @@ -56,7 +64,7 @@ module.exports = (common, options) => { }) it('can ping without options', async () => { - const res = await all(ipfsA.ping(ipfsB.peerId.id)) + const res = await all(ipfsA.ping(nodeBId.id)) expect(res.length).to.be.ok() expect(res[0].success).to.be.true() }) diff --git a/packages/interface-ipfs-core/src/ping/utils.js b/packages/interface-ipfs-core/src/ping/utils.js index 8009cada95..229aa94eab 100644 --- a/packages/interface-ipfs-core/src/ping/utils.js +++ b/packages/interface-ipfs-core/src/ping/utils.js @@ -2,6 +2,9 @@ const { expect } = require('../utils/mocha') +/** + * @param {*} obj + */ function expectIsPingResponse (obj) { expect(obj).to.have.a.property('success') expect(obj).to.have.a.property('time') @@ -13,7 +16,11 @@ function expectIsPingResponse (obj) { exports.expectIsPingResponse = expectIsPingResponse -// Determine if a ping response object is a pong, or something else, like a status message +/** + * 
Determine if a ping response object is a pong, or something else, like a status message + * + * @param {*} pingResponse + */ function isPong (pingResponse) { return Boolean(pingResponse && pingResponse.success && !pingResponse.text) } diff --git a/packages/interface-ipfs-core/src/pubsub/ls.js b/packages/interface-ipfs-core/src/pubsub/ls.js index 85449bf02c..64ee248078 100644 --- a/packages/interface-ipfs-core/src/pubsub/ls.js +++ b/packages/interface-ipfs-core/src/pubsub/ls.js @@ -5,22 +5,27 @@ const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const delay = require('delay') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pubsub.ls', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {string[]} */ let subscribedTopics = [] before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) afterEach(async () => { @@ -31,7 +36,7 @@ module.exports = (common, options) => { await delay(100) }) - after(() => common.clean()) + after(() => factory.clean()) it('should return an empty list when no topics are subscribed', async () => { const topics = await ipfs.pubsub.ls() diff --git a/packages/interface-ipfs-core/src/pubsub/peers.js b/packages/interface-ipfs-core/src/pubsub/peers.js index 8bdcb1c97f..88b065922b 100644 --- a/packages/interface-ipfs-core/src/pubsub/peers.js +++ b/packages/interface-ipfs-core/src/pubsub/peers.js @@ -7,12 +7,15 @@ const delay = require('delay') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { 
import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -20,22 +23,37 @@ module.exports = (common, options) => { describe('.pubsub.peers', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs1 + /** @type {import('ipfs-core-types').IPFS} */ let ipfs2 + /** @type {import('ipfs-core-types').IPFS} */ let ipfs3 + /** @type {string[]} */ let subscribedTopics = [] + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfs2Id + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfs3Id before(async () => { - ipfs1 = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfs1 = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfs2 = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - ipfs3 = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfs2 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfs3 = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api - const ipfs2Addr = ipfs2.peerId.addresses + ipfs2Id = await ipfs2.id() + ipfs3Id = await ipfs3.id() + + const ipfs2Addr = ipfs2Id.addresses .find(ma => ma.nodeAddress().address === '127.0.0.1') - const ipfs3Addr = ipfs3.peerId.addresses + const ipfs3Addr = ipfs3Id.addresses .find(ma => ma.nodeAddress().address === '127.0.0.1') + if (!ipfs2Addr || !ipfs3Addr) { + throw new Error('Could not find addrs') + } + await ipfs1.swarm.connect(ipfs2Addr) await ipfs1.swarm.connect(ipfs3Addr) await ipfs2.swarm.connect(ipfs3Addr) @@ -51,7 +69,7 @@ module.exports = (common, options) => { await delay(100) }) - after(() => common.clean()) + after(() => factory.clean()) it('should not error when not subscribed to a topic', async () => { const topic = getTopic() @@ -95,7 +113,7 @@ module.exports = (common, options) => { await ipfs2.pubsub.subscribe(topic, sub2) await ipfs3.pubsub.subscribe(topic, sub3) - await waitForPeers(ipfs1, topic, [ipfs2.peerId.id], 30000) + await waitForPeers(ipfs1, topic, [ipfs2Id.id], 30000) }) it('should return peers for a topic - multiple peers', async () => { @@ -110,7 +128,7 @@ module.exports = (common, options) => { await ipfs2.pubsub.subscribe(topic, sub2) await ipfs3.pubsub.subscribe(topic, sub3) - await waitForPeers(ipfs1, topic, [ipfs2.peerId.id, ipfs3.peerId.id], 30000) + await waitForPeers(ipfs1, topic, [ipfs2Id.id, ipfs3Id.id], 30000) }) }) } diff --git a/packages/interface-ipfs-core/src/pubsub/publish.js b/packages/interface-ipfs-core/src/pubsub/publish.js index cab0494ae9..4bcb734906 100644 --- a/packages/interface-ipfs-core/src/pubsub/publish.js +++ b/packages/interface-ipfs-core/src/pubsub/publish.js @@ -6,33 +6,33 @@ const { nanoid } = require('nanoid') const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * 
@param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pubsub.publish', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) - - it('should publish message from string', () => { - const topic = getTopic() - return ipfs.pubsub.publish(topic, 'hello friend') - }) + after(() => factory.clean()) it('should fail with undefined msg', async () => { const topic = getTopic() + // @ts-ignore invalid parameter await expect(ipfs.pubsub.publish(topic)).to.eventually.rejectedWith('argument "data" is required') }) diff --git a/packages/interface-ipfs-core/src/pubsub/subscribe.js b/packages/interface-ipfs-core/src/pubsub/subscribe.js index e8ca12b831..4f883c5026 100644 --- a/packages/interface-ipfs-core/src/pubsub/subscribe.js +++ b/packages/interface-ipfs-core/src/pubsub/subscribe.js @@ -14,12 +14,15 @@ const { isWebWorker, isNode } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') const first = require('it-first') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -27,18 +30,29 @@ module.exports = (common, options) => { describe('.pubsub.subscribe', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs1 + /** @type {import('ipfs-core-types').IPFS} */ let ipfs2 + /** @type {string} */ let topic + /** @type {string[]} 
*/ let subscribedTopics = [] + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfs1Id + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfs2Id before(async () => { - ipfs1 = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfs1 = (await factory.spawn({ type: 'proc', ipfsOptions })).api // TODO 'multiple connected nodes' tests fails with go in Firefox // and JS is flaky everywhere // webworkers are not dialable because webrtc is not available - ipfs2 = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfs2 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + + ipfs1Id = await ipfs1.id() + ipfs2Id = await ipfs2.id() }) beforeEach(() => { @@ -56,7 +70,7 @@ module.exports = (common, options) => { await delay(100) }) - after(() => common.clean()) + after(() => factory.clean()) describe('single node', () => { it('should subscribe to one topic', async () => { @@ -74,7 +88,7 @@ module.exports = (common, options) => { expect(msg).to.have.property('seqno') expect(msg.seqno).to.be.an.instanceof(Uint8Array) expect(msg.topicIDs[0]).to.eq(topic) - expect(msg).to.have.property('from', ipfs1.peerId.id) + expect(msg).to.have.property('from', ipfs1Id.id) }) it('should subscribe to one topic with options', async () => { @@ -92,7 +106,7 @@ module.exports = (common, options) => { expect(msg).to.have.property('seqno') expect(msg.seqno).to.be.an.instanceof(Uint8Array) expect(msg.topicIDs[0]).to.eq(topic) - expect(msg).to.have.property('from', ipfs1.peerId.id) + expect(msg).to.have.property('from', ipfs1Id.id) } }) @@ -100,10 +114,12 @@ module.exports = (common, options) => { const msgStream1 = pushable() const msgStream2 = pushable() + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const handler1 = msg => { msgStream1.push(msg) msgStream1.end() } + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const handler2 = msg => { msgStream2.push(msg) msgStream2.end() @@ 
-161,19 +177,24 @@ module.exports = (common, options) => { ipfs2.pubsub.setMaxListeners(100) } - const ipfs2Addr = ipfs2.peerId.addresses + const ipfs2Addr = ipfs2Id.addresses .find(ma => ma.nodeAddress().address === '127.0.0.1') + if (!ipfs2Addr) { + throw new Error('No address found') + } + return ipfs1.swarm.connect(ipfs2Addr) }) it('should receive messages from a different node with floodsub', async function () { if (!isNode) { + // @ts-ignore this is mocha return this.skip() } const expectedString = 'should receive messages from a different node with floodsub' const topic = `floodsub-${nanoid()}` - const ipfs1 = (await common.spawn({ + const ipfs1 = (await factory.spawn({ ipfsOptions: { config: { Pubsub: { @@ -182,7 +203,8 @@ module.exports = (common, options) => { } } })).api - const ipfs2 = (await common.spawn({ + const ipfs1Id = await ipfs1.id() + const ipfs2 = (await factory.spawn({ type: isWebWorker ? 'go' : undefined, ipfsOptions: { config: { @@ -192,15 +214,18 @@ module.exports = (common, options) => { } } })).api - await ipfs1.swarm.connect(ipfs2.peerId.addresses[0]) + const ipfs2Id = await ipfs2.id() + await ipfs1.swarm.connect(ipfs2Id.addresses[0]) const msgStream1 = pushable() const msgStream2 = pushable() + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub1 = msg => { msgStream1.push(msg) msgStream1.end() } + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub2 = msg => { msgStream2.push(msg) msgStream2.end() @@ -213,16 +238,16 @@ module.exports = (common, options) => { ipfs2.pubsub.subscribe(topic, sub2, { signal: abort2.signal }) ]) - await waitForPeers(ipfs2, topic, [ipfs1.peerId.id], 30000) + await waitForPeers(ipfs2, topic, [ipfs1Id.id], 30000) await ipfs2.pubsub.publish(topic, uint8ArrayFromString(expectedString)) const [sub1Msg] = await all(msgStream1) expect(uint8ArrayToString(sub1Msg.data)).to.be.eql(expectedString) - expect(sub1Msg.from).to.eql(ipfs2.peerId.id) + 
expect(sub1Msg.from).to.eql(ipfs2Id.id) const [sub2Msg] = await all(msgStream2) expect(uint8ArrayToString(sub2Msg.data)).to.be.eql(expectedString) - expect(sub2Msg.from).to.eql(ipfs2.peerId.id) + expect(sub2Msg.from).to.eql(ipfs2Id.id) abort1.abort() abort2.abort() }) @@ -233,10 +258,12 @@ module.exports = (common, options) => { const msgStream1 = pushable() const msgStream2 = pushable() + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub1 = msg => { msgStream1.push(msg) msgStream1.end() } + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub2 = msg => { msgStream2.push(msg) msgStream2.end() @@ -247,17 +274,17 @@ module.exports = (common, options) => { ipfs2.pubsub.subscribe(topic, sub2) ]) - await waitForPeers(ipfs2, topic, [ipfs1.peerId.id], 30000) + await waitForPeers(ipfs2, topic, [ipfs1Id.id], 30000) await delay(5000) // gossipsub need this delay https://github.com/libp2p/go-libp2p-pubsub/issues/331 await ipfs2.pubsub.publish(topic, uint8ArrayFromString(expectedString)) const [sub1Msg] = await all(msgStream1) expect(uint8ArrayToString(sub1Msg.data)).to.be.eql(expectedString) - expect(sub1Msg.from).to.eql(ipfs2.peerId.id) + expect(sub1Msg.from).to.eql(ipfs2Id.id) const [sub2Msg] = await all(msgStream2) expect(uint8ArrayToString(sub2Msg.data)).to.be.eql(expectedString) - expect(sub2Msg.from).to.eql(ipfs2.peerId.id) + expect(sub2Msg.from).to.eql(ipfs2Id.id) }) it('should round trip a non-utf8 binary buffer', async () => { @@ -267,10 +294,12 @@ module.exports = (common, options) => { const msgStream1 = pushable() const msgStream2 = pushable() + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub1 = msg => { msgStream1.push(msg) msgStream1.end() } + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub2 = msg => { msgStream2.push(msg) msgStream2.end() @@ -281,17 +310,17 @@ module.exports = (common, options) => { ipfs2.pubsub.subscribe(topic, sub2) ]) - await 
waitForPeers(ipfs2, topic, [ipfs1.peerId.id], 30000) + await waitForPeers(ipfs2, topic, [ipfs1Id.id], 30000) await delay(5000) // gossipsub need this delay https://github.com/libp2p/go-libp2p-pubsub/issues/331 await ipfs2.pubsub.publish(topic, buffer) const [sub1Msg] = await all(msgStream1) expect(uint8ArrayToString(sub1Msg.data, 'base16')).to.be.eql(expectedHex) - expect(sub1Msg.from).to.eql(ipfs2.peerId.id) + expect(sub1Msg.from).to.eql(ipfs2Id.id) const [sub2Msg] = await all(msgStream2) expect(uint8ArrayToString(sub2Msg.data, 'base16')).to.be.eql(expectedHex) - expect(sub2Msg.from).to.eql(ipfs2.peerId.id) + expect(sub2Msg.from).to.eql(ipfs2Id.id) }) it('should receive multiple messages', async () => { @@ -300,26 +329,28 @@ module.exports = (common, options) => { const msgStream1 = pushable() const msgStream2 = pushable() + let sub1Called = 0 + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub1 = msg => { msgStream1.push(msg) - sub1.called++ - if (sub1.called === outbox.length) msgStream1.end() + sub1Called++ + if (sub1Called === outbox.length) msgStream1.end() } - sub1.called = 0 + let sub2Called = 0 + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub2 = msg => { msgStream2.push(msg) - sub2.called++ - if (sub2.called === outbox.length) msgStream2.end() + sub2Called++ + if (sub2Called === outbox.length) msgStream2.end() } - sub2.called = 0 await Promise.all([ ipfs1.pubsub.subscribe(topic, sub1), ipfs2.pubsub.subscribe(topic, sub2) ]) - await waitForPeers(ipfs2, topic, [ipfs1.peerId.id], 30000) + await waitForPeers(ipfs2, topic, [ipfs1Id.id], 30000) await delay(5000) // gossipsub need this delay https://github.com/libp2p/go-libp2p-pubsub/issues/331 for (let i = 0; i < outbox.length; i++) { @@ -327,36 +358,38 @@ module.exports = (common, options) => { } const sub1Msgs = await all(msgStream1) - sub1Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id)) + sub1Msgs.forEach(msg => 
expect(msg.from).to.eql(ipfs2Id.id)) const inbox1 = sub1Msgs.map(msg => uint8ArrayToString(msg.data)) expect(inbox1.sort()).to.eql(outbox.sort()) const sub2Msgs = await all(msgStream2) - sub2Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id)) + sub2Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2Id.id)) const inbox2 = sub2Msgs.map(msg => uint8ArrayToString(msg.data)) expect(inbox2.sort()).to.eql(outbox.sort()) }) it('should send/receive 100 messages', async function () { + // @ts-ignore this is mocha this.timeout(2 * 60 * 1000) const msgBase = 'msg - ' const count = 100 const msgStream = pushable() + let subCalled = 0 + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn} */ const sub = msg => { msgStream.push(msg) - sub.called++ - if (sub.called === count) msgStream.end() + subCalled++ + if (subCalled === count) msgStream.end() } - sub.called = 0 await Promise.all([ ipfs1.pubsub.subscribe(topic, sub), ipfs2.pubsub.subscribe(topic, () => {}) ]) - await waitForPeers(ipfs1, topic, [ipfs2.peerId.id], 30000) + await waitForPeers(ipfs1, topic, [ipfs2Id.id], 30000) await delay(5000) // gossipsub need this delay https://github.com/libp2p/go-libp2p-pubsub/issues/331 const startTime = new Date().getTime() @@ -373,7 +406,7 @@ module.exports = (common, options) => { console.log(`Send/Receive 100 messages took: ${duration} ms, ${opsPerSec} ops / s`) msgs.forEach(msg => { - expect(msg.from).to.eql(ipfs2.peerId.id) + expect(msg.from).to.eql(ipfs2Id.id) expect(uint8ArrayToString(msg.data).startsWith(msgBase)).to.be.true() }) }) diff --git a/packages/interface-ipfs-core/src/pubsub/unsubscribe.js b/packages/interface-ipfs-core/src/pubsub/unsubscribe.js index 55bf5813a4..abd22d69db 100644 --- a/packages/interface-ipfs-core/src/pubsub/unsubscribe.js +++ b/packages/interface-ipfs-core/src/pubsub/unsubscribe.js @@ -6,31 +6,36 @@ const { getTopic } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') const waitFor = 
require('../utils/wait-for') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.pubsub.unsubscribe', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) // Browser/worker has max ~5 open HTTP requests to the same origin const count = isBrowser || isWebWorker || isElectronRenderer ? 5 : 10 it(`should subscribe and unsubscribe ${count} times`, async () => { const someTopic = getTopic() + /** @type {import('ipfs-core-types/src/pubsub').MessageHandlerFn[]} */ const handlers = Array.from(Array(count), () => msg => {}) for (let i = 0; i < count; i++) { diff --git a/packages/interface-ipfs-core/src/pubsub/utils.js b/packages/interface-ipfs-core/src/pubsub/utils.js index a9c52fc8e8..e31d0e901e 100644 --- a/packages/interface-ipfs-core/src/pubsub/utils.js +++ b/packages/interface-ipfs-core/src/pubsub/utils.js @@ -3,6 +3,13 @@ const { nanoid } = require('nanoid') const delay = require('delay') +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {string} topic + * @param {string[]} peersToWait + * @param {number} waitForMs + * @returns + */ async function waitForPeers (ipfs, topic, peersToWait, waitForMs) { const start = Date.now() diff --git a/packages/interface-ipfs-core/src/refs-local.js b/packages/interface-ipfs-core/src/refs-local.js index a137bb4d04..e139b55888 100644 --- a/packages/interface-ipfs-core/src/refs-local.js +++ b/packages/interface-ipfs-core/src/refs-local.js @@ -10,27 +10,34 @@ const { CID } = require('multiformats/cid') const 
uint8ArrayEquals = require('uint8arrays/equals') const blockstore = require('./utils/blockstore-adapter') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.refs.local', function () { this.timeout(60 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get local refs', async function () { + /** + * @param {string} name + */ const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -44,7 +51,7 @@ module.exports = (common, options) => { const imported = await all(importer(dirs, blockstore(ipfs))) // otherwise go-ipfs doesn't show them in the local refs - await drain(ipfs.pin.addAll(imported.map(i => i.cid))) + await drain(ipfs.pin.addAll(imported.map(i => ({ cid: i.cid })))) const refs = await all(ipfs.refs.local()) const cids = refs.map(r => r.ref) diff --git a/packages/interface-ipfs-core/src/refs.js b/packages/interface-ipfs-core/src/refs.js index 618838c221..8db1f7ed83 100644 --- a/packages/interface-ipfs-core/src/refs.js +++ b/packages/interface-ipfs-core/src/refs.js @@ -11,44 +11,51 @@ const dagPb = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) 
describe('.refs', function () { this.timeout(60 * 1000) - let ipfs, pbRootCb, dagRootCid + /** @type {import('ipfs-core-types').IPFS} */ + let ipfs + /** @type {CID} */ + let pbRootCid + /** @type {CID} */ + let dagRootCid before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) before(async function () { - const cid = await loadPbContent(ipfs, getMockObjects()) - pbRootCb = cid + pbRootCid = await loadPbContent(ipfs, getMockObjects()) }) before(async function () { - const cid = await loadDagContent(ipfs, getMockObjects()) - dagRootCid = cid + dagRootCid = await loadDagContent(ipfs, getMockObjects()) }) - after(() => common.clean()) + after(() => factory.clean()) for (const [name, options] of Object.entries(getRefsTests())) { const { path, params, expected, expectError, expectTimeout } = options // eslint-disable-next-line no-loop-func it(name, async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) // Call out to IPFS - const p = (path ? path(pbRootCb) : pbRootCb) + const p = (path ? 
path(pbRootCid) : pbRootCid) if (expectTimeout) { return expect(all(ipfs.refs(p, params))).to.eventually.be.rejected @@ -75,6 +82,7 @@ module.exports = (common, options) => { }) it('should get refs with cbor links', async function () { + // @ts-ignore this is mocha this.timeout(20 * 1000) // Call out to IPFS @@ -119,6 +127,9 @@ function getMockObjects () { } } +/** + * @returns {Record string | string[], params: { edges?: boolean, format?: string, recursive?: boolean, unique?: boolean, maxDepth?: number, timeout?: number }, expected: string[], expectError?: boolean, expectTimeout?: boolean }>} + */ function getRefsTests () { return { 'should print added files': { @@ -305,18 +316,33 @@ function getRefsTests () { 'should not be able to specify edges and format': { params: { format: '', edges: true }, + expected: [], expectError: true }, 'should print nothing for non-existent hashes': { path: () => 'QmYmW4HiZhotsoSqnv2o1oSssvkRM8b9RweBoH7ao5nki2', params: { timeout: 2000 }, + expected: [], expectTimeout: true } } } +/** + * @typedef {object} Store + * @property {(data: Uint8Array) => Promise} putData + * @property {(links: { name: string, cid: string }[]) => Promise} putLinks + */ + +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {any} node + */ function loadPbContent (ipfs, node) { + /** + * @type {Store} + */ const store = { putData: (data) => { return ipfs.block.put( @@ -341,7 +367,14 @@ function loadPbContent (ipfs, node) { return loadContent(ipfs, store, node) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {any} node + */ function loadDagContent (ipfs, node) { + /** + * @type {Store} + */ const store = { putData: (data) => { const inner = new UnixFS({ type: 'file', data: data }) @@ -352,6 +385,7 @@ function loadDagContent (ipfs, node) { return ipfs.block.put(serialized) }, putLinks: (links) => { + /** @type {Record} */ const obj = {} for (const { name, cid } of links) { obj[name] = CID.parse(cid) @@ -362,6 +396,12 @@ function 
loadDagContent (ipfs, node) { return loadContent(ipfs, store, node) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {Store} store + * @param {any} node + * @returns {Promise} + */ async function loadContent (ipfs, store, node) { if (node instanceof Uint8Array) { return store.putData(node) @@ -387,4 +427,6 @@ async function loadContent (ipfs, store, node) { return store.putLinks(res) } + + throw new Error('Please pass either data or object') } diff --git a/packages/interface-ipfs-core/src/repo/gc.js b/packages/interface-ipfs-core/src/repo/gc.js index 8d0a2a5f1a..5de59c6121 100644 --- a/packages/interface-ipfs-core/src/repo/gc.js +++ b/packages/interface-ipfs-core/src/repo/gc.js @@ -8,37 +8,52 @@ const drain = require('it-drain') const { CID } = require('multiformats/cid') const { base64 } = require('multiformats/bases/base64') +/** + * @param {import('ipfs-core-types').IPFS} ipfs + */ async function getBaseEncodedMultihashes (ipfs) { const refs = await all(ipfs.refs.local()) return refs.map(r => base64.encode(CID.parse(r.ref).multihash.bytes)) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + */ async function shouldHaveRef (ipfs, cid) { return expect(getBaseEncodedMultihashes(ipfs)).to.eventually.include(base64.encode(cid.multihash.bytes)) } +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + */ async function shouldNotHaveRef (ipfs, cid) { return expect(getBaseEncodedMultihashes(ipfs)).to.eventually.not.include(base64.encode(cid.multihash.bytes)) } -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.repo.gc', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs 
before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should run garbage collection', async () => { const res = await ipfs.add(uint8ArrayFromString('apples')) @@ -189,7 +204,7 @@ module.exports = (common, options) => { // The data should now be indirectly pinned const pins = await all(ipfs.pin.ls()) - expect(pins.find(p => p.cid.toString() === dataCid.toString()).type).to.eql('indirect') + expect(pins.find(p => p.cid.toString() === dataCid.toString())).to.have.property('type', 'indirect') // Run garbage collection await drain(ipfs.repo.gc()) diff --git a/packages/interface-ipfs-core/src/repo/stat.js b/packages/interface-ipfs-core/src/repo/stat.js index ae9a002633..f62c0bab4e 100644 --- a/packages/interface-ipfs-core/src/repo/stat.js +++ b/packages/interface-ipfs-core/src/repo/stat.js @@ -4,23 +4,27 @@ const { expectIsRepo } = require('../stats/utils') const { getDescribe, getIt } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.repo.stat', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get repo stats', async () => { const res = await ipfs.repo.stat() diff --git a/packages/interface-ipfs-core/src/repo/version.js b/packages/interface-ipfs-core/src/repo/version.js index 3010424c82..f0ba32298c 100644 --- a/packages/interface-ipfs-core/src/repo/version.js +++ b/packages/interface-ipfs-core/src/repo/version.js @@ -3,23 +3,27 @@ const { 
getDescribe, getIt, expect } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.repo.version', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get the repo version', async () => { const version = await ipfs.repo.version() diff --git a/packages/interface-ipfs-core/src/stats/bitswap.js b/packages/interface-ipfs-core/src/stats/bitswap.js index 3189cf225a..eef99aa95a 100644 --- a/packages/interface-ipfs-core/src/stats/bitswap.js +++ b/packages/interface-ipfs-core/src/stats/bitswap.js @@ -4,23 +4,27 @@ const { getDescribe, getIt } = require('../utils/mocha') const { expectIsBitswap } = require('./utils') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.stats.bitswap', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get bitswap stats', async () => { const res = await ipfs.stats.bitswap() diff --git a/packages/interface-ipfs-core/src/stats/bw.js b/packages/interface-ipfs-core/src/stats/bw.js index 2b3a2d5479..e58101320b 100644 --- 
a/packages/interface-ipfs-core/src/stats/bw.js +++ b/packages/interface-ipfs-core/src/stats/bw.js @@ -6,26 +6,35 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const last = require('it-last') const all = require('it-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.stats.bw', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should get bandwidth stats ', async () => { const res = await last(ipfs.stats.bw()) + + if (!res) { + throw new Error('No bw stats returned') + } + expectIsBandwidth(null, res) }) diff --git a/packages/interface-ipfs-core/src/stats/repo.js b/packages/interface-ipfs-core/src/stats/repo.js index d6972d5399..f96dc59205 100644 --- a/packages/interface-ipfs-core/src/stats/repo.js +++ b/packages/interface-ipfs-core/src/stats/repo.js @@ -4,23 +4,27 @@ const { expectIsRepo } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.stats.repo', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => 
factory.clean()) it('should get repo stats', async () => { const res = await ipfs.stats.repo() diff --git a/packages/interface-ipfs-core/src/stats/utils.js b/packages/interface-ipfs-core/src/stats/utils.js index 32157eb251..afdcd215f4 100644 --- a/packages/interface-ipfs-core/src/stats/utils.js +++ b/packages/interface-ipfs-core/src/stats/utils.js @@ -2,10 +2,17 @@ const { expect } = require('../utils/mocha') +/** + * @param {any} n + */ const isBigInt = (n) => { return typeof n === 'bigint' } +/** + * @param {Error | null} err + * @param {import('ipfs-core-types/src/bitswap').Stats} stats + */ exports.expectIsBitswap = (err, stats) => { expect(err).to.not.exist() expect(stats).to.exist() @@ -30,6 +37,10 @@ exports.expectIsBitswap = (err, stats) => { expect(isBigInt(stats.dupDataReceived)).to.eql(true) } +/** + * @param {Error | null} err + * @param {import('ipfs-core-types/src/stats').BWResult} stats + */ exports.expectIsBandwidth = (err, stats) => { expect(err).to.not.exist() expect(stats).to.exist() @@ -43,6 +54,10 @@ exports.expectIsBandwidth = (err, stats) => { expect(isBigInt(stats.rateOut)).to.eql(true) } +/** + * @param {Error | null} err + * @param {import('ipfs-core-types/src/repo').StatResult} res + */ exports.expectIsRepo = (err, res) => { expect(err).to.not.exist() expect(res).to.exist() diff --git a/packages/interface-ipfs-core/src/swarm/addrs.js b/packages/interface-ipfs-core/src/swarm/addrs.js index ac5f6f41a2..6b9a431de1 100644 --- a/packages/interface-ipfs-core/src/swarm/addrs.js +++ b/packages/interface-ipfs-core/src/swarm/addrs.js @@ -7,12 +7,15 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} 
options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -20,17 +23,22 @@ module.exports = (common, options) => { describe('.swarm.addrs', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfsBId before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsBId = await ipfsB.id() + await ipfsA.swarm.connect(ipfsBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should get a list of node addresses', async () => { const peers = await ipfsA.swarm.addrs() diff --git a/packages/interface-ipfs-core/src/swarm/connect.js b/packages/interface-ipfs-core/src/swarm/connect.js index 2dae4cacb9..35173114ed 100644 --- a/packages/interface-ipfs-core/src/swarm/connect.js +++ b/packages/interface-ipfs-core/src/swarm/connect.js @@ -5,28 +5,36 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const 
describe = getDescribe(options) const it = getIt(options) describe('.swarm.connect', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfsBId before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsBId = await ipfsB.id() }) - after(() => common.clean()) + after(() => factory.clean()) it('should connect to a peer', async () => { let peers @@ -34,7 +42,7 @@ module.exports = (common, options) => { peers = await ipfsA.swarm.peers() expect(peers).to.have.length(0) - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + await ipfsA.swarm.connect(ipfsBId.addresses[0]) peers = await ipfsA.swarm.peers() expect(peers).to.have.length.above(0) diff --git a/packages/interface-ipfs-core/src/swarm/disconnect.js b/packages/interface-ipfs-core/src/swarm/disconnect.js index d70c6edd76..216b1943a5 100644 --- a/packages/interface-ipfs-core/src/swarm/disconnect.js +++ b/packages/interface-ipfs-core/src/swarm/disconnect.js @@ -5,12 +5,15 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) 
const it = getIt(options) @@ -18,20 +21,25 @@ module.exports = (common, options) => { describe('.swarm.disconnect', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfsBId before(async () => { - ipfsA = (await common.spawn({ type: 'proc', ipfsOptions })).api + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api // webworkers are not dialable because webrtc is not available - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsBId = await ipfsB.id() }) beforeEach(async () => { - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + await ipfsA.swarm.connect(ipfsBId.addresses[0]) }) - after(() => common.clean()) + after(() => factory.clean()) it('should disconnect from a peer', async () => { let peers @@ -39,7 +47,7 @@ module.exports = (common, options) => { peers = await ipfsA.swarm.peers() expect(peers).to.have.length.above(0) - await ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0]) + await ipfsA.swarm.disconnect(ipfsBId.addresses[0]) peers = await ipfsA.swarm.peers() expect(peers).to.have.length(0) diff --git a/packages/interface-ipfs-core/src/swarm/local-addrs.js b/packages/interface-ipfs-core/src/swarm/local-addrs.js index 45da62461d..56895e147a 100644 --- a/packages/interface-ipfs-core/src/swarm/local-addrs.js +++ b/packages/interface-ipfs-core/src/swarm/local-addrs.js @@ -4,32 +4,36 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const { isWebWorker } = require('ipfs-utils/src/env') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = 
(factory, options) => { const describe = getDescribe(options) const it = getIt(options) describe('.swarm.localAddrs', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs before(async () => { - ipfs = (await common.spawn()).api + ipfs = (await factory.spawn()).api }) - after(() => common.clean()) + after(() => factory.clean()) it('should list local addresses the node is listening on', async () => { const multiaddrs = await ipfs.swarm.localAddrs() expect(multiaddrs).to.be.an.instanceOf(Array) - if (isWebWorker && common.opts.type === 'proc') { + if (isWebWorker && factory.opts.type === 'proc') { expect(multiaddrs).to.have.lengthOf(0) } else { expect(multiaddrs).to.not.be.empty() diff --git a/packages/interface-ipfs-core/src/swarm/peers.js b/packages/interface-ipfs-core/src/swarm/peers.js index aab9ce9cbe..c3cfee87ab 100644 --- a/packages/interface-ipfs-core/src/swarm/peers.js +++ b/packages/interface-ipfs-core/src/swarm/peers.js @@ -8,12 +8,15 @@ const { isBrowser, isWebWorker } = require('ipfs-utils/src/env') const { getDescribe, getIt, expect } = require('../utils/mocha') const getIpfsOptions = require('../utils/ipfs-options-websockets-filter-all') -/** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** - * @param {Factory} common + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory * @param {Object} options */ -module.exports = (common, options) => { +module.exports = (factory, options) => { const ipfsOptions = getIpfsOptions() const describe = getDescribe(options) const it = getIt(options) @@ -21,19 +24,24 @@ module.exports = (common, options) => { describe('.swarm.peers', function () { this.timeout(80 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfsA + /** @type {import('ipfs-core-types').IPFS} */ let ipfsB + /** @type {import('ipfs-core-types/src/root').IDResult} */ + let ipfsBId before(async () => { - ipfsA = (await common.spawn({ type: 'proc', 
ipfsOptions })).api - ipfsB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + ipfsA = (await factory.spawn({ type: 'proc', ipfsOptions })).api + ipfsB = (await factory.spawn({ type: isWebWorker ? 'go' : undefined })).api + ipfsBId = await ipfsB.id() + await ipfsA.swarm.connect(ipfsBId.addresses[0]) /* TODO: Seen if we still need this after this is fixed https://github.com/ipfs/js-ipfs/issues/2601 gets resolved */ // await delay(60 * 1000) // wait for open streams in the connection available }) - after(() => common.clean()) + after(() => factory.clean()) it('should list peers this node is connected to', async () => { const peers = await ipfsA.swarm.peers() @@ -70,6 +78,10 @@ module.exports = (common, options) => { // expect(peer).to.have.a.property('streams') }) + /** + * @param {string | string[]} addrs + * @returns + */ function getConfig (addrs) { addrs = Array.isArray(addrs) ? addrs : [addrs] @@ -89,9 +101,10 @@ module.exports = (common, options) => { } it('should list peers only once', async () => { - const nodeA = (await common.spawn({ type: 'proc', ipfsOptions })).api - const nodeB = (await common.spawn({ type: isWebWorker ? 'go' : undefined })).api - await nodeA.swarm.connect(nodeB.peerId.addresses[0]) + const nodeA = (await factory.spawn({ type: 'proc', ipfsOptions })).api + const nodeB = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined })).api + const nodeBId = await nodeB.id() + await nodeA.swarm.connect(nodeBId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() const peersB = await nodeB.swarm.peers() @@ -101,31 +114,32 @@ module.exports = (common, options) => { it('should list peers only once even if they have multiple addresses', async () => { // TODO: Change to port 0, needs: https://github.com/ipfs/interface-ipfs-core/issues/152 - const config = getConfig(isBrowser && common.opts.type !== 'go' + const config = getConfig(isBrowser && factory.opts.type !== 'go' ? [ - process.env.SIGNALA_SERVER, - process.env.SIGNALB_SERVER + `${process.env.SIGNALA_SERVER}`, + `${process.env.SIGNALB_SERVER}` ] : [ '/ip4/127.0.0.1/tcp/26545/ws', '/ip4/127.0.0.1/tcp/26546/ws' ]) - const nodeA = (await common.spawn({ + const nodeA = (await factory.spawn({ // browser nodes have webrtc-star addresses which can't be dialled by go so make the other // peer a js-ipfs node to get a tcp address that can be dialled. Also, webworkers are not // diable so don't use a in-proc node for webworkers - type: ((isBrowser && common.opts.type === 'go') || isWebWorker) ? 'js' : 'proc', + type: ((isBrowser && factory.opts.type === 'go') || isWebWorker) ? 'js' : 'proc', ipfsOptions })).api - const nodeB = (await common.spawn({ + const nodeAId = await nodeA.id() + const nodeB = (await factory.spawn({ type: isWebWorker ? 
'go' : undefined, ipfsOptions: { config } })).api - await nodeB.swarm.connect(nodeA.peerId.addresses[0]) + await nodeB.swarm.connect(nodeAId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() diff --git a/packages/interface-ipfs-core/src/utils/blockstore-adapter.js b/packages/interface-ipfs-core/src/utils/blockstore-adapter.js index b635c2f123..8cbd2c8bb6 100644 --- a/packages/interface-ipfs-core/src/utils/blockstore-adapter.js +++ b/packages/interface-ipfs-core/src/utils/blockstore-adapter.js @@ -7,12 +7,18 @@ const dagCbor = require('@ipld/dag-cbor') const { sha256 } = require('multiformats/hashes/sha2') const uint8ArrayToString = require('uint8arrays/to-string') +/** + * @type {Record} + */ const formats = { [raw.code]: raw.name, [dagPb.code]: dagPb.name, [dagCbor.code]: dagCbor.name } +/** + * @type {Record} + */ const hashes = { [sha256.code]: sha256.name } @@ -28,7 +34,7 @@ class IPFSBlockstore extends BlockstoreAdapter { } /** - * @param {import(multiformats/cid).CID} cid + * @param {import('multiformats/cid').CID} cid * @param {Uint8Array} buf */ async put (cid, buf) { diff --git a/packages/interface-ipfs-core/src/utils/create-sharded-directory.js b/packages/interface-ipfs-core/src/utils/create-sharded-directory.js index 3631041aa7..5b11705cbb 100644 --- a/packages/interface-ipfs-core/src/utils/create-sharded-directory.js +++ b/packages/interface-ipfs-core/src/utils/create-sharded-directory.js @@ -4,6 +4,10 @@ const { expect } = require('./mocha') const isShardAtPath = require('./is-shard-at-path') const last = require('it-last') +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {number} [files] + */ module.exports = async (ipfs, files = 1001) => { const dirPath = `/sharded-dir-${Math.random()}` @@ -19,6 +23,10 @@ module.exports = async (ipfs, files = 1001) => { pin: false })) + if (!result) { + throw new Error('No result received from ipfs.addAll') + } + await ipfs.files.cp(`/ipfs/${result.cid}`, dirPath) await 
expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true('Tried to create a shared directory but the result was not a shard') diff --git a/packages/interface-ipfs-core/src/utils/create-two-shards.js b/packages/interface-ipfs-core/src/utils/create-two-shards.js index 62bbf95307..e84d8502f2 100644 --- a/packages/interface-ipfs-core/src/utils/create-two-shards.js +++ b/packages/interface-ipfs-core/src/utils/create-two-shards.js @@ -4,6 +4,10 @@ const { expect } = require('./mocha') const isShardAtPath = require('./is-shard-at-path') const last = require('it-last') +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {number} fileCount + */ const createTwoShards = async (ipfs, fileCount) => { const dirPath = `/sharded-dir-${Math.random()}` const files = new Array(fileCount).fill(0).map((_, index) => ({ @@ -20,19 +24,35 @@ const createTwoShards = async (ipfs, fileCount) => { })) const nextFile = someFiles.pop() - const { cid: dirWithAllFiles } = await last(ipfs.addAll(allFiles, { + if (!nextFile) { + throw new Error('No nextFile found') + } + + const res1 = await last(ipfs.addAll(allFiles, { // for js-ipfs - go-ipfs shards everything when sharding is turned on shardSplitThreshold: files.length - 1, preload: false, pin: false })) - const { cid: dirWithSomeFiles } = await last(ipfs.addAll(someFiles, { + + if (!res1) { + throw new Error('No result received from ipfs.addAll') + } + + const { cid: dirWithAllFiles } = res1 + const res2 = await last(ipfs.addAll(someFiles, { // for js-ipfs - go-ipfs shards everything when sharding is turned on shardSplitThreshold: files.length - 1, preload: false, pin: false })) + if (!res2) { + throw new Error('No result received from ipfs.addAll') + } + + const { cid: dirWithSomeFiles } = res2 + await expect(isShardAtPath(`/ipfs/${dirWithAllFiles}`, ipfs)).to.eventually.be.true() await expect(isShardAtPath(`/ipfs/${dirWithSomeFiles}`, ipfs)).to.eventually.be.true() diff --git a/packages/interface-ipfs-core/src/utils/index.js 
b/packages/interface-ipfs-core/src/utils/index.js index 7046a117a5..82bbc89736 100644 --- a/packages/interface-ipfs-core/src/utils/index.js +++ b/packages/interface-ipfs-core/src/utils/index.js @@ -9,6 +9,7 @@ const ONE_MEG = Math.pow(2, 20) exports.fixtures = Object.freeze({ directory: Object.freeze({ cid: CID.parse('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'), + /** @type {Record} */ files: Object.freeze({ 'pp.txt': loadFixture('test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), 'holmes.txt': loadFixture('test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), diff --git a/packages/interface-ipfs-core/src/utils/ipfs-options-websockets-filter-all.js b/packages/interface-ipfs-core/src/utils/ipfs-options-websockets-filter-all.js index 63b891bac7..3d94f36da1 100644 --- a/packages/interface-ipfs-core/src/utils/ipfs-options-websockets-filter-all.js +++ b/packages/interface-ipfs-core/src/utils/ipfs-options-websockets-filter-all.js @@ -1,6 +1,8 @@ 'use strict' +// @ts-ignore no types const WS = require('libp2p-websockets') +// @ts-ignore no types const filters = require('libp2p-websockets/src/filters') const transportKey = WS.prototype[Symbol.toStringTag] diff --git a/packages/interface-ipfs-core/src/utils/is-shard-at-path.js b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js index a0a48c63c7..28f9645c21 100644 --- a/packages/interface-ipfs-core/src/utils/is-shard-at-path.js +++ b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js @@ -2,6 +2,10 @@ const { UnixFS } = require('ipfs-unixfs') +/** + * @param {string} path + * @param {import('ipfs-core-types').IPFS} ipfs + */ module.exports = async (path, ipfs) => { const stats = await ipfs.files.stat(path) const { value: node } = await ipfs.dag.get(stats.cid) diff --git a/packages/interface-ipfs-core/src/utils/mocha.js b/packages/interface-ipfs-core/src/utils/mocha.js index de96e039a2..f4698f887f 100644 --- a/packages/interface-ipfs-core/src/utils/mocha.js +++ 
b/packages/interface-ipfs-core/src/utils/mocha.js @@ -5,11 +5,27 @@ const { expect } = require('aegir/utils/chai') module.exports.expect = expect -const isObject = (o) => Object.prototype.toString.call(o) === '[object Object]' - -// Get a "describe" function that is optionally 'skipped' or 'onlyed' -// If skip/only are boolean true, or an object with a reason property, then we -// want to skip/only the whole suite +/** + * @typedef {object} Skip + * @property {string} [name] + * @property {string} [reason] + */ + +/** + * @param {any} o + * @returns {o is Skip} + */ +const isSkip = (o) => Object.prototype.toString.call(o) === '[object Object]' && (o.name || o.reason) + +/** + * Get a "describe" function that is optionally 'skipped' or 'onlyed' + * If skip/only are boolean true, or an object with a reason property, then we + * want to skip/only the whole suite + * + * @param {object} [config] + * @param {boolean | Skip | (string | Skip)[]} [config.skip] + * @param {boolean} [config.only] + */ function getDescribe (config) { if (config) { if (config.skip === true) { @@ -20,30 +36,46 @@ function getDescribe (config) { return describe.only // eslint-disable-line } - if (isObject(config.skip)) { - if (!config.skip.reason) { - return describe.skip - } + if (Array.isArray(config.skip)) { + const skipArr = config.skip + /** + * @param {string} name + * @param {*} impl + */ const _describe = (name, impl) => { - describe.skip(`${name} (${config.skip.reason})`, impl) + const skip = skipArr.find(skip => { + if (typeof skip === 'string') { + return skip === name + } + + return skip.name === name + }) + + if (skip) { + return describe.skip(`${name} (${typeof skip === 'string' ? 
'🤷' : skip.reason})`, impl) + } + + describe(name, impl) } _describe.skip = describe.skip _describe.only = describe.only // eslint-disable-line return _describe - } - - if (Array.isArray(config.skip)) { - const _describe = (name, impl) => { - const skip = config.skip.find(skip => skip === name || skip.name === name) + } else if (isSkip(config.skip)) { + const skip = config.skip - if (skip) { - return describe.skip(`${name} (${skip.reason})`, impl) - } + if (!skip.reason) { + return describe.skip + } - describe(name, impl) + /** + * @param {string} name + * @param {*} impl + */ + const _describe = (name, impl) => { + describe.skip(`${name} (${skip.reason})`, impl) } _describe.skip = describe.skip @@ -58,18 +90,29 @@ function getDescribe (config) { module.exports.getDescribe = getDescribe -// Get an "it" function that is optionally 'skipped' or 'onlyed' -// If skip/only is an array, then we _might_ want to skip/only the specific -// test if one of the items in the array is the same as the test name or if one -// of the items in the array is an object with a name property that is the same -// as the test name. +/** + * Get an "it" function that is optionally 'skipped' or 'onlyed' + * If skip/only is an array, then we _might_ want to skip/only the specific + * test if one of the items in the array is the same as the test name or if one + * of the items in the array is an object with a name property that is the same + * as the test name. + * + * @param {object} [config] + * @param {boolean | Skip | (string | Skip)[]} [config.skip] + * @param {boolean} [config.only] + */ function getIt (config) { if (!config) return it + /** + * @param {string} name + * @param {*} impl + * @returns + */ const _it = (name, impl) => { if (Array.isArray(config.skip)) { const skip = config.skip - .map((s) => isObject(s) ? s : { name: s }) + .map((s) => isSkip(s) ? 
s : { name: s, reason: '🤷' }) .find((s) => s.name === name) if (skip) { @@ -80,7 +123,7 @@ function getIt (config) { if (Array.isArray(config.only)) { const only = config.only - .map((o) => isObject(o) ? o : { name: o }) + .map((o) => isSkip(o) ? o : { name: o, reason: '🤷' }) .find((o) => o.name === name) if (only) { diff --git a/packages/interface-ipfs-core/src/utils/suite.js b/packages/interface-ipfs-core/src/utils/suite.js index ce94029874..44b20649c7 100644 --- a/packages/interface-ipfs-core/src/utils/suite.js +++ b/packages/interface-ipfs-core/src/utils/suite.js @@ -1,16 +1,37 @@ 'use strict' -const isObject = (o) => Object.prototype.toString.call(o) === '[object Object]' - +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + * @typedef {object} Skip + * @property {string} [name] + * @property {string} [reason] + */ + +/** + * @param {any} o + * @returns {o is Skip} + */ +const isSkip = (o) => Object.prototype.toString.call(o) === '[object Object]' && (o.name || o.reason) + +/** + * @param {*} tests + * @param {*} [parent] + */ function createSuite (tests, parent) { - const suite = (createCommon, options) => { + /** + * @param {Factory} factory + * @param {object} [options] + * @param {boolean | Skip | (string | Skip)[]} [options.skip] + * @param {boolean} [options.only] + */ + const suite = (factory, options = {}) => { Object.keys(tests).forEach(t => { const opts = Object.assign({}, options) const suiteName = parent ? `${parent}.${t}` : t if (Array.isArray(opts.skip)) { const skip = opts.skip - .map((s) => isObject(s) ? s : { name: s }) + .map((s) => isSkip(s) ? 
s : { name: s, reason: '🤷' }) .find((s) => s.name === suiteName) if (skip) { @@ -24,7 +45,7 @@ function createSuite (tests, parent) { } } - tests[t](createCommon, opts) + tests[t](factory, opts) }) } diff --git a/packages/interface-ipfs-core/src/utils/test-timeout.js b/packages/interface-ipfs-core/src/utils/test-timeout.js index 62edda326a..2dbc2d44e2 100644 --- a/packages/interface-ipfs-core/src/utils/test-timeout.js +++ b/packages/interface-ipfs-core/src/utils/test-timeout.js @@ -2,6 +2,10 @@ const drain = require('it-drain') +/** + * @param {*} fn + * @returns {Promise} + */ module.exports = (fn) => { return new Promise((resolve, reject) => { // some operations are either synchronous so cannot time out, or complete during @@ -15,7 +19,7 @@ module.exports = (fn) => { res = drain(res) } - res.then((result) => { + res.then((/** @type {*} */ result) => { const timeTaken = Date.now() - start if (timeTaken < 100) { @@ -26,7 +30,7 @@ module.exports = (fn) => { } reject(new Error(`API call did not time out after ${timeTaken}ms, got ${JSON.stringify(result, null, 2)}`)) - }, (err) => { + }, (/** @type {Error} */ err) => { if (err.name === 'TimeoutError') { return resolve() } diff --git a/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js index 4b919e1f53..bfb20f7870 100644 --- a/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js +++ b/packages/interface-ipfs-core/src/utils/traverse-leaf-nodes.js @@ -1,6 +1,19 @@ 'use strict' +/** + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('multiformats/cid').CID} CID + */ + +/** + * @param {import('ipfs-core-types').IPFS} ipfs + * @param {CID} cid + */ module.exports = async function * traverseLeafNodes (ipfs, cid) { + /** + * @param {import('multiformats/cid').CID} cid + * @returns {AsyncIterable<{ node: PBNode, cid: CID }>} + */ async function * traverse (cid) { const { value: node } = await ipfs.dag.get(cid) diff --git 
a/packages/interface-ipfs-core/src/utils/wait-for.js b/packages/interface-ipfs-core/src/utils/wait-for.js index 283dddc20d..8cf7044400 100644 --- a/packages/interface-ipfs-core/src/utils/wait-for.js +++ b/packages/interface-ipfs-core/src/utils/wait-for.js @@ -3,10 +3,17 @@ const delay = require('delay') const errCode = require('err-code') -// Wait for async function `test` to resolve true or timeout after -// options.timeout milliseconds. +/** + * Wait for async function `test` to resolve true or timeout after options.timeout milliseconds. + * + * @param {() => Promise | boolean} test + * @param {object} [options] + * @param {number} [options.timeout] + * @param {number} [options.interval] + * @param {string} [options.name] + */ module.exports = async function waitFor (test, options) { - options = Object.assign({ timeout: 5000, interval: 0, name: 'event' }, options) + const opts = Object.assign({ timeout: 5000, interval: 0, name: 'event' }, options) const start = Date.now() while (true) { @@ -14,10 +21,10 @@ module.exports = async function waitFor (test, options) { return } - if (Date.now() > start + options.timeout) { - throw errCode(new Error(`Timed out waiting for ${options.name}`), 'ERR_TIMEOUT') + if (Date.now() > start + opts.timeout) { + throw errCode(new Error(`Timed out waiting for ${opts.name}`), 'ERR_TIMEOUT') } - await delay(options.interval) + await delay(opts.interval) } } diff --git a/packages/interface-ipfs-core/tsconfig.json b/packages/interface-ipfs-core/tsconfig.json new file mode 100644 index 0000000000..e36425404c --- /dev/null +++ b/packages/interface-ipfs-core/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "types" + ], + "references": [ + { + "path": "../ipfs-core-types" + } + ] +} diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index a64892ed54..2614b3bd89 100644 --- a/packages/ipfs-cli/package.json 
+++ b/packages/ipfs-cli/package.json @@ -21,7 +21,7 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", @@ -48,8 +48,10 @@ "it-concat": "^2.0.0", "it-first": "^1.0.4", "it-glob": "0.0.13", + "it-map": "^1.0.5", "it-pipe": "^1.1.0", "it-split": "^0.0.1", + "it-tar": "^4.0.0", "jsondiffpatch": "^0.4.1", "libp2p-crypto": "^0.19.6", "mafmt": "^10.0.0", @@ -60,7 +62,6 @@ "pretty-bytes": "^5.4.1", "progress": "^2.0.3", "stream-to-it": "^0.2.2", - "streaming-iterables": "^6.0.0", "uint8arrays": "^2.1.6", "yargs": "^16.0.3" }, @@ -68,8 +69,10 @@ "@types/progress": "^2.0.3", "@types/yargs": "^16.0.0", "aegir": "^34.0.2", + "it-to-buffer": "^2.0.0", "nanoid": "^3.1.12", "ncp": "^2.0.0", + "pako": "^2.0.4", "peer-id": "^0.15.1", "rimraf": "^3.0.2", "sinon": "^11.1.1", diff --git a/packages/ipfs-cli/src/commands/get.js b/packages/ipfs-cli/src/commands/get.js index 2d09c5c05f..4cd993b1a9 100644 --- a/packages/ipfs-cli/src/commands/get.js +++ b/packages/ipfs-cli/src/commands/get.js @@ -5,16 +5,17 @@ const path = require('path') // @ts-ignore no types const toIterable = require('stream-to-it') const { pipe } = require('it-pipe') -const { map } = require('streaming-iterables') const { default: parseDuration } = require('parse-duration') const { stripControlCharacters } = require('../utils') +const { extract } = require('it-tar') +const map = require('it-map') module.exports = { command: 'get ', - describe: 'Fetch a file or directory with files references from an IPFS Path', + describe: 'Download IPFS objects', builder: { output: { @@ -30,6 +31,22 @@ module.exports = { timeout: { type: 'string', coerce: parseDuration + }, + archive: { + alias: 'a', + type: 'boolean', + desc: 'Output a TAR archive' + }, + compress: { + alias: 'C', + type: 'boolean', + desc: 'Compress 
the output with GZIP compression' + }, + compressionLevel: { + alias: ['l', 'compression-level'], + type: 'number', + desc: 'The level of compression (1-9)', + default: 6 } }, @@ -40,30 +57,75 @@ module.exports = { * @param {string} argv.output * @param {boolean} argv.force * @param {number} argv.timeout + * @param {boolean} argv.archive + * @param {boolean} argv.compress + * @param {number} argv.compressionLevel */ - async handler ({ ctx: { ipfs, print }, ipfsPath, output, force, timeout }) { + async handler ({ ctx: { ipfs, print }, ipfsPath, output, force, timeout, archive, compress, compressionLevel }) { print(`Saving file(s) ${stripControlCharacters(ipfsPath)}`) - for await (const file of ipfs.get(ipfsPath, { - timeout - })) { - const fullFilePath = path.join(output, file.path) + if (output.substring(0, output.length) !== output && !force) { + throw new Error(`File prefix invalid, would write to files outside of ${output}, pass --force to override`) + } - if (fullFilePath.substring(0, output.length) !== output && !force) { - throw new Error(`File prefix invalid, would write to files outside of ${output}, pass --force to override`) + if (archive || compress) { + if (output === process.cwd()) { + output = path.join(output, ipfsPath) } - if (file.type === 'file') { - await fs.promises.mkdir(path.join(output, path.dirname(file.path)), { recursive: true }) - await pipe( - file.content, - map(chunk => chunk.slice()), // BufferList to Buffer - toIterable.sink(fs.createWriteStream(fullFilePath)) - ) - } else { - // this is a dir - await fs.promises.mkdir(fullFilePath, { recursive: true }) - } + await fs.promises.mkdir(path.dirname(output), { recursive: true }) + await pipe( + ipfs.get(ipfsPath, { + timeout, + archive, + compress, + compressionLevel + }), + toIterable.sink(fs.createWriteStream(output)) + ) + + return } + + /** + * @type {any[]} + */ + await pipe( + ipfs.get(ipfsPath, { + timeout, + archive, + compress, + compressionLevel + }), + extract(), + async 
function extractTarball (source) { + for await (const { header, body } of source) { + const outputPath = path.join(output, header.name) + + if (outputPath.substring(0, output.length) !== output && !force) { + throw new Error(`File prefix invalid, would write to files outside of ${output}, pass --force to override`) + } + + if (header.type === 'file') { + await fs.promises.mkdir(path.dirname(outputPath), { recursive: true }) + await pipe( + body, + /** + * @param {AsyncIterable} source + */ + (source) => map(source, buf => buf.slice()), + toIterable.sink(fs.createWriteStream(outputPath)) + ) + } else if (header.type === 'directory') { + await fs.promises.mkdir(outputPath, { recursive: true }) + } else { + throw new Error(`Unknown tar entry type ${header.type}`) + } + + await fs.promises.chmod(outputPath, header.mode) + await fs.promises.utimes(outputPath, header.mtime, header.mtime) + } + } + ) } } diff --git a/packages/ipfs-cli/src/commands/ls.js b/packages/ipfs-cli/src/commands/ls.js index 268d658489..46bd2c1075 100644 --- a/packages/ipfs-cli/src/commands/ls.js +++ b/packages/ipfs-cli/src/commands/ls.js @@ -17,17 +17,6 @@ module.exports = { type: 'boolean', default: false }, - r: { - alias: 'recursive', - desc: 'List subdirectories recursively', - type: 'boolean', - default: false - }, - 'resolve-type': { - desc: 'Resolve linked objects to find out their types. 
(not implemented yet)', - type: 'boolean', - default: false // should be true when implemented - }, 'cid-base': { describe: 'Number base to display CIDs in.', type: 'string', @@ -43,12 +32,11 @@ module.exports = { * @param {object} argv * @param {import('../types').Context} argv.ctx * @param {string} argv.key - * @param {boolean} argv.recursive * @param {boolean} argv.headers * @param {string} argv.cidBase * @param {number} argv.timeout */ - async handler ({ ctx: { ipfs, print }, key, recursive, headers, cidBase, timeout }) { + async handler ({ ctx: { ipfs, print }, key, headers, cidBase, timeout }) { // replace multiple slashes key = key.replace(/\/(\/+)/g, '/') @@ -99,7 +87,7 @@ module.exports = { const base = await ipfs.bases.getBase(cidBase) - for await (const link of ipfs.ls(key, { recursive, timeout })) { + for await (const link of ipfs.ls(key, { timeout })) { const mode = link.mode != null ? formatMode(link.mode, link.type === 'dir') : '' const mtime = link.mtime != null ? formatMtime(link.mtime) : '-' const cid = link.cid.toString(base.encoder) @@ -115,7 +103,7 @@ module.exports = { } } - printLink(mode, mtime, cid, size, name, link.depth) + printLink(mode, mtime, cid, size, name) } } } diff --git a/packages/ipfs-cli/test/get.js b/packages/ipfs-cli/test/get.js index c4b73f3bb6..0eff974b39 100644 --- a/packages/ipfs-cli/test/get.js +++ b/packages/ipfs-cli/test/get.js @@ -9,9 +9,48 @@ const { CID } = require('multiformats/cid') const cli = require('./utils/cli') const sinon = require('sinon') const uint8ArrayFromString = require('uint8arrays/from-string') +const { pack } = require('it-tar') +const { pipe } = require('it-pipe') +const { gzip, inflate } = require('pako') +const map = require('it-map') +const toBuffer = require('it-to-buffer') const defaultOptions = { - timeout: undefined + timeout: undefined, + archive: undefined, + compress: undefined, + compressionLevel: 6 +} + +/** + * @param {import('it-tar').TarImportCandidate[]} files + */ +async function 
* tarballed (files) { + yield * pipe( + files, + pack(), + /** + * @param {AsyncIterable} source + */ + (source) => map(source, buf => buf.slice()) + ) +} + +/** + * @param {AsyncIterable} bytes + * @param {number} level + */ +async function * gzipped (bytes, level = 6) { + yield * pipe( + bytes, + async function * (source) { + const buf = await toBuffer(source) + + yield gzip(buf, { + level + }) + } + ) } describe('get', () => { @@ -26,13 +65,18 @@ describe('get', () => { }) it('should get file', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'file', - path: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), cid.toString()) await clean(outPath) @@ -41,19 +85,24 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${cid}\n`) - expect(fs.readFileSync(outPath)).to.deep.equal(buf) + expect(fs.readFileSync(outPath)).to.equalBytes(buf) await clean(outPath) }) it('get file with output option', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'file', - path: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), 'derp') await clean(outPath) @@ -62,19 +111,168 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${cid}\n`) - expect(fs.readFileSync(path.join(outPath, cid.toString()))).to.deep.equal(buf) + expect(fs.readFileSync(path.join(outPath, cid.toString()))).to.equalBytes(buf) + + await 
clean(outPath) + }) + + it('should get gzipped file', async () => { + ipfs.get.withArgs(cid.toString(), { + ...defaultOptions, + compress: true + }).returns( + gzipped( + async function * () { + yield buf + }() + ) + ) + + const outPath = path.join(process.cwd(), cid.toString()) + await clean(outPath) + + const out = await cli(`get ${cid} --compress`, { ipfs }) + expect(out) + .to.equal(`Saving file(s) ${cid}\n`) + + expect(inflate(fs.readFileSync(outPath))).to.equalBytes(buf) + + await clean(outPath) + }) + + it('should get gzipped file with short compress option', async () => { + ipfs.get.withArgs(cid.toString(), { + ...defaultOptions, + compress: true + }).returns( + gzipped( + async function * () { + yield buf + }() + ) + ) + + const outPath = path.join(process.cwd(), cid.toString()) + await clean(outPath) + + const out = await cli(`get ${cid} -C`, { ipfs }) + expect(out) + .to.equal(`Saving file(s) ${cid}\n`) + + expect(inflate(fs.readFileSync(outPath))).to.equalBytes(buf) + + await clean(outPath) + }) + + it('should get gzipped file with compression level', async () => { + const compressionLevel = 9 + + ipfs.get.withArgs(cid.toString(), { + ...defaultOptions, + compress: true, + compressionLevel + }).returns( + gzipped( + (async function * () { + yield buf + }()), + compressionLevel + ) + ) + + const outPath = path.join(process.cwd(), cid.toString()) + await clean(outPath) + + const out = await cli(`get ${cid} --compress --compression-level ${compressionLevel}`, { ipfs }) + expect(out) + .to.equal(`Saving file(s) ${cid}\n`) + + expect(inflate(fs.readFileSync(outPath))).to.equalBytes(buf) + + await clean(outPath) + }) + + it('should get gzipped file with short compression level', async () => { + const compressionLevel = 9 + + ipfs.get.withArgs(cid.toString(), { + ...defaultOptions, + compress: true, + compressionLevel + }).returns( + gzipped( + (async function * () { + yield buf + }()), + compressionLevel + ) + ) + + const outPath = path.join(process.cwd(), 
cid.toString()) + await clean(outPath) + + const out = await cli(`get ${cid} --compress -l ${compressionLevel}`, { ipfs }) + expect(out) + .to.equal(`Saving file(s) ${cid}\n`) + + expect(inflate(fs.readFileSync(outPath))).to.equalBytes(buf) + + await clean(outPath) + }) + + it('get gzipped directory', async () => { + ipfs.get.withArgs(cid.toString(), { + ...defaultOptions, + compress: true, + archive: true + }).returns( + gzipped( + tarballed([{ + header: { + name: `${cid}`, + type: 'directory', + size: 0 + } + }, { + header: { + name: `${cid}/foo.txt`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) + ) + + const outPath = path.join(process.cwd(), cid.toString()) + await clean(outPath) + + const out = await cli(`get ${cid} --archive true --compress true`, { ipfs }) + expect(out).to.eql( + `Saving file(s) ${cid}\n` + ) + + expect(fs.statSync(outPath).isFile()).to.be.true() + expect(fs.readFileSync(outPath).slice(0, 2)).to.equalBytes([0x1F, 0x8B]) // gzip magic bytes await clean(outPath) }) it('get file with short output option', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'file', - path: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), 'herp') await clean(outPath) @@ -83,16 +281,21 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${cid}\n`) - expect(fs.readFileSync(path.join(outPath, cid.toString()))).to.deep.equal(buf) + expect(fs.readFileSync(path.join(outPath, cid.toString()))).to.equalBytes(buf) await clean(outPath) }) it('get directory', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'dir', - path: cid.toString() - }]) + 
ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'directory', + size: 0 + } + }]) + ) const outPath = path.join(process.cwd(), cid.toString()) await clean(outPath) @@ -107,16 +310,24 @@ describe('get', () => { }) it('get recursively', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'dir', - path: cid.toString() - }, { - type: 'file', - path: `${cid}/foo.txt`, - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'directory', + size: 0 + } + }, { + header: { + name: `${cid}/foo.txt`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), cid.toString()) await clean(outPath) @@ -128,7 +339,7 @@ describe('get', () => { expect(fs.statSync(outPath).isDirectory()).to.be.true() expect(fs.statSync(path.join(outPath, 'foo.txt')).isFile()).to.be.true() - expect(fs.readFileSync(path.join(outPath, 'foo.txt'))).to.deep.equal(buf) + expect(fs.readFileSync(path.join(outPath, 'foo.txt'))).to.equalBytes(buf) await clean(outPath) }) @@ -137,13 +348,18 @@ describe('get', () => { ipfs.get.withArgs(cid.toString(), { ...defaultOptions, timeout: 1000 - }).returns([{ - type: 'file', - path: cid.toString(), - content: function * () { - yield buf - } - }]) + }).returns( + tarballed([{ + header: { + name: `${cid}`, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), cid.toString()) await clean(outPath) @@ -152,19 +368,24 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${cid}\n`) - expect(fs.readFileSync(outPath)).to.deep.equal(buf) + expect(fs.readFileSync(outPath)).to.equalBytes(buf) await clean(outPath) }) it('should not get file with path traversal characters that result in leaving the output 
directory', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'file', - path: '../foo.txt', - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: '../foo.txt', + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = path.join(process.cwd(), 'derp') @@ -172,13 +393,18 @@ describe('get', () => { }) it('should get file with path traversal characters that result in leaving the output directory when forced', async () => { - ipfs.get.withArgs(cid.toString(), defaultOptions).returns([{ - type: 'file', - path: '../foo.txt', - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(cid.toString(), defaultOptions).returns( + tarballed([{ + header: { + name: '../foo.txt', + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const dir = path.join(process.cwd(), 'derp') const outPath = path.join(process.cwd(), 'derp', 'herp') @@ -188,7 +414,7 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${cid}\n`) - expect(fs.readFileSync(path.join(dir, 'foo.txt'))).to.deep.equal(buf) + expect(fs.readFileSync(path.join(dir, 'foo.txt'))).to.equalBytes(buf) await clean(dir) }) @@ -202,13 +428,18 @@ describe('get', () => { const ipfsPath = `${cid}/foo/bar` const junkPath = `${cid}/foo\b/bar` - ipfs.get.withArgs(junkPath, defaultOptions).returns([{ - type: 'file', - path: junkPath, - content: function * () { - yield buf - } - }]) + ipfs.get.withArgs(junkPath, defaultOptions).returns( + tarballed([{ + header: { + name: junkPath, + type: 'file', + size: buf.length + }, + body: (async function * () { + yield buf + }()) + }]) + ) const outPath = `${process.cwd()}/${junkPath}` await clean(outPath) @@ -217,7 +448,7 @@ describe('get', () => { expect(out) .to.equal(`Saving file(s) ${ipfsPath}\n`) - 
expect(fs.readFileSync(outPath)).to.deep.equal(buf) + expect(fs.readFileSync(outPath)).to.equalBytes(buf) await clean(outPath) }) diff --git a/packages/ipfs-cli/test/ls.js b/packages/ipfs-cli/test/ls.js index bcf638bc13..339bd0b61c 100644 --- a/packages/ipfs-cli/test/ls.js +++ b/packages/ipfs-cli/test/ls.js @@ -9,7 +9,6 @@ const { base58btc } = require('multiformats/bases/base58') const { base64 } = require('multiformats/bases/base64') const defaultOptions = { - recursive: false, timeout: undefined } @@ -29,16 +28,14 @@ describe('ls', () => { mtime: null, cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', - name: 'blocks', - depth: 0 + name: 'blocks' }, { mode: 0o644, mtime: null, cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', - size: 3928, - depth: 0 + size: 3928 }]) ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { @@ -49,16 +46,14 @@ describe('ls', () => { mtime: null, cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', - name: 'blocks', - depth: 0 + name: 'blocks' }, { mode: 0o644, mtime: null, cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', - size: 3928, - depth: 0 + size: 3928 }]) ipfs.ls.withArgs('/ipfs/Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', defaultOptions).returns([{ @@ -66,16 +61,14 @@ describe('ls', () => { mtime: null, cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), type: 'dir', - name: 'blocks', - depth: 0 + name: 'blocks' }, { mode: 0o644, mtime: null, cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), type: 'file', name: 'config', - size: 3928, - depth: 0 + size: 3928 }]) ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z/blocks', defaultOptions).returns([{ @@ -84,52 +77,14 @@ describe('ls', () => { cid: CID.parse('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), type: 'file', name: 
'CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', - size: 10849, - depth: 0 + size: 10849 }, { mode: 0o644, mtime: null, cid: CID.parse('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), type: 'file', name: 'CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', - size: 10807, - depth: 0 - }]) - - ipfs.ls.withArgs('Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { - ...defaultOptions, - recursive: true - }).returns([{ - mode: 0o755, - mtime: null, - cid: CID.parse('QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT'), - type: 'dir', - name: 'blocks', - depth: 0 - }, { - mode: 0o644, - mtime: null, - cid: CID.parse('QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD'), - type: 'file', - name: 'CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', - size: 10849, - depth: 1 - }, { - mode: 0o644, - mtime: null, - cid: CID.parse('QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx'), - type: 'file', - name: 'CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', - size: 10807, - depth: 1 - }, { - mode: 0o644, - mtime: null, - cid: CID.parse('QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN'), - type: 'file', - name: 'config', - size: 3928, - depth: 0 + size: 10807 }]) ipfs.ls.withArgs('bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq', defaultOptions).returns([{ @@ -203,18 +158,6 @@ describe('ls', () => { ) }) - it('recursively follows folders, -r', async () => { - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - - const out = await cli('ls -r Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z', { ipfs }) - expect(out).to.eql( - 'drwxr-xr-x - QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT - blocks/\n' + - '-rw-r--r-- - QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD 10849 CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data\n' + - '-rw-r--r-- - QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx 10807 CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data\n' + - '-rw-r--r-- - 
QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN 3928 config\n' - ) - }) - it('should ls and print CIDs encoded in specified base', async () => { ipfs.bases.getBase.withArgs('base64').returns(base64) diff --git a/packages/ipfs-client/package.json b/packages/ipfs-client/package.json index 6aff91ecac..5eae443a89 100644 --- a/packages/ipfs-client/package.json +++ b/packages/ipfs-client/package.json @@ -26,7 +26,7 @@ "scripts": { "build": "aegir build", "test": "echo please run test:interface:client in the ipfs package instead", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i aegir -i rimraf" diff --git a/packages/ipfs-core-types/package.json b/packages/ipfs-core-types/package.json index a68fa02844..0c927f14df 100644 --- a/packages/ipfs-core-types/package.json +++ b/packages/ipfs-core-types/package.json @@ -7,7 +7,7 @@ "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-core-types#readme", "bugs": "https://github.com/ipfs/js-ipfs/issues", "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "aegir ts -p check" }, "files": [ diff --git a/packages/ipfs-core-types/src/config/profiles/index.d.ts b/packages/ipfs-core-types/src/config/profiles/index.d.ts index 146a90aa1a..327aabdd35 100644 --- a/packages/ipfs-core-types/src/config/profiles/index.d.ts +++ b/packages/ipfs-core-types/src/config/profiles/index.d.ts @@ -1,4 +1,5 @@ import type { AbortOptions } from '../../utils' +import type { Config } from '../' export interface API { /** @@ -21,8 +22,7 @@ export interface Profile { export interface ProfilesApplyOptions extends AbortOptions { dryRun?: boolean } - export interface ProfilesApplyResult { - original: object - updated: object + original: Config + updated: Config } diff --git a/packages/ipfs-core-types/src/dag/index.d.ts b/packages/ipfs-core-types/src/dag/index.d.ts index 
9545e4041a..523b8cc6c0 100644 --- a/packages/ipfs-core-types/src/dag/index.d.ts +++ b/packages/ipfs-core-types/src/dag/index.d.ts @@ -103,7 +103,7 @@ export interface API { * Import all blocks from one or more CARs and optionally recursively pin the roots identified * within the CARs. */ - import: (sources: AsyncIterable> | Iterable>, options?: ImportOptions & OptionExtension) => AsyncIterable + import: (sources: Iterable | AsyncIterable | AsyncIterable> | Iterable>, options?: ImportOptions & OptionExtension) => AsyncIterable } export interface GetOptions extends AbortOptions, PreloadOptions { diff --git a/packages/ipfs-core-types/src/pin/index.d.ts b/packages/ipfs-core-types/src/pin/index.d.ts index ff15758d98..640eab38d3 100644 --- a/packages/ipfs-core-types/src/pin/index.d.ts +++ b/packages/ipfs-core-types/src/pin/index.d.ts @@ -124,7 +124,9 @@ export interface AddAllOptions extends AbortOptions { lock?: boolean } -export interface AddInput { +export type AddInput = CID | AddInputWithOptions + +export interface AddInputWithOptions { /** * A CID to pin - nb. 
you must pass either `cid` or `path`, not both */ diff --git a/packages/ipfs-core-types/src/pubsub/index.d.ts b/packages/ipfs-core-types/src/pubsub/index.d.ts index 1a63a4ba4a..6acafdb718 100644 --- a/packages/ipfs-core-types/src/pubsub/index.d.ts +++ b/packages/ipfs-core-types/src/pubsub/index.d.ts @@ -38,7 +38,7 @@ export interface API { * await ipfs.pubsub.unsubscribe(topic); * ``` */ - unsubscribe: (topic: string, handler: MessageHandlerFn, options?: AbortOptions & OptionExtension) => Promise + unsubscribe: (topic: string, handler?: MessageHandlerFn, options?: AbortOptions & OptionExtension) => Promise /** * Publish a data message to a pubsub topic @@ -72,6 +72,8 @@ export interface API { * ``` */ peers: (topic: string, options?: AbortOptions & OptionExtension) => Promise + + setMaxListeners?: (max: number) => void } export interface Message { diff --git a/packages/ipfs-core-types/src/root.d.ts b/packages/ipfs-core-types/src/root.d.ts index 06be36d42f..a827f548ab 100644 --- a/packages/ipfs-core-types/src/root.d.ts +++ b/packages/ipfs-core-types/src/root.d.ts @@ -23,7 +23,7 @@ export interface API { * Fetch a file or an entire directory tree from IPFS that is addressed by a * valid IPFS Path */ - get: (ipfsPath: IPFSPath, options?: GetOptions & OptionExtension) => AsyncIterable + get: (ipfsPath: IPFSPath, options?: GetOptions & OptionExtension) => AsyncIterable /** * Lists a directory from IPFS that is addressed by a valid IPFS Path @@ -138,41 +138,16 @@ export interface API { isOnline: () => boolean } -export interface File { - readonly type: 'file' +export interface IPFSEntry { + readonly type: 'dir' | 'file' readonly cid: CID readonly name: string - - /** - * File path - */ readonly path: string - /** - * File content - */ - readonly content?: AsyncIterable - mode?: number - mtime?: Mtime - size: number - depth: number -} - -export interface Directory { - type: 'dir' - cid: CID - name: string - /** - * Directory path - */ - path: string mode?: number mtime?: 
Mtime size: number - depth: number } -export type IPFSEntry = File | Directory - export interface AddProgressFn { (bytes: number, path?: string): void } export interface AddOptions extends AbortOptions { @@ -281,11 +256,14 @@ export interface CatOptions extends AbortOptions, PreloadOptions { length?: number } -export interface GetOptions extends AbortOptions, PreloadOptions {} +export interface GetOptions extends AbortOptions, PreloadOptions { + archive?: boolean + compress?: boolean + compressionLevel?: number +} export interface ListOptions extends AbortOptions, PreloadOptions { - recursive?: boolean - includeContent?: boolean + } export interface IDOptions extends AbortOptions { diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index cf45c4528d..8adb13bce4 100644 --- a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -34,7 +34,7 @@ "test:electron": "aegir test -t electron-main", "test:electron-renderer": "aegir test -t electron-renderer", "test:node": "aegir test -t node", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i rimraf -i ipfs-core-types", "build": "aegir build --no-bundle" diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js index 15f3750f1c..c04cc99686 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js @@ -1,7 +1,7 @@ 'use strict' const normaliseContent = require('./normalise-content.browser') -const normaliseInput = require('./normalise-input') +const normalise = require('./normalise-input') /** * @typedef {import('ipfs-core-types/src/utils').ImportCandidateStream} ImportCandidateStream @@ -20,5 +20,11 @@ const normaliseInput = require('./normalise-input') * @param {ImportCandidateStream} input * 
@returns {AsyncGenerator} */ -// @ts-ignore -module.exports = (input) => normaliseInput(input, normaliseContent) +function normaliseInput (input) { + // @ts-ignore normaliseContent returns Blob and not AsyncIterator + return normalise(input, normaliseContent) +} + +module.exports = { + normaliseInput +} diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.js b/packages/ipfs-core-utils/src/files/normalise-input/index.js index 13b51b6099..7bb005f93f 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.js @@ -1,14 +1,14 @@ 'use strict' const normaliseContent = require('./normalise-content') -const normaliseInput = require('./normalise-input') +const normalise = require('./normalise-input') /** * @typedef {import('ipfs-core-types/src/utils').ImportCandidateStream} ImportCandidateStream * @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate */ -/* +/** * Transforms any of the `ipfs.add` input types into * * ``` @@ -18,7 +18,11 @@ const normaliseInput = require('./normalise-input') * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options * * @param {ImportCandidateStream} input - * @returns {AsyncGenerator} */ -// @ts-ignore TODO vmx 2021-03-30 enable again -module.exports = (input) => normaliseInput(input, normaliseContent) +function normaliseInput (input) { + return normalise(input, normaliseContent) +} + +module.exports = { + normaliseInput +} diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js index b9fd3ba864..63362bd207 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js @@ -13,7 +13,6 @@ const { const { parseMtime, parseMode -// @ts-ignore - TODO vmx 2021-03-30 enable again } = require('ipfs-unixfs') /** 
diff --git a/packages/ipfs-core-utils/src/pins/normalise-input.js b/packages/ipfs-core-utils/src/pins/normalise-input.js index cc7b9765d6..8f17423745 100644 --- a/packages/ipfs-core-utils/src/pins/normalise-input.js +++ b/packages/ipfs-core-utils/src/pins/normalise-input.js @@ -46,7 +46,7 @@ const { CID } = require('multiformats/cid') * @returns {AsyncIterable} */ // eslint-disable-next-line complexity -module.exports = async function * normaliseInput (input) { +async function * normaliseInput (input) { // must give us something if (input === null || input === undefined) { throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') @@ -151,3 +151,7 @@ function toPin (input) { return pin } + +module.exports = { + normaliseInput +} diff --git a/packages/ipfs-core-utils/test/files/normalise-input.spec.js b/packages/ipfs-core-utils/test/files/normalise-input.spec.js index ef8f1ae774..d4fc9686c5 100644 --- a/packages/ipfs-core-utils/test/files/normalise-input.spec.js +++ b/packages/ipfs-core-utils/test/files/normalise-input.spec.js @@ -10,10 +10,10 @@ const { File } = require('@web-std/file') const { Blob, ReadableStream } = globalThis const { isBrowser, isWebWorker, isElectronRenderer } = require('ipfs-utils/src/env') -let normalise = require('../../src/files/normalise-input') +let { normaliseInput } = require('../../src/files/normalise-input') if (isBrowser || isWebWorker || isElectronRenderer) { - normalise = require('../../src/files/normalise-input/index.browser') + normaliseInput = require('../../src/files/normalise-input/index.browser').normaliseInput } const STRING = () => 'hello world' @@ -46,7 +46,7 @@ async function verifyNormalisation (input) { } async function testContent (input) { - const result = await all(normalise(input)) + const result = await all(normaliseInput(input)) await verifyNormalisation(result) } diff --git a/packages/ipfs-core-utils/test/pins/normalise-input.spec.js 
b/packages/ipfs-core-utils/test/pins/normalise-input.spec.js index 541d69aac9..7d11df4207 100644 --- a/packages/ipfs-core-utils/test/pins/normalise-input.spec.js +++ b/packages/ipfs-core-utils/test/pins/normalise-input.spec.js @@ -3,7 +3,7 @@ /* eslint-env mocha */ const { expect } = require('aegir/utils/chai') -const normalise = require('../../src/pins/normalise-input') +const { normaliseInput } = require('../../src/pins/normalise-input') const all = require('it-all') const { CID } = require('multiformats/cid') @@ -13,7 +13,7 @@ const OBJECT_CID = () => ({ cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHb const OBJECT_PATH = () => ({ path: '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn/path/to/file.txt', recursive: true, metadata: { key: 'hello world' } }) async function verifyNormalisation (input, withOptions) { - const result = await all(normalise(input)) + const result = await all(normaliseInput(input)) expect(result).to.have.lengthOf(1) expect(result[0]).to.have.property('path') diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index 45660b98ce..d2575c3476 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -43,7 +43,7 @@ }, "scripts": { "build": "aegir build", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "aegir test", "test:node": "aegir test -t node", "test:browser": "aegir test -t browser", @@ -79,8 +79,8 @@ "ipfs-http-client": "^51.0.1", "ipfs-repo": "^11.0.1", "ipfs-unixfs": "^5.0.0", - "ipfs-unixfs-exporter": "^6.0.0", - "ipfs-unixfs-importer": "^8.0.0", + "ipfs-unixfs-exporter": "^6.0.2", + "ipfs-unixfs-importer": "^8.0.2", "ipfs-utils": "^8.1.4", "ipns": "^0.13.2", "is-domain-name": "^1.0.1", @@ -94,6 +94,8 @@ "it-peekable": "^1.0.2", "it-pipe": "^1.1.0", "it-pushable": "^1.4.2", + "it-tar": "^4.0.0", + "it-to-buffer": "^2.0.0", "just-safe-set": "^2.2.1", "libp2p": "^0.32.0", "libp2p-bootstrap": "^0.13.0", @@ -118,6 +120,7 @@ "multiformats": 
"^9.4.1", "native-abort-controller": "^1.0.3", "p-queue": "^6.6.1", + "pako": "^1.0.2", "parse-duration": "^1.0.0", "peer-id": "^0.15.1", "streaming-iterables": "^6.0.0", @@ -125,12 +128,14 @@ }, "devDependencies": { "@types/dlv": "^1.1.2", + "@types/pako": "^1.0.2", + "@types/rimraf": "^3.0.1", "aegir": "^34.0.2", "delay": "^5.0.0", "go-ipfs": "0.8.0", "interface-blockstore-tests": "^1.0.0", "interface-ipfs-core": "^0.148.0", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "iso-url": "^1.0.0", "nanoid": "^3.1.12", "p-defer": "^3.0.0", diff --git a/packages/ipfs-core/src/components/add-all/index.js b/packages/ipfs-core/src/components/add-all/index.js index 0ca65c6868..410baa3aa8 100644 --- a/packages/ipfs-core/src/components/add-all/index.js +++ b/packages/ipfs-core/src/components/add-all/index.js @@ -1,7 +1,7 @@ 'use strict' const { importer } = require('ipfs-unixfs-importer') -const normaliseAddInput = require('ipfs-core-utils/src/files/normalise-input/index') +const { normaliseInput } = require('ipfs-core-utils/src/files/normalise-input/index') const { parseChunkerString } = require('./utils') const { pipe } = require('it-pipe') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -84,7 +84,7 @@ module.exports = ({ repo, preload, options }) => { } const iterator = pipe( - normaliseAddInput(source), + normaliseInput(source), /** * @param {AsyncIterable} source */ diff --git a/packages/ipfs-core/src/components/dag/import.js b/packages/ipfs-core/src/components/dag/import.js index 4b095ad69a..788f521fac 100644 --- a/packages/ipfs-core/src/components/dag/import.js +++ b/packages/ipfs-core/src/components/dag/import.js @@ -36,6 +36,7 @@ module.exports = ({ repo }) => { } if (value) { + // @ts-ignore peekable.push(value) } @@ -48,6 +49,7 @@ module.exports = ({ repo }) => { // @ts-ignore cars = [peekable] } else { + // @ts-ignore cars = peekable } diff --git a/packages/ipfs-core/src/components/get.js 
b/packages/ipfs-core/src/components/get.js index df1421dc1f..cd0e003397 100644 --- a/packages/ipfs-core/src/components/get.js +++ b/packages/ipfs-core/src/components/get.js @@ -1,10 +1,18 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') +const { exporter, recursive } = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const { normalizeCidPath, mapFile } = require('../utils') +const { normalizeCidPath } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const { CID } = require('multiformats/cid') +const { pack } = require('it-tar') +const { pipe } = require('it-pipe') +const { gzip } = require('pako') +const map = require('it-map') +const toBuffer = require('it-to-buffer') + +// https://www.gnu.org/software/gzip/manual/gzip.html +const DEFAULT_COMPRESSION_LEVEL = 6 /** * @typedef {Object} Context @@ -18,6 +26,10 @@ module.exports = function ({ repo, preload }) { * @type {import('ipfs-core-types/src/root').API["get"]} */ async function * get (ipfsPath, options = {}) { + if (options.compressionLevel < 0 || options.compressionLevel > 9) { + throw errCode(new Error('Compression level must be between 1 and 9'), 'ERR_INVALID_PARAMS') + } + if (options.preload !== false) { let pathComponents @@ -31,13 +43,126 @@ module.exports = function ({ repo, preload }) { } const ipfsPathOrCid = CID.asCID(ipfsPath) || ipfsPath + const file = await exporter(ipfsPathOrCid, repo.blocks, options) + + if (file.type === 'file' || file.type === 'raw') { + const args = [] + + if (!options.compress || options.archive === true) { + args.push([{ + header: { + name: file.path, + mode: file.type === 'file' && file.unixfs.mode, + mtime: file.type === 'file' && file.unixfs.mtime ? 
new Date(file.unixfs.mtime.secs * 1000) : undefined, + size: file.size, + type: 'file' + }, + body: file.content() + }], + pack(), + /** + * @param {AsyncIterable} source + */ + (source) => map(source, buf => buf.slice()) + ) + } else { + args.push( + file.content + ) + } - for await (const file of exporter.recursive(ipfsPathOrCid, repo.blocks, options)) { - yield mapFile(file, { - ...options, - includeContent: true - }) + if (options.compress) { + args.push( + /** + * @param {AsyncIterable} source + */ + async function * (source) { + const buf = await toBuffer(source) + + yield gzip(buf, { + level: options.compressionLevel || DEFAULT_COMPRESSION_LEVEL + }) + } + ) + } + + // @ts-ignore cannot derive type + yield * pipe(...args) + + return } + + if (file.type === 'directory') { + /** @type {any[]} */ + const args = [ + recursive(ipfsPathOrCid, repo.blocks, options), + /** + * @param {AsyncIterable} source + */ + async function * (source) { + for await (const entry of source) { + /** @type {import('it-tar').TarImportCandidate} */ + const output = { + header: { + name: entry.path, + size: entry.size + } + } + + if (entry.type === 'file') { + output.header.type = 'file' + output.header.mode = entry.unixfs.mode != null ? entry.unixfs.mode : undefined + output.header.mtime = entry.unixfs.mtime ? new Date(entry.unixfs.mtime.secs * 1000) : undefined + output.body = entry.content() + } else if (entry.type === 'raw') { + output.header.type = 'file' + output.body = entry.content() + } else if (entry.type === 'directory') { + output.header.type = 'directory' + output.header.mode = entry.unixfs.mode != null ? entry.unixfs.mode : undefined + output.header.mtime = entry.unixfs.mtime ? 
new Date(entry.unixfs.mtime.secs * 1000) : undefined + } else { + throw errCode(new Error('Not a UnixFS node'), 'ERR_NOT_UNIXFS') + } + + yield output + } + }, + pack(), + /** + * @param {AsyncIterable} source + */ + (source) => map(source, buf => buf.slice()) + ] + + if (options.compress) { + if (!options.archive) { + throw errCode(new Error('file is not regular'), 'ERR_INVALID_PATH') + } + + if (options.compress) { + args.push( + /** + * @param {AsyncIterable} source + */ + async function * (source) { + const buf = await toBuffer(source) + + yield gzip(buf, { + level: options.compressionLevel || DEFAULT_COMPRESSION_LEVEL + }) + } + ) + } + } + + // @ts-ignore cannot derive type + yield * pipe(...args) + + return + } + + throw errCode(new Error('Not a UnixFS node'), 'ERR_NOT_UNIXFS') } return withTimeoutOption(get) diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index 05a861ea0a..a6e053ade5 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -55,6 +55,8 @@ const Multibases = require('ipfs-core-utils/src/multibases') * @typedef {import('../types').Print} Print * @typedef {import('./storage')} StorageAPI * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('multiformats/bases/interface').MultibaseCodec} MultibaseCodec */ class IPFS { @@ -77,13 +79,23 @@ class IPFS { // libp2p can be a function, while IPNS router config expects libp2p config const ipns = new IPNSAPI(options) + /** @type {MultihashHasher[]} */ + const multihashHashers = Object.values(hashes); + + (options.ipld && options.ipld.hashers ? options.ipld.hashers : []).forEach(hasher => multihashHashers.push(hasher)) + this.hashers = new Multihashes({ - hashers: Object.values(hashes).concat(options.ipld && options.ipld.hashers ? 
options.ipld.hashers : []), + hashers: multihashHashers, loadHasher: options.ipld && options.ipld.loadHasher }) + /** @type {MultibaseCodec[]} */ + const multibaseCodecs = Object.values(bases); + + (options.ipld && options.ipld.bases ? options.ipld.bases : []).forEach(base => multibaseCodecs.push(base)) + this.bases = new Multibases({ - bases: Object.values(bases).concat(options.ipld && options.ipld.bases ? options.ipld.bases : []), + bases: multibaseCodecs, loadBase: options.ipld && options.ipld.loadBase }) @@ -233,8 +245,13 @@ class IPFS { decode: (id) => id } + /** @type {BlockCodec[]} */ + const blockCodecs = Object.values(codecs); + + [dagPb, dagCbor, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) + const multicodecs = new Multicodecs({ - codecs: Object.values(codecs).concat([dagPb, dagCbor, id]).concat((options.ipld && options.ipld.codecs) || []), + codecs: blockCodecs, loadCodec: options.ipld && options.ipld.loadCodec }) diff --git a/packages/ipfs-core/src/components/ls.js b/packages/ipfs-core/src/components/ls.js index 60570cff46..57859c376e 100644 --- a/packages/ipfs-core/src/components/ls.js +++ b/packages/ipfs-core/src/components/ls.js @@ -1,6 +1,6 @@ 'use strict' -const { exporter, recursive } = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -26,32 +26,16 @@ module.exports = function ({ repo, preload }) { } const ipfsPathOrCid = CID.asCID(legacyPath) || legacyPath - const file = await exporter(ipfsPathOrCid, repo.blocks, options) if (file.type === 'file') { - yield mapFile(file, options) + yield mapFile(file) return } if (file.type === 'directory') { - if (options.recursive) { - for await (const child of recursive(file.cid, repo.blocks, options)) { - if (file.cid.toString() === 
child.cid.toString()) { - continue - } - - yield mapFile(child, options) - } - - return - } - for await (const child of file.content()) { - const entry = mapFile(child, options) - entry.depth-- - - yield entry + yield mapFile(child) } return diff --git a/packages/ipfs-core/src/components/pin/add-all.js b/packages/ipfs-core/src/components/pin/add-all.js index a9bd600090..4a9d964d63 100644 --- a/packages/ipfs-core/src/components/pin/add-all.js +++ b/packages/ipfs-core/src/components/pin/add-all.js @@ -3,7 +3,7 @@ const { resolvePath } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/pins/normalise-input') const { PinTypes } = require('ipfs-repo') /** diff --git a/packages/ipfs-core/src/components/pin/ls.js b/packages/ipfs-core/src/components/pin/ls.js index b6927cc556..3284a847bd 100644 --- a/packages/ipfs-core/src/components/pin/ls.js +++ b/packages/ipfs-core/src/components/pin/ls.js @@ -2,7 +2,7 @@ 'use strict' const { PinTypes } = require('ipfs-repo') -const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const errCode = require('err-code') diff --git a/packages/ipfs-core/src/components/pin/rm-all.js b/packages/ipfs-core/src/components/pin/rm-all.js index c282d3fbf2..26c8b97b01 100644 --- a/packages/ipfs-core/src/components/pin/rm-all.js +++ b/packages/ipfs-core/src/components/pin/rm-all.js @@ -1,6 +1,6 @@ 'use strict' -const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath } = require('../../utils') const withTimeoutOption = 
require('ipfs-core-utils/src/with-timeout-option') const { PinTypes } = require('ipfs-repo') diff --git a/packages/ipfs-core/src/index.js b/packages/ipfs-core/src/index.js index 29b3e93d19..8e9746ded8 100644 --- a/packages/ipfs-core/src/index.js +++ b/packages/ipfs-core/src/index.js @@ -12,6 +12,18 @@ const { create } = require('./components') /** * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('./types').Options} Options + * @typedef {import('./types').Libp2pFactoryFn} Libp2pFactoryFn + * @typedef {import('./types').Libp2pFactoryFnArgs} Libp2pFactoryFnArgs + * @typedef {import('./types').InitOptions} InitOptions + * @typedef {import('./types').RelayOptions} RelayOptions + * @typedef {import('./types').PreloadOptions} PreloadOptions + * @typedef {import('./types').ExperimentalOptions} ExperimentalOptions + * @typedef {import('./types').Preload} Preload + * @typedef {import('./types').MfsPreload} MfsPreload + * @typedef {import('./types').LoadBaseFn} LoadBaseFn + * @typedef {import('./types').LoadCodecFn} LoadCodecFn + * @typedef {import('./types').LoadHasherFn} LoadHasherFn + * @typedef {import('./types').IPLDOptions} IPLDOptions */ module.exports = { diff --git a/packages/ipfs-core/src/types.d.ts b/packages/ipfs-core/src/types.d.ts index de398f18dc..37bd06b61b 100644 --- a/packages/ipfs-core/src/types.d.ts +++ b/packages/ipfs-core/src/types.d.ts @@ -3,13 +3,16 @@ import type PeerId from 'peer-id' import type { Config as IPFSConfig } from 'ipfs-core-types/src/config' import type Libp2p, { Libp2pOptions } from 'libp2p' -import type IPFSRepo from 'ipfs-repo' +import type { IPFSRepo } from 'ipfs-repo' import type { ProgressCallback as MigrationProgressCallback } from 'ipfs-repo-migrations' import type Network, { Options as NetworkOptions } from './components/network' +import type { Datastore } from 'interface-datastore' + import type Service from './utils/service' import type { CID } from 'multiformats/cid' -import type { BlockCodec, MultibaseCodec 
} from 'multiformats/codecs/interface' +import type { BlockCodec } from 'multiformats/codecs/interface' +import type { MultibaseCodec } from 'multiformats/bases/interface' import type { MultihashHasher } from 'multiformats/hashes/interface' export interface Options { @@ -122,7 +125,17 @@ export interface Options { silent?: boolean } -export interface Libp2pFactoryFn { ({ libp2pOptions: Libp2pOptions, options: Options, config: IPFSConfig, datastore: Datastore, peerId: PeerId }): Libp2p } +export interface Libp2pFactoryFnArgs { + libp2pOptions: Libp2pOptions + options: Options + config: IPFSConfig + datastore: Datastore + peerId: PeerId +} + +export interface Libp2pFactoryFn { + (args: Libp2pFactoryFnArgs): Promise +} /** * On first run js-IPFS will initialize a repo which can be customized through this settings @@ -229,11 +242,6 @@ export interface MfsPreload { export type NetworkService = Service -export interface Block { - cid: CID - bytes: Uint8Array -} - export interface LoadBaseFn { (codeOrName: number | string): Promise> } export interface LoadCodecFn { (codeOrName: number | string): Promise> } export interface LoadHasherFn { (codeOrName: number | string): Promise } @@ -244,13 +252,5 @@ export interface IPLDOptions { loadHasher: LoadHasherFn bases: Array> codecs: Array> - hashers: Array> -} - -export interface BlockCodecStore { - getCodec: (codeOrName: number | string) => Promise> -} - -export interface MultihashHasherStore { - getHasher: (codeOrName: number | string) => Promise> + hashers: MultihashHasher[] } diff --git a/packages/ipfs-core/src/utils.js b/packages/ipfs-core/src/utils.js index aa40108b2e..6ffcd35f6b 100644 --- a/packages/ipfs-core/src/utils.js +++ b/packages/ipfs-core/src/utils.js @@ -69,7 +69,7 @@ const normalizeCidPath = (path) => { * * @param {import('ipfs-repo').IPFSRepo} repo * @param {import('ipfs-core-utils/src/multicodecs')} codecs - * @param {CID | string} ipfsPath - A CID or IPFS path + * @param {CID | string | Uint8Array} ipfsPath - 
A CID or IPFS path * @param {{ path?: string, signal?: AbortSignal }} [options] - Optional options passed directly to dag.resolve * @returns {Promise<{ cid: CID, remainderPath: string}>} */ @@ -122,10 +122,8 @@ const resolvePath = async function (repo, codecs, ipfsPath, options = {}) { * @typedef {import('ipfs-unixfs-exporter').UnixFSEntry} UnixFSEntry * * @param {UnixFSEntry} file - * @param {Object} [options] - * @param {boolean} [options.includeContent] */ -const mapFile = (file, options = {}) => { +const mapFile = (file) => { if (file.type !== 'file' && file.type !== 'directory' && file.type !== 'raw') { // file.type === object | identity not supported yet throw new Error(`Unknown node type '${file.type}'`) @@ -136,7 +134,6 @@ const mapFile = (file, options = {}) => { cid: file.cid, path: file.path, name: file.name, - depth: file.path.split('/').length, size: file.size, type: 'file' } @@ -158,13 +155,6 @@ const mapFile = (file, options = {}) => { } } - if (options.includeContent) { - if (file.type === 'file' || file.type === 'raw') { - // @ts-expect-error - content is readonly - output.content = file.content() - } - } - return output } diff --git a/packages/ipfs-core/test/add-all.spec.js b/packages/ipfs-core/test/add-all.spec.js index ed156ac1ff..ee0f25dccc 100644 --- a/packages/ipfs-core/test/add-all.spec.js +++ b/packages/ipfs-core/test/add-all.spec.js @@ -13,6 +13,7 @@ describe('add-all/utils', () => { }) it('handles a null chunker string', () => { + // @ts-expect-error null is not string | undefined const options = utils.parseChunkerString(null) expect(options.chunker).to.equal('fixed') }) @@ -26,20 +27,30 @@ describe('add-all/utils', () => { it('parses a rabin string without size', () => { const options = utils.parseChunkerString('rabin') expect(options.chunker).to.equal('rabin') - expect(options.avgChunkSize).to.equal(262144) + + if (options.chunker === 'rabin') { + expect(options.avgChunkSize).to.equal(262144) + } }) it('parses a rabin string with only 
avg size', () => { const options = utils.parseChunkerString('rabin-512') expect(options.chunker).to.equal('rabin') - expect(options.avgChunkSize).to.equal(512) + + if (options.chunker === 'rabin') { + expect(options.avgChunkSize).to.equal(512) + } }) it('parses a rabin string with min, avg, and max', () => { const options = utils.parseChunkerString('rabin-42-92-184') expect(options.chunker).to.equal('rabin') - expect(options.minChunkSize).to.equal(42) - expect(options.avgChunkSize).to.equal(92) + + if (options.chunker === 'rabin') { + expect(options.minChunkSize).to.equal(42) + expect(options.avgChunkSize).to.equal(92) + } + expect(options.maxChunkSize).to.equal(184) }) diff --git a/packages/ipfs-core/test/block-storage.spec.js b/packages/ipfs-core/test/block-storage.spec.js index 6f2c1d96c8..8736661313 100644 --- a/packages/ipfs-core/test/block-storage.spec.js +++ b/packages/ipfs-core/test/block-storage.spec.js @@ -10,15 +10,26 @@ const BlockStorage = require('../src/block-storage') * @typedef {import('interface-blockstore').Blockstore} Blockstore */ +class MockBitswap extends MemoryBlockstore { + /** + * @param {boolean} started + */ + constructor (started) { + super() + + this.isStarted = () => started + } +} + describe('block-storage', () => { describe('interface-blockstore (bitswap online)', () => { suite({ setup: () => { // bitswap forwards on to the blockstore so just // use the same instance to represent both - const blockstore = new MemoryBlockstore() - blockstore.isStarted = () => true + const blockstore = new MockBitswap(true) + // @ts-ignore MockBitswap is missing some properties return new BlockStorage(blockstore, blockstore) }, teardown: () => {} @@ -30,9 +41,9 @@ describe('block-storage', () => { setup: () => { // bitswap forwards on to the blockstore so just // use the same instance to represent both - const blockstore = new MemoryBlockstore() - blockstore.isStarted = () => false + const blockstore = new MockBitswap(false) + // @ts-ignore 
MockBitswap is missing some properties return new BlockStorage(blockstore, blockstore) }, teardown: () => {} diff --git a/packages/ipfs-core/test/bootstrapers.js b/packages/ipfs-core/test/bootstrapers.js index 5b4c1805bd..c10594f7e5 100644 --- a/packages/ipfs-core/test/bootstrapers.js +++ b/packages/ipfs-core/test/bootstrapers.js @@ -10,7 +10,9 @@ const createNode = require('./utils/create-node') * WebSockets Bootstrappers easily <3 */ describe('Check that a js-ipfs node can indeed contact the bootstrappers', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { diff --git a/packages/ipfs-core/test/config.spec.js b/packages/ipfs-core/test/config.spec.js index 3bd9196694..de652d7804 100644 --- a/packages/ipfs-core/test/config.spec.js +++ b/packages/ipfs-core/test/config.spec.js @@ -11,7 +11,9 @@ const bootstrapList = require('../src/runtime/config-nodejs.js')().Bootstrap describe('config', function () { this.timeout(10 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { diff --git a/packages/ipfs-core/test/create-node.spec.js b/packages/ipfs-core/test/create-node.spec.js index d0d4dba41f..71499c4536 100644 --- a/packages/ipfs-core/test/create-node.spec.js +++ b/packages/ipfs-core/test/create-node.spec.js @@ -7,13 +7,14 @@ const sinon = require('sinon') const { isNode } = require('ipfs-utils/src/env') const tmpDir = require('ipfs-utils/src/temp-dir') const PeerId = require('peer-id') -const { supportedKeys } = require('libp2p-crypto/src/keys') +const { keys: { supportedKeys } } = require('libp2p-crypto') const IPFS = require('../src') const defer = require('p-defer') const uint8ArrayToString = require('uint8arrays/to-string') const createTempRepo = require('./utils/create-repo') describe('create node', function () { + /** @type {import('ipfs-repo').IPFSRepo} */ let tempRepo beforeEach(async () => { @@ -60,7 +61,7 @@ 
describe('create node', function () { it('should create and initialize with algorithm', async () => { const ipfs = await IPFS.create({ - init: { algorithm: 'ed25519' }, + init: { algorithm: 'Ed25519' }, start: false, repo: tempRepo, config: { Addresses: { Swarm: [] } } @@ -68,7 +69,7 @@ describe('create node', function () { const id = await ipfs.id() const config = await ipfs.config.getAll() - const peerId = await PeerId.createFromPrivKey(config.Identity.PrivKey) + const peerId = await PeerId.createFromPrivKey(`${config.Identity?.PrivKey}`) expect(peerId.privKey).is.instanceOf(supportedKeys.ed25519.Ed25519PrivateKey) expect(id.id).to.equal(peerId.toB58String()) }) @@ -120,7 +121,7 @@ describe('create node', function () { const config = await node.config.getAll() expect(config.Identity).to.exist() - expect(config.Identity.PrivKey.length).is.below(1024) + expect(config.Identity?.PrivKey.length).is.below(1024) await node.stop() }) @@ -129,7 +130,7 @@ describe('create node', function () { this.timeout(30 * 1000) - sinon.spy(console, 'log') + const spy = sinon.spy(console, 'log') const ipfs = await IPFS.create({ silent: true, @@ -144,9 +145,9 @@ describe('create node', function () { }) // eslint-disable-next-line no-console - expect(console.log.called).to.be.false() + expect(spy.called).to.be.false() // eslint-disable-next-line no-console - console.log.restore() + spy.restore() await ipfs.stop() }) @@ -167,7 +168,7 @@ describe('create node', function () { }) const config = await node.config.getAll() - expect(config.Addresses.Swarm).to.eql(['/ip4/127.0.0.1/tcp/9977']) + expect(config.Addresses?.Swarm).to.eql(['/ip4/127.0.0.1/tcp/9977']) expect(config.Bootstrap).to.eql([]) await node.stop() }) @@ -188,7 +189,7 @@ describe('create node', function () { await expect(node.pubsub.peers('topic')) .to.eventually.be.rejected() - .with.a.property('code').that.equals('ERR_NOT_ENABLED') + .with.property('code').that.equals('ERR_NOT_ENABLED') await node.stop() }) @@ -217,6 +218,10 @@ 
describe('create node', function () { this.timeout(2 * 60 * 1000) let _nodeNumber = 0 + /** + * @param {import('ipfs-repo').IPFSRepo} repo + * @returns + */ function createNode (repo) { _nodeNumber++ return IPFS.create({ @@ -297,7 +302,7 @@ describe('create node', function () { spec: 1, config: { Identity: { - PeerId: id.toString(), + PeerID: id.toString(), PrivKey: uint8ArrayToString(id.marshalPrivKey(), 'base64pad') } }, diff --git a/packages/ipfs-core/test/exports.spec.js b/packages/ipfs-core/test/exports.spec.js index 14da7cff22..28c07fb801 100644 --- a/packages/ipfs-core/test/exports.spec.js +++ b/packages/ipfs-core/test/exports.spec.js @@ -8,7 +8,7 @@ const { multiaddr } = require('multiaddr') const PeerId = require('peer-id') const { expect } = require('aegir/utils/chai') -const Ipfs = require('../') +const Ipfs = require('../src') describe('exports', () => { it('should export the expected types and utilities', () => { diff --git a/packages/ipfs-core/test/init.spec.js b/packages/ipfs-core/test/init.spec.js index 93c1074708..abea71ac15 100644 --- a/packages/ipfs-core/test/init.spec.js +++ b/packages/ipfs-core/test/init.spec.js @@ -5,9 +5,8 @@ const { expect } = require('aegir/utils/chai') const { isNode } = require('ipfs-utils/src/env') const { CID } = require('multiformats/cid') -const { nanoid } = require('nanoid') const PeerId = require('peer-id') -const { supportedKeys } = require('libp2p-crypto/src/keys') +const { keys: { supportedKeys } } = require('libp2p-crypto') const createNode = require('./utils/create-node') const privateKey = 
'CAASqAkwggSkAgEAAoIBAQChVmiObYo6pkKrMSd3OzW1cTL+RDmX1rkETYGKWV9TPXMNgElFTYoYHqT9QZomj5RI8iUmHccjzqr4J0mV+E0NpvHHOLlmDZ82lAw2Zx7saUkeQWvC0S9Z0o3aTx2sSubZV53rSomkZgQH4fYTs4RERejV4ltzLFdzQQBwWrBvlagpPHUCxKDUCnE5oIzdbD26ltWViPBWr7TfotzC8Lyi/tceqCpHMUJGMbsVgypnlgpey07MBvs71dVh5LcRen/ztsQO6Yju4D3QgWoyD0SIUdJFvBzEwL9bSiA3QjUc/fkGd7EcdN5bebYOqAi4ZIiAMLp3i4+B8Tzq/acull43AgMBAAECggEBAIDgZE75o4SsEO9tKWht7L5OeXxxBUyMImkUfJkGQUZd/MzZIC5y/Q+9UvBW+gs5gCsw+onTGaM50Iq/32Ej4nE4XURVxIuH8BmJ86N1hlc010qK2cjajqeCsPulXT+m6XbOLYCpnv+q2idt0cL1EH/1FEPeOEztK8ION4qIdw36SoykfTx/RqtkKHtS01AwN82EOPbWk7huyQT5R5MsCZmRJXBFkpNtiL+8619BH2aVlghHO4NouF9wQjdz/ysVuyYg+3rX2cpGjuHDTZ6hVQiJD1lF6D+dua7UPyHYAG2iRQiKZmCjitt9ywzPxiRaYF/aZ02FEMWckZulR09axskCgYEAzjl6ER8WwxYHn4tHse+CrIIF2z5cscdrh7KSwd3Rse9hIIBDJ/0KkvoYd1IcWrS8ywLrRfSLIjEU9u7IN1m+IRVWJ61fXNqOHm9clAu6qNhCN6W2+JfxDkUygTwmsq0v3huO+qkiMQz+a4nAXJe8Utd36ywgPhVGxFa/7x1v1N0CgYEAyEdiYRFf1aQZcO7+B2FH+tkGJsB30VIBhcpG9EukuQUUulLHhScc/KRj+EFAACLdkTqlVI0xVYIWaaCXwoQCWKixjZ5mYPC+bBLgn4IoDS6XTdHtR7Vn3UUvGTKsM0/z4e8/0eSzGNCHoYez9IoBlPNic0sQuST4jzgS2RYnFCMCgYASWSzSLyjwTJp7CIJlg4Dl5l+tBRxsOOkJVssV8q2AnmLO6HqRKUNylkvs+eJJ88DEc0sJm1txvFo4KkCoJBT1jpduyk8szMlOTew3w99kvHEP0G+6KJKrCV8X/okW5q/WnC8ZgEjpglV0rfnugxWfbUpfIzrvKydzuqAzHzRfBQKBgQDANtKSeoxRjEbmfljLWHAure8bbgkQmfXgI7xpZdfXwqqcECpw/pLxXgycDHOSLeQcJ/7Y4RGCEXHVOk2sX+mokW6mjmmPjD4VlyCBtfcef6KzC1EBS3c9g9KqCln+fTOBmY7UsPu6SxiAzK7HeVP/Un8gS+Dm8DalrZlZQ8uJpQKBgF6mL/Xo/XUOiz2jAD18l8Y6s49bA9H2CoLpBGTV1LfY5yTFxRy4R3qnX/IzsKy567sbtkEFKJxplc/RzCQfrgbdj7k26SbKtHR3yERaFGRYq8UeAHeYC1/N19LF5BMQL4y5R4PJ1SFPeJCL/wXiMqs1maTqvKqtc4bbegNdwlxn' @@ -17,10 +16,16 @@ const secpPrivateKey = 'CAISIKCfwZsMEwmzLxGv9duM6j6YQzMx2V46+Yl3laV24Qus' describe('init', function () { if (!isNode) return + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {import('ipfs-repo').IPFSRepo} */ let repo + /** @type {() => Promise} */ let cleanup + /** + * @param {import('../src/types').InitOptions} options + */ const init = async (options) => { 
const res = await createNode({ init: options, @@ -48,7 +53,7 @@ describe('init', function () { }) it('should init successfully with a keychain pass', async () => { - await init({ bits: 512, pass: nanoid() }) + await init({ bits: 512 }) const res = await repo.exists() expect(res).to.equal(true) @@ -57,15 +62,15 @@ describe('init', function () { expect(config.Keychain).to.exist() - const peerId = await PeerId.createFromPrivKey(config.Identity.PrivKey) + const peerId = await PeerId.createFromPrivKey(`${config.Identity?.PrivKey}`) expect(peerId.privKey).is.instanceOf(supportedKeys.rsa.RsaPrivateKey) }) - it('should init with a key algorithm (ed25519)', async () => { - await init({ algorithm: 'ed25519' }) + it('should init with a key algorithm (Ed25519)', async () => { + await init({ algorithm: 'Ed25519' }) const config = await repo.config.getAll() - const peerId = await PeerId.createFromPrivKey(config.Identity.PrivKey) + const peerId = await PeerId.createFromPrivKey(`${config.Identity?.PrivKey}`) expect(peerId.privKey).is.instanceOf(supportedKeys.ed25519.Ed25519PrivateKey) }) @@ -73,7 +78,7 @@ describe('init', function () { await init({ algorithm: 'secp256k1' }) const config = await repo.config.getAll() - const peerId = await PeerId.createFromPrivKey(config.Identity.PrivKey) + const peerId = await PeerId.createFromPrivKey(`${config.Identity?.PrivKey}`) expect(peerId.privKey).is.instanceOf(supportedKeys.secp256k1.Secp256k1PrivateKey) }) @@ -83,28 +88,28 @@ describe('init', function () { await init({ bits: 1024 }) const config = await repo.config.getAll() - expect(config.Identity.PrivKey.length).is.above(256) + expect(config.Identity?.PrivKey.length).is.above(256) }) it('should allow a pregenerated key to be used', async () => { await init({ privateKey }) const config = await repo.config.getAll() - expect(config.Identity.PeerID).is.equal('QmRsooYQasV5f5r834NSpdUtmejdQcpxXkK6qsozZWEihC') + 
expect(config.Identity?.PeerID).is.equal('QmRsooYQasV5f5r834NSpdUtmejdQcpxXkK6qsozZWEihC') }) it('should allow a pregenerated ed25519 key to be used', async () => { await init({ privateKey: edPrivateKey }) const config = await repo.config.getAll() - expect(config.Identity.PeerID).is.equal('12D3KooWRm8J3iL796zPFi2EtGGtUJn58AG67gcqzMFHZnnsTzqD') + expect(config.Identity?.PeerID).is.equal('12D3KooWRm8J3iL796zPFi2EtGGtUJn58AG67gcqzMFHZnnsTzqD') }) it('should allow a pregenerated secp256k1 key to be used', async () => { await init({ privateKey: secpPrivateKey }) const config = await repo.config.getAll() - expect(config.Identity.PeerID).is.equal('16Uiu2HAm5qw8UyXP2RLxQUx5KvtSN8DsTKz8quRGqGNC3SYiaB8E') + expect(config.Identity?.PeerID).is.equal('16Uiu2HAm5qw8UyXP2RLxQUx5KvtSN8DsTKz8quRGqGNC3SYiaB8E') }) it('should write init docs', async () => { @@ -135,6 +140,6 @@ describe('init', function () { const config = await repo.config.getAll() expect(config.Bootstrap).to.be.empty() - expect(config.Discovery.MDNS.Enabled).to.be.true() + expect(config.Discovery?.MDNS?.Enabled).to.be.true() }) }) diff --git a/packages/ipfs-core/test/ipld.spec.js b/packages/ipfs-core/test/ipld.spec.js index e2adcad5a6..7f4ed9b81f 100644 --- a/packages/ipfs-core/test/ipld.spec.js +++ b/packages/ipfs-core/test/ipld.spec.js @@ -5,14 +5,18 @@ const { expect } = require('aegir/utils/chai') const createNode = require('./utils/create-node') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') +const Digest = require('multiformats/hashes/digest') describe('ipld', function () { this.timeout(10 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { + /** @type {import('multiformats/codecs/interface').BlockCodec<1337, string>} */ const customCodec = { name: 'custom-codec', code: 1337, @@ -20,10 +24,37 @@ describe('ipld', function () { decode: (buf) => 
uint8ArrayToString(buf) } + /** @type {import('multiformats/hashes/interface').MultihashHasher} */ + const customHasher = { + digest: (input) => Promise.resolve(Digest.create(1338, input)), + name: 'custom-hasher', + code: 1338 + } + + /** @type {import('multiformats/bases/interface').MultibaseCodec} */ + const customBase = { + name: 'custom-base', + prefix: '1339', + encoder: { + name: 'custom-base', + prefix: '1339', + encode: (input) => uint8ArrayToString(input) + }, + decoder: { + decode: (input) => uint8ArrayFromString(input) + } + } + const res = await createNode({ ipld: { codecs: [ customCodec + ], + hashers: [ + customHasher + ], + bases: [ + customBase ] } }) diff --git a/packages/ipfs-core/test/key-exchange.spec.js b/packages/ipfs-core/test/key-exchange.spec.js index abc48495a0..90d8310e37 100644 --- a/packages/ipfs-core/test/key-exchange.spec.js +++ b/packages/ipfs-core/test/key-exchange.spec.js @@ -9,9 +9,13 @@ const createNode = require('./utils/create-node') describe('key exchange', function () { this.timeout(20 * 1000) + /** @type {string} */ let selfPem const passwordPem = nanoid() + + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { diff --git a/packages/ipfs-core/test/libp2p.spec.js b/packages/ipfs-core/test/libp2p.spec.js index 0fc25dc610..3a3c40899c 100644 --- a/packages/ipfs-core/test/libp2p.spec.js +++ b/packages/ipfs-core/test/libp2p.spec.js @@ -9,6 +9,9 @@ const EE = require('events') const libp2pComponent = require('../src/components/libp2p') const { NOISE: Crypto } = require('libp2p-noise') +/** + * @type {import('libp2p-interfaces/src/transport/types').TransportFactory} + */ class DummyTransport { get [Symbol.toStringTag] () { return 'DummyTransport' @@ -37,9 +40,21 @@ describe('libp2p customization', function () { // Provide some extra time for ci since we're starting libp2p nodes in each test this.timeout(25 * 1000) + /** + * @type 
{import('interface-datastore').Datastore} + */ let datastore + /** + * @type {import('peer-id')} + */ let peerId + /** + * @type {import('ipfs-core-types/src/config').Config} + */ let testConfig + /** + * @type {import('libp2p') | null} + */ let libp2p before(async function () { @@ -75,15 +90,18 @@ describe('libp2p customization', function () { it('should allow for using a libp2p bundle', async () => { libp2p = await libp2pComponent({ options: { - libp2p: (opts) => { + /** @type {import('../src/types').Libp2pFactoryFn} */ + libp2p: async (opts) => { return Libp2p.create({ peerId: opts.peerId, + // @ts-ignore DummyTransport is not complete implementation modules: { transport: [DummyTransport], connEncryption: [Crypto] }, config: { relay: { enabled: false } } }) } }, peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: testConfig @@ -99,15 +117,18 @@ describe('libp2p customization', function () { it('should pass libp2p options to libp2p bundle function', async () => { libp2p = await libp2pComponent({ options: { - libp2p: (opts) => { + /** @type {import('../src/types').Libp2pFactoryFn} */ + libp2p: async (opts) => { return Libp2p.create({ peerId: opts.peerId, + // @ts-ignore DummyTransport is not complete implementation modules: { transport: [DummyTransport], connEncryption: [Crypto] }, config: { relay: { enabled: false } } }) } }, peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: testConfig @@ -125,6 +146,7 @@ describe('libp2p customization', function () { it('should use options by default', async () => { libp2p = await libp2pComponent({ peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: testConfig @@ -160,13 +182,16 @@ describe('libp2p customization', function () { libp2p = await 
libp2pComponent({ peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: testConfig, options: { libp2p: { modules: { + // @ts-ignore DummyTransport is not complete implementation transport: [DummyTransport], + // @ts-ignore DummyDiscovery is not complete implementation peerDiscovery: [DummyDiscovery] }, config: { relay: { enabled: false } }, @@ -197,6 +222,7 @@ describe('libp2p customization', function () { libp2p = await libp2pComponent({ peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: { @@ -216,11 +242,12 @@ describe('libp2p customization', function () { it('should select gossipsub as pubsub router', async () => { libp2p = await libp2pComponent({ peerId, + // @ts-ignore repo is not complete implementation repo: { datastore }, print: console.log, // eslint-disable-line no-console config: { ...testConfig, - Pubsub: { Router: 'gossipsub' } + Pubsub: { PubSubRouter: 'gossipsub' } } }) diff --git a/packages/ipfs-core/test/mfs-preload.spec.js b/packages/ipfs-core/test/mfs-preload.spec.js index 1ab979f5e7..1254d80c06 100644 --- a/packages/ipfs-core/test/mfs-preload.spec.js +++ b/packages/ipfs-core/test/mfs-preload.spec.js @@ -15,6 +15,9 @@ const fakeCid = async () => { return CID.createV0(mh) } +/** + * @param {CID[]} cids + */ const createMockFilesStat = (cids = []) => { let n = 0 return () => { @@ -23,15 +26,22 @@ const createMockFilesStat = (cids = []) => { } const createMockPreload = () => { + /** @type {import('../src/types').Preload & { cids: CID[] }} */ const preload = cid => preload.cids.push(cid) + preload.start = () => {} + preload.stop = () => {} preload.cids = [] + return preload } describe('MFS preload', () => { // CIDs returned from our mock files.stat function + /** @type {{ initial: CID, same: CID, updated: CID }} */ let testCids + /** @type {ReturnType} */ let mockPreload + 
/** @type {import('ipfs-core-types/src/files').API} */ let mockFiles beforeEach(async () => { @@ -43,6 +53,7 @@ describe('MFS preload', () => { updated: await fakeCid() } + // @ts-ignore not whole file api mockFiles = { stat: createMockFilesStat([testCids.initial, testCids.same, testCids.same, testCids.updated]) } }) diff --git a/packages/ipfs-core/test/name.spec.js b/packages/ipfs-core/test/name.spec.js index 403906a768..4455a9223b 100644 --- a/packages/ipfs-core/test/name.spec.js +++ b/packages/ipfs-core/test/name.spec.js @@ -20,56 +20,59 @@ const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' describe('name', function () { describe('republisher', function () { this.timeout(120 * 1000) + /** @type {IpnsRepublisher} */ let republisher afterEach(async () => { if (republisher) { await republisher.stop() - republisher = null } }) it('should republish entries', async function () { + // @ts-expect-error sinon.stub() is not complete publisher implementation republisher = new IpnsRepublisher(sinon.stub(), sinon.stub(), sinon.stub(), sinon.stub(), { initialBroadcastInterval: 200, broadcastInterval: 500 }) - republisher._republishEntries = sinon.stub() + const stub = republisher._republishEntries = sinon.stub() await republisher.start() - expect(republisher._republishEntries.calledOnce).to.equal(false) + expect(stub.calledOnce).to.equal(false) // Initial republish should happen after ~200ms await delay(300) - expect(republisher._republishEntries.calledOnce).to.equal(true) + expect(stub.calledOnce).to.equal(true) // Subsequent republishes should happen after ~700 await delay(600) - expect(republisher._republishEntries.calledTwice).to.equal(true) + expect(stub.calledTwice).to.equal(true) }) it('should not republish self key twice', async function () { const mockKeychain = { listKeys: () => Promise.resolve([{ name: 'self' }]) } + // @ts-expect-error sinon.stub() is not complete publisher implementation republisher = new IpnsRepublisher(sinon.stub(), 
sinon.stub(), sinon.stub(), mockKeychain, { initialBroadcastInterval: 100, broadcastInterval: 1000, pass: 'pass' }) - republisher._republishEntry = sinon.stub() + const stub = republisher._republishEntry = sinon.stub() await republisher.start() - expect(republisher._republishEntry.calledOnce).to.equal(false) + expect(stub.calledOnce).to.equal(false) // Initial republish should happen after ~100ms await delay(200) - expect(republisher._republishEntry.calledOnce).to.equal(true) + expect(stub.calledOnce).to.equal(true) }) it('should error if run republish again', async () => { + // @ts-expect-error sinon.stub() is not complete publisher implementation republisher = new IpnsRepublisher(sinon.stub(), sinon.stub(), sinon.stub(), sinon.stub(), { initialBroadcastInterval: 50, broadcastInterval: 100 @@ -80,38 +83,46 @@ describe('name', function () { await expect(republisher.start()) .to.eventually.be.rejected() - .with.a.property('code').that.equals('ERR_REPUBLISH_ALREADY_RUNNING') + .with.property('code').that.equals('ERR_REPUBLISH_ALREADY_RUNNING') }) }) describe('publisher', () => { it('should fail to publish if does not receive private key', () => { + // @ts-expect-error constructor needs args const publisher = new IpnsPublisher() + // @ts-expect-error invalid argument return expect(publisher.publish(null, ipfsRef)) .to.eventually.be.rejected() .with.property('code', 'ERR_INVALID_PRIVATE_KEY') }) it('should fail to publish if an invalid private key is received', () => { + // @ts-expect-error constructor needs args const publisher = new IpnsPublisher() + // @ts-expect-error invalid argument return expect(publisher.publish({ bytes: 'not that valid' }, ipfsRef)) .to.eventually.be.rejected() // .that.eventually.has.property('code', 'ERR_INVALID_PRIVATE_KEY') TODO: libp2p-crypto needs to throw err-code }) it('should fail to publish if _updateOrCreateRecord fails', async () => { + // @ts-expect-error constructor needs args const publisher = new IpnsPublisher() const err = new 
Error('error') const peerId = await PeerId.create() sinon.stub(publisher, '_updateOrCreateRecord').rejects(err) + // @ts-expect-error invalid argument return expect(publisher.publish(peerId.privKey, ipfsRef)) .to.eventually.be.rejectedWith(err) }) it('should fail to publish if _putRecordToRouting receives an invalid peer id', () => { + // @ts-expect-error constructor needs args const publisher = new IpnsPublisher() + // @ts-expect-error invalid argument return expect(publisher._putRecordToRouting(undefined, undefined)) .to.eventually.be.rejected() .with.property('code', 'ERR_INVALID_PEER_ID') @@ -122,9 +133,11 @@ describe('name', function () { const datastore = { get: sinon.stub().rejects(new Error('boom')) } + // @ts-expect-error routing is not complete implementation const publisher = new IpnsPublisher(routing, datastore) const peerId = await PeerId.create() + // @ts-expect-error invalid argument await expect(publisher.publish(peerId.privKey, ipfsRef)) .to.eventually.be.rejected() .with.property('code', 'ERR_DETERMINING_PUBLISHED_RECORD') @@ -138,9 +151,11 @@ describe('name', function () { get: sinon.stub().rejects(errCode(new Error('not found'), 'ERR_NOT_FOUND')), put: sinon.stub().rejects(new Error('error-unexpected')) } + // @ts-expect-error routing is not complete implementation const publisher = new IpnsPublisher(routing, datastore) const peerId = await PeerId.create() + // @ts-expect-error invalid argument await expect(publisher.publish(peerId.privKey, ipfsRef)) .to.eventually.be.rejected() .with.property('code', 'ERR_STORING_IN_DATASTORE') @@ -149,13 +164,14 @@ describe('name', function () { describe('resolver', () => { it('should resolve an inlined public key', async () => { - const peerId = await PeerId.create({ keyType: 'ed25519' }) + const peerId = await PeerId.create({ keyType: 'Ed25519' }) const value = `/ipfs/${peerId.toB58String()}` const record = await ipns.create(peerId.privKey, uint8ArrayFromString(value), 1, 10e3) const routing = { get: 
sinon.stub().returns(ipns.marshal(record)) } + // @ts-expect-error routing is not complete implementation const resolver = new IpnsResolver(routing) const resolved = await resolver.resolve(`/ipns/${peerId.toB58String()}`) @@ -163,13 +179,16 @@ describe('name', function () { }) it('should fail to resolve if the received name is not a string', () => { + // @ts-expect-error constructor needs args const resolver = new IpnsResolver() + // @ts-expect-error invalid argument return expect(resolver.resolve(false)) .to.eventually.be.rejected() .with.property('code', 'ERR_INVALID_NAME') }) it('should fail to resolve if receives an invalid ipns path', () => { + // @ts-expect-error constructor needs args const resolver = new IpnsResolver() return expect(resolver.resolve('ipns/')) .to.eventually.be.rejected() @@ -180,6 +199,7 @@ describe('name', function () { const routing = { get: sinon.stub().rejects(new Error('boom')) } + // @ts-expect-error routing is not complete implementation const resolver = new IpnsResolver(routing) const peerId = await PeerId.create() @@ -192,6 +212,7 @@ describe('name', function () { const routing = { get: sinon.stub().rejects(errCode(new Error('not found'), 'ERR_NOT_FOUND')) } + // @ts-expect-error routing is not complete implementation const resolver = new IpnsResolver(routing) const peerId = await PeerId.create() @@ -204,6 +225,7 @@ describe('name', function () { const routing = { get: sinon.stub().resolves('not-a-buffer') } + // @ts-expect-error routing is not complete implementation const resolver = new IpnsResolver(routing) const peerId = await PeerId.create() @@ -216,8 +238,11 @@ describe('name', function () { describe('routing config', function () { it('should use only the offline datastore by default', () => { const config = getIpnsRoutingConfig({ + // @ts-expect-error sinon.stub() is not complete implementation libp2p: sinon.stub(), + // @ts-expect-error sinon.stub() is not complete implementation repo: sinon.stub(), + // @ts-expect-error 
sinon.stub() is not complete implementation peerId: sinon.stub(), options: {} }) @@ -228,8 +253,11 @@ describe('name', function () { it('should use only the offline datastore if offline', () => { const config = getIpnsRoutingConfig({ + // @ts-expect-error sinon.stub() is not complete implementation libp2p: sinon.stub(), + // @ts-expect-error sinon.stub() is not complete implementation repo: sinon.stub(), + // @ts-expect-error sinon.stub() is not complete implementation peerId: sinon.stub(), options: { offline: true @@ -244,7 +272,9 @@ describe('name', function () { const peerId = await PeerId.create() const config = getIpnsRoutingConfig({ + // @ts-expect-error sinon.stub() is not complete implementation libp2p: { pubsub: sinon.stub() }, + // @ts-expect-error sinon.stub() is not complete implementation repo: { datastore: sinon.stub() }, peerId, options: { @@ -263,8 +293,11 @@ describe('name', function () { const dht = sinon.stub() const config = getIpnsRoutingConfig({ + // @ts-expect-error sinon.stub() is not complete implementation libp2p: { _dht: dht }, + // @ts-expect-error sinon.stub() is not complete implementation repo: sinon.stub(), + // @ts-expect-error sinon.stub() is not complete implementation peerId: sinon.stub(), options: { libp2p: { diff --git a/packages/ipfs-core/test/preload.spec.js b/packages/ipfs-core/test/preload.spec.js index 70a7b6834c..87e368fefe 100644 --- a/packages/ipfs-core/test/preload.spec.js +++ b/packages/ipfs-core/test/preload.spec.js @@ -10,7 +10,9 @@ const createNode = require('./utils/create-node') const dagPb = require('@ipld/dag-pb') describe('preload', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { @@ -77,6 +79,10 @@ describe('preload', () => { const rootDir = res.find(file => file.path === 'dir0') expect(rootDir).to.exist() + if (!rootDir) { + throw new Error('rootDir did not exist') + } + await MockPreloadNode.waitForCids(rootDir.cid) }) @@ -97,6 
+103,10 @@ describe('preload', () => { const wrappingDir = res.find(file => file.path === '') expect(wrappingDir).to.exist() + if (!wrappingDir) { + throw new Error('wrappingDir did not exist') + } + await MockPreloadNode.waitForCids(wrappingDir.cid) }) @@ -131,6 +141,10 @@ describe('preload', () => { const wrappingDir = res.find(file => file.path === '') expect(wrappingDir).to.exist() + if (!wrappingDir) { + throw new Error('wrappingDir did not exist') + } + // Adding these files with have preloaded wrappingDir.hash, clear it out await MockPreloadNode.clearPreloadCids() @@ -165,7 +179,7 @@ describe('preload', () => { const cid = await ipfs.object.patch.addLink(parent.cid, { Name: 'link', Hash: link.cid, - Tsize: link.node.size + Tsize: dagPb.encode(link.node).length }) await MockPreloadNode.waitForCids(cid) }) @@ -187,7 +201,7 @@ describe('preload', () => { }) await MockPreloadNode.clearPreloadCids() - const cid = await ipfs.object.patch.rmLink(parentCid, { Name: 'link' }) + const cid = await ipfs.object.patch.rmLink(parentCid, 'link') await MockPreloadNode.waitForCids(cid) }) @@ -286,7 +300,9 @@ describe('preload', () => { describe('preload disabled', function () { this.timeout(50 * 1000) + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { diff --git a/packages/ipfs-core/test/pubsub.spec.js b/packages/ipfs-core/test/pubsub.spec.js index 5b68023f3f..4e2e4e193d 100644 --- a/packages/ipfs-core/test/pubsub.spec.js +++ b/packages/ipfs-core/test/pubsub.spec.js @@ -8,7 +8,9 @@ const { expect } = require('aegir/utils/chai') const createNode = require('./utils/create-node') describe('pubsub disabled', () => { + /** @type {import('ipfs-core-types').IPFS} */ let ipfs + /** @type {() => Promise} */ let cleanup before(async () => { diff --git a/packages/ipfs-core/test/utils.spec.js b/packages/ipfs-core/test/utils.spec.js index ed4f9e0632..4a37292638 100644 --- a/packages/ipfs-core/test/utils.spec.js +++ 
b/packages/ipfs-core/test/utils.spec.js @@ -10,9 +10,13 @@ const all = require('it-all') const codecs = require('./utils/codecs') describe('utils', () => { + /** @type {import('multiformats/cid').CID} */ let rootCid + /** @type {import('multiformats/cid').CID} */ let aboutCid + /** @type {string} */ let aboutPath + /** @type {Uint8Array} */ let aboutMultihash describe('resolvePath', function () { @@ -69,7 +73,7 @@ describe('utils', () => { return expect(utils.resolvePath(repo, codecs, `${aboutPath}/fusion`)) .to.eventually.be.rejected() .and.have.property('message') - .that.includes(`no link named "fusion" under ${aboutCid}`) + .that.include(`no link named "fusion" under ${aboutCid}`) }) }) }) diff --git a/packages/ipfs-core/test/utils/clean.js b/packages/ipfs-core/test/utils/clean.js index 248233e641..92d371e175 100644 --- a/packages/ipfs-core/test/utils/clean.js +++ b/packages/ipfs-core/test/utils/clean.js @@ -4,6 +4,9 @@ const rimraf = require('rimraf') const fs = require('fs').promises const { promisify } = require('util') +/** + * @param {string} dir + */ module.exports = async dir => { try { await fs.access(dir) diff --git a/packages/ipfs-core/test/utils/create-node.js b/packages/ipfs-core/test/utils/create-node.js index 3770de5b2e..96bff38b6d 100644 --- a/packages/ipfs-core/test/utils/create-node.js +++ b/packages/ipfs-core/test/utils/create-node.js @@ -1,12 +1,15 @@ 'use strict' const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) -const IPFS = require('../../') +const { create } = require('../../src') const createTempRepo = require('./create-repo') +/** + * @param {import('../../src/types').Options} config + */ module.exports = async (config = {}) => { const repo = await createTempRepo() - const ipfs = await IPFS.create(mergeOptions({ + const ipfs = await create(mergeOptions({ silent: true, repo, config: { diff --git a/packages/ipfs-core/test/utils/create-repo.js b/packages/ipfs-core/test/utils/create-repo.js index 
d6cebf0a82..db758693f4 100644 --- a/packages/ipfs-core/test/utils/create-repo.js +++ b/packages/ipfs-core/test/utils/create-repo.js @@ -1,6 +1,7 @@ 'use strict' const { nanoid } = require('nanoid') +// @ts-expect-error locks is missing from types? const { createRepo, locks: { memory } } = require('ipfs-repo') const codecs = require('./codecs') const createBackend = require('./create-backend') @@ -11,7 +12,7 @@ const { Key } = require('interface-datastore') * @param {string} [options.path] * @param {number} [options.version] * @param {number} [options.spec] - * @param {boolean} [options.true] + * @param {boolean} [options.autoMigrate] * @param {(version: number, percentComplete: string, message: string) => void} [options.onMigrationProgress] * @param {import('ipfs-core-types/src/config').Config} [options.config] */ diff --git a/packages/ipfs-core/test/utils/mock-preload-node-utils.js b/packages/ipfs-core/test/utils/mock-preload-node-utils.js index fe71a6d79a..1d8e93a0cc 100644 --- a/packages/ipfs-core/test/utils/mock-preload-node-utils.js +++ b/packages/ipfs-core/test/utils/mock-preload-node-utils.js @@ -7,12 +7,21 @@ const errCode = require('err-code') const HTTP = require('ipfs-utils/src/http') const waitFor = require('./wait-for') +/** + * @typedef {import('multiformats/cid').CID} CID + */ + const defaultPort = 1138 const defaultAddr = `/dnsaddr/localhost/tcp/${defaultPort}` module.exports.defaultAddr = defaultAddr -// Get the stored preload CIDs for the server at `addr` +/** + * Get the stored preload CIDs for the server at `addr` + * + * @param {string} [addr] + * @returns {Promise} + */ const getPreloadCids = async (addr) => { const res = await HTTP.get(`${toUri(addr || defaultAddr)}/cids`) return res.json() @@ -20,25 +29,37 @@ const getPreloadCids = async (addr) => { module.exports.getPreloadCids = getPreloadCids -// Clear the stored preload URLs for the server at `addr` - +/** + * Clear the stored preload URLs for the server at `addr` + * + * @param {string} 
[addr] + */ module.exports.clearPreloadCids = addr => { return HTTP.delete(`${toUri(addr || defaultAddr)}/cids`) } -// Wait for the passed CIDs to appear in the CID list from the preload node +/** + * Wait for the passed CIDs to appear in the CID list from the preload node + * + * @param {CID | CID[] | string | string[]} cids + * @param {object} [opts] + * @param {number} [opts.timeout] + * @param {string} [opts.addr] + */ module.exports.waitForCids = async (cids, opts) => { - opts = opts || {} - opts.timeout = opts.timeout || 1000 + const options = opts || {} + options.timeout = options.timeout || 1000 - cids = Array.isArray(cids) ? cids : [cids] - cids = cids.map(cid => cid.toString()) // Allow passing CID instance + const cidArr = Array.isArray(cids) ? cids : [cids] + const cidStrs = cidArr.map(cid => cid.toString()) // Allow passing CID instance await waitFor(async () => { - const preloadCids = await getPreloadCids(opts.addr) + const preloadCids = await getPreloadCids(options.addr) // See if our cached preloadCids includes all the cids we're looking for. 
- const { missing, duplicates } = cids.reduce((results, cid) => { + /** @type {{ missing: string[], duplicates: string[] }} */ + const results = { missing: [], duplicates: [] } + const { missing, duplicates } = cidStrs.reduce((results, cid) => { const count = preloadCids.filter(preloadedCid => preloadedCid === cid).length if (count === 0) { results.missing.push(cid) @@ -46,7 +67,7 @@ module.exports.waitForCids = async (cids, opts) => { results.duplicates.push(cid) } return results - }, { missing: [], duplicates: [] }) + }, results) if (duplicates.length) { throw errCode(new Error(`Multiple occurrences of ${duplicates} found`), 'ERR_DUPLICATE') @@ -56,6 +77,6 @@ module.exports.waitForCids = async (cids, opts) => { }, { name: 'CIDs to be preloaded', interval: 5, - timeout: opts.timeout + timeout: options.timeout }) } diff --git a/packages/ipfs-core/test/utils/mock-preload-node.js b/packages/ipfs-core/test/utils/mock-preload-node.js index cd779bfa44..e764227aca 100644 --- a/packages/ipfs-core/test/utils/mock-preload-node.js +++ b/packages/ipfs-core/test/utils/mock-preload-node.js @@ -14,8 +14,11 @@ module.exports.defaultAddr = defaultAddr // called with, and you can ask it for them and also clear them by issuing a // GET/DELETE request to /cids. 
module.exports.createNode = () => { + /** @type {string[]} */ let cids = [] + /** @type {ReturnType & { start: (opts?: any) => Promise, stop: () => Promise }} */ + // @ts-ignore start/stop props are added later const server = http.createServer((req, res) => { res.setHeader('Access-Control-Allow-Origin', '*') res.setHeader('Access-Control-Request-Method', '*') @@ -28,9 +31,12 @@ module.exports.createNode = () => { return } - if (req.url.startsWith('/api/v0/refs')) { + if (req.url?.startsWith('/api/v0/refs')) { const arg = new URL(`https://ipfs.io${req.url}`).searchParams.get('arg') - cids = cids.concat(arg) + + if (arg) { + cids = cids.concat(arg) + } } else if (req.method === 'DELETE' && req.url === '/cids') { res.statusCode = 204 cids = [] @@ -43,7 +49,6 @@ module.exports.createNode = () => { res.end() }) - server.start = (opts = {}) => new Promise(resolve => server.listen({ port: defaultPort, ...opts }, resolve)) server.stop = () => new Promise(resolve => server.close(resolve)) diff --git a/packages/ipfs-core/test/utils/wait-for.js b/packages/ipfs-core/test/utils/wait-for.js index 283dddc20d..f429d253bc 100644 --- a/packages/ipfs-core/test/utils/wait-for.js +++ b/packages/ipfs-core/test/utils/wait-for.js @@ -3,10 +3,17 @@ const delay = require('delay') const errCode = require('err-code') -// Wait for async function `test` to resolve true or timeout after -// options.timeout milliseconds. 
+/** + * Wait for async function `test` to resolve true or timeout after options.timeout milliseconds + * + * @param {() => boolean | Promise} test + * @param {object} options + * @param {number} [options.timeout] + * @param {string} [options.name] + * @param {number} [options.interval] + */ module.exports = async function waitFor (test, options) { - options = Object.assign({ timeout: 5000, interval: 0, name: 'event' }, options) + const opts = Object.assign({ timeout: 5000, interval: 1000, name: 'event' }, options) const start = Date.now() while (true) { @@ -14,10 +21,10 @@ module.exports = async function waitFor (test, options) { return } - if (Date.now() > start + options.timeout) { - throw errCode(new Error(`Timed out waiting for ${options.name}`), 'ERR_TIMEOUT') + if (Date.now() > start + opts.timeout) { + throw errCode(new Error(`Timed out waiting for ${opts.name}`), 'ERR_TIMEOUT') } - await delay(options.interval) + await delay(opts.interval) } } diff --git a/packages/ipfs-core/tsconfig.json b/packages/ipfs-core/tsconfig.json index fe926578b4..ed099cc10b 100644 --- a/packages/ipfs-core/tsconfig.json +++ b/packages/ipfs-core/tsconfig.json @@ -5,6 +5,7 @@ }, "include": [ "src", + "test", "package.json" ], "references": [ diff --git a/packages/ipfs-daemon/package.json b/packages/ipfs-daemon/package.json index 86e94a9353..455f3bc682 100644 --- a/packages/ipfs-daemon/package.json +++ b/packages/ipfs-daemon/package.json @@ -21,7 +21,7 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "npm run test:node", "test:node": "aegir test -t node -- --exit", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", diff --git a/packages/ipfs-daemon/src/index.js b/packages/ipfs-daemon/src/index.js index 0cfc55f9d5..4cd255cabe 100644 --- a/packages/ipfs-daemon/src/index.js +++ b/packages/ipfs-daemon/src/index.js @@ -71,9 +71,9 @@ class Daemon { } /** - * @type 
{import('ipfs-core/src/types').Libp2pFactoryFn} + * @type {import('ipfs-core').Libp2pFactoryFn} */ -function getLibp2p ({ libp2pOptions, options, config, peerId }) { +async function getLibp2p ({ libp2pOptions, options, config, peerId }) { // Attempt to use any of the WebRTC versions available globally let electronWebRTC let wrtc @@ -103,7 +103,7 @@ function getLibp2p ({ libp2pOptions, options, config, peerId }) { } const Libp2p = require('libp2p') - return new Libp2p(libp2pOptions) + return Libp2p.create(libp2pOptions) } module.exports = Daemon diff --git a/packages/ipfs-grpc-client/package.json b/packages/ipfs-grpc-client/package.json index 2ac9e7508d..4f0b01eed5 100644 --- a/packages/ipfs-grpc-client/package.json +++ b/packages/ipfs-grpc-client/package.json @@ -26,7 +26,7 @@ "scripts": { "build": "aegir build", "test": "aegir test", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i aegir -i rimraf -i ipfs-grpc-protocol -i ipfs-core-types" diff --git a/packages/ipfs-grpc-client/src/core-api/add-all.js b/packages/ipfs-grpc-client/src/core-api/add-all.js index e85244118e..0f1160d0bb 100644 --- a/packages/ipfs-grpc-client/src/core-api/add-all.js +++ b/packages/ipfs-grpc-client/src/core-api/add-all.js @@ -1,6 +1,6 @@ 'use strict' -const normaliseInput = require('ipfs-core-utils/src/files/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/files/normalise-input') const { CID } = require('multiformats/cid') const bidiToDuplex = require('../utils/bidi-to-duplex') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') diff --git a/packages/ipfs-grpc-server/package.json b/packages/ipfs-grpc-server/package.json index defee78a72..b9750cc8a1 100644 --- a/packages/ipfs-grpc-server/package.json +++ b/packages/ipfs-grpc-server/package.json @@ -22,7 +22,7 @@ "url": 
"git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "clean": "rimraf ./dist", diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index 1775d70c11..1a707172b0 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -38,7 +38,7 @@ "test:electron-renderer": "aegir test -t electron-renderer", "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i ipfs-core -i rimraf -i ipfs-core-types -i abort-controller" @@ -56,8 +56,6 @@ "ipfs-utils": "^8.1.4", "it-first": "^1.0.6", "it-last": "^1.0.4", - "it-map": "^1.0.4", - "it-tar": "^3.0.0", "it-to-stream": "^1.0.0", "merge-options": "^3.0.4", "multiaddr": "^10.0.0", @@ -72,9 +70,8 @@ "aegir": "^34.0.2", "delay": "^5.0.0", "go-ipfs": "0.8.0", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "it-all": "^1.0.4", - "it-concat": "^2.0.0", "it-first": "^1.0.4", "nock": "^13.0.2", "p-defer": "^3.0.0", diff --git a/packages/ipfs-http-client/src/config/replace.js b/packages/ipfs-http-client/src/config/replace.js index 172a9ffeed..9fc3a0aa97 100644 --- a/packages/ipfs-http-client/src/config/replace.js +++ b/packages/ipfs-http-client/src/config/replace.js @@ -21,7 +21,6 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('config/replace', { timeout: options.timeout, signal, diff --git 
a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index 49c7549054..678b930193 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -36,7 +36,6 @@ module.exports = (codecs, options) => { const controller = new AbortController() const signal = abortSignal(controller.signal, settings.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('dag/put', { timeout: settings.timeout, signal, diff --git a/packages/ipfs-http-client/src/dht/put.js b/packages/ipfs-http-client/src/dht/put.js index 1e7de6b49d..6b86b96f8a 100644 --- a/packages/ipfs-http-client/src/dht/put.js +++ b/packages/ipfs-http-client/src/dht/put.js @@ -7,6 +7,7 @@ const toUrlSearchParams = require('../lib/to-url-search-params') const multipartRequest = require('../lib/multipart-request') const abortSignal = require('../lib/abort-signal') const { AbortController } = require('native-abort-controller') +const uint8ArrayToString = require('uint8arrays/to-string') /** * @typedef {import('../types').HTTPClientExtraOptions} HTTPClientExtraOptions @@ -22,12 +23,11 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('dht/put', { timeout: options.timeout, signal, searchParams: toUrlSearchParams({ - arg: key, + arg: uint8ArrayToString(key), ...options }), ...( diff --git a/packages/ipfs-http-client/src/files/write.js b/packages/ipfs-http-client/src/files/write.js index 0f88fe133e..aa3b19d7ad 100644 --- a/packages/ipfs-http-client/src/files/write.js +++ b/packages/ipfs-http-client/src/files/write.js @@ -22,7 +22,6 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore 
https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('files/write', { timeout: options.timeout, signal, diff --git a/packages/ipfs-http-client/src/get.js b/packages/ipfs-http-client/src/get.js index a836285366..80228c9670 100644 --- a/packages/ipfs-http-client/src/get.js +++ b/packages/ipfs-http-client/src/get.js @@ -1,11 +1,8 @@ 'use strict' -// @ts-ignore no types -const Tar = require('it-tar') const { CID } = require('multiformats/cid') const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') -const map = require('it-map') /** * @typedef {import('./types').HTTPClientExtraOptions} HTTPClientExtraOptions @@ -17,36 +14,25 @@ module.exports = configure(api => { * @type {RootAPI["get"]} */ async function * get (path, options = {}) { + /** @type {Record} */ + const opts = { + arg: `${path instanceof Uint8Array ? CID.decode(path) : path}`, + ...options + } + + if (opts.compressionLevel) { + opts['compression-level'] = opts.compressionLevel + delete opts.compressionLevel + } + const res = await api.post('get', { timeout: options.timeout, signal: options.signal, - searchParams: toUrlSearchParams({ - arg: `${path instanceof Uint8Array ? 
CID.decode(path) : path}`, - ...options - }), + searchParams: toUrlSearchParams(opts), headers: options.headers }) - const extractor = Tar.extract() - - for await (const { header, body } of extractor(res.iterator())) { - if (header.type === 'directory') { - // @ts-ignore - Missing the following properties from type 'Directory': - // cid, name, size, depthts - yield { - type: 'dir', - path: header.name - } - } else { - // @ts-ignore - Missing the following properties from type 'File': - // cid, name, size, depthts - yield { - type: 'file', - path: header.name, - content: map(body, (chunk) => chunk.slice()) // convert bl to Buffer/Uint8Array - } - } - } + yield * res.iterator() } return get diff --git a/packages/ipfs-http-client/src/index.js b/packages/ipfs-http-client/src/index.js index 521aaa79a5..b72389d998 100644 --- a/packages/ipfs-http-client/src/index.js +++ b/packages/ipfs-http-client/src/index.js @@ -10,16 +10,15 @@ const Multihashes = require('ipfs-core-utils/src/multihashes') const Multibases = require('ipfs-core-utils/src/multibases') const dagPb = require('@ipld/dag-pb') const dagCbor = require('@ipld/dag-cbor') -const raw = require('multiformats/codecs/raw') -const json = require('multiformats/codecs/json') -const { sha256, sha512 } = require('multiformats/hashes/sha2') const { identity } = require('multiformats/hashes/identity') -const { base58btc } = require('multiformats/bases/base58') +const { bases, hashes, codecs } = require('multiformats/basics') /** * @typedef {import('./types').EndpointConfig} EndpointConfig * @typedef {import('./types').Options} Options * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher + * @typedef {import('multiformats/bases/interface').MultibaseCodec} MultibaseCodec * @typedef {import('./types').IPFSHTTPClient} IPFSHTTPClient */ @@ -37,16 +36,33 @@ function create (options = {}) { decode: (id) => id } - const bases 
= new Multibases({ - bases: [base58btc].concat(options.ipld && options.ipld.bases ? options.ipld.bases : []), + /** @type {MultibaseCodec[]} */ + const multibaseCodecs = Object.values(bases); + + (options.ipld && options.ipld.bases ? options.ipld.bases : []).forEach(base => multibaseCodecs.push(base)) + + const multibases = new Multibases({ + bases: multibaseCodecs, loadBase: options.ipld && options.ipld.loadBase }) - const codecs = new Multicodecs({ - codecs: [dagPb, dagCbor, raw, json, id].concat(options.ipld?.codecs || []), + + /** @type {BlockCodec[]} */ + const blockCodecs = Object.values(codecs); + + [dagPb, dagCbor, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) + + const multicodecs = new Multicodecs({ + codecs: blockCodecs, loadCodec: options.ipld && options.ipld.loadCodec }) - const hashers = new Multihashes({ - hashers: [sha256, sha512, identity].concat(options.ipld && options.ipld.hashers ? options.ipld.hashers : []), + + /** @type {MultihashHasher[]} */ + const multihashHashers = Object.values(hashes); + + (options.ipld && options.ipld.hashers ? 
options.ipld.hashers : []).forEach(hasher => multihashHashers.push(hasher)) + + const multihashes = new Multihashes({ + hashers: multihashHashers, loadHasher: options.ipld && options.ipld.loadHasher }) @@ -60,7 +76,7 @@ function create (options = {}) { cat: require('./cat')(options), commands: require('./commands')(options), config: require('./config')(options), - dag: require('./dag')(codecs, options), + dag: require('./dag')(multicodecs, options), dht: require('./dht')(options), diag: require('./diag')(options), dns: require('./dns')(options), @@ -74,7 +90,7 @@ function create (options = {}) { ls: require('./ls')(options), mount: require('./mount')(options), name: require('./name')(options), - object: require('./object')(codecs, options), + object: require('./object')(multicodecs, options), pin: require('./pin')(options), ping: require('./ping')(options), pubsub: require('./pubsub')(options), @@ -86,9 +102,9 @@ function create (options = {}) { stop: require('./stop')(options), swarm: require('./swarm')(options), version: require('./version')(options), - bases, - codecs, - hashers + bases: multibases, + codecs: multicodecs, + hashers: multihashes } return client diff --git a/packages/ipfs-http-client/src/lib/multipart-request.browser.js b/packages/ipfs-http-client/src/lib/multipart-request.browser.js index e5c98001c7..ca69022d75 100644 --- a/packages/ipfs-http-client/src/lib/multipart-request.browser.js +++ b/packages/ipfs-http-client/src/lib/multipart-request.browser.js @@ -2,7 +2,7 @@ // Import browser version otherwise electron-renderer will end up with node // version and fail. 
-const normaliseInput = require('ipfs-core-utils/src/files/normalise-input/index.browser') +const { normaliseInput } = require('ipfs-core-utils/src/files/normalise-input/index.browser') const modeToString = require('./mode-to-string') /** diff --git a/packages/ipfs-http-client/src/lib/multipart-request.node.js b/packages/ipfs-http-client/src/lib/multipart-request.node.js index ea1af0237a..29d80cd9aa 100644 --- a/packages/ipfs-http-client/src/lib/multipart-request.node.js +++ b/packages/ipfs-http-client/src/lib/multipart-request.node.js @@ -1,6 +1,6 @@ 'use strict' -const normaliseInput = require('ipfs-core-utils/src/files/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/files/normalise-input') const { nanoid } = require('nanoid') const modeToString = require('./mode-to-string') const merge = require('merge-options').bind({ ignoreUndefined: true }) diff --git a/packages/ipfs-http-client/src/ls.js b/packages/ipfs-http-client/src/ls.js index 1e3fd98af4..73a935548e 100644 --- a/packages/ipfs-http-client/src/ls.js +++ b/packages/ipfs-http-client/src/ls.js @@ -39,8 +39,7 @@ module.exports = configure((api, opts) => { path: pathStr + (link.Name ? 
`/${link.Name}` : ''), size: link.Size, cid: hash, - type: typeOf(link), - depth: link.Depth || 1 + type: typeOf(link) } if (link.Mode) { diff --git a/packages/ipfs-http-client/src/name/publish.js b/packages/ipfs-http-client/src/name/publish.js index 947dcbbcd2..8eccd7168d 100644 --- a/packages/ipfs-http-client/src/name/publish.js +++ b/packages/ipfs-http-client/src/name/publish.js @@ -18,7 +18,7 @@ module.exports = configure(api => { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: path, + arg: `${path}`, ...options }), headers: options.headers diff --git a/packages/ipfs-http-client/src/object/patch/append-data.js b/packages/ipfs-http-client/src/object/patch/append-data.js index 6eab020370..fe94bf97ec 100644 --- a/packages/ipfs-http-client/src/object/patch/append-data.js +++ b/packages/ipfs-http-client/src/object/patch/append-data.js @@ -21,7 +21,6 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('object/patch/append-data', { timeout: options.timeout, signal, diff --git a/packages/ipfs-http-client/src/object/patch/set-data.js b/packages/ipfs-http-client/src/object/patch/set-data.js index b9fa089274..42d737d847 100644 --- a/packages/ipfs-http-client/src/object/patch/set-data.js +++ b/packages/ipfs-http-client/src/object/patch/set-data.js @@ -21,7 +21,6 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('object/patch/set-data', { timeout: options.timeout, signal, diff --git a/packages/ipfs-http-client/src/pin/add-all.js b/packages/ipfs-http-client/src/pin/add-all.js index bf0a4e2901..3a820e2674 100644 --- a/packages/ipfs-http-client/src/pin/add-all.js +++ 
b/packages/ipfs-http-client/src/pin/add-all.js @@ -2,7 +2,7 @@ const { CID } = require('multiformats/cid') const configure = require('../lib/configure') -const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/pins/normalise-input') const toUrlSearchParams = require('../lib/to-url-search-params') /** diff --git a/packages/ipfs-http-client/src/pin/rm-all.js b/packages/ipfs-http-client/src/pin/rm-all.js index 98a61dc31c..4aed6af2c7 100644 --- a/packages/ipfs-http-client/src/pin/rm-all.js +++ b/packages/ipfs-http-client/src/pin/rm-all.js @@ -2,7 +2,7 @@ const { CID } = require('multiformats/cid') const configure = require('../lib/configure') -const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +const { normaliseInput } = require('ipfs-core-utils/src/pins/normalise-input') const toUrlSearchParams = require('../lib/to-url-search-params') /** diff --git a/packages/ipfs-http-client/src/pubsub/publish.js b/packages/ipfs-http-client/src/pubsub/publish.js index 29b9750d95..b97f00024a 100644 --- a/packages/ipfs-http-client/src/pubsub/publish.js +++ b/packages/ipfs-http-client/src/pubsub/publish.js @@ -25,7 +25,6 @@ module.exports = configure(api => { const controller = new AbortController() const signal = abortSignal(controller.signal, options.signal) - // @ts-ignore https://github.com/ipfs/js-ipfs-utils/issues/90 const res = await api.post('pubsub/pub', { timeout: options.timeout, signal, diff --git a/packages/ipfs-http-client/src/pubsub/subscribe.js b/packages/ipfs-http-client/src/pubsub/subscribe.js index db99bf128f..cbd3ab86c2 100644 --- a/packages/ipfs-http-client/src/pubsub/subscribe.js +++ b/packages/ipfs-http-client/src/pubsub/subscribe.js @@ -41,39 +41,37 @@ module.exports = (options, subsTracker) => { const ffWorkaround = setTimeout(() => done(), 1000) // Do this async to not block Firefox - setTimeout(() => { - api.post('pubsub/sub', { - timeout: options.timeout, - 
signal: options.signal, - searchParams: toUrlSearchParams({ - arg: topic, - ...options - }), - headers: options.headers - }) - .catch((err) => { - // Initial subscribe fail, ensure we clean up - subsTracker.unsubscribe(topic, handler) + api.post('pubsub/sub', { + timeout: options.timeout, + signal: options.signal, + searchParams: toUrlSearchParams({ + arg: topic, + ...options + }), + headers: options.headers + }) + .catch((err) => { + // Initial subscribe fail, ensure we clean up + subsTracker.unsubscribe(topic, handler) - fail(err) + fail(err) + }) + .then((response) => { + clearTimeout(ffWorkaround) + + if (!response) { + // if there was no response, the subscribe failed + return + } + + readMessages(response, { + onMessage: handler, + onEnd: () => subsTracker.unsubscribe(topic, handler), + onError: options.onError }) - .then((response) => { - clearTimeout(ffWorkaround) - - if (!response) { - // if there was no response, the subscribe failed - return - } - readMessages(response, { - onMessage: handler, - onEnd: () => subsTracker.unsubscribe(topic, handler), - onError: options.onError - }) - - done() - }) - }, 0) + done() + }) return result } diff --git a/packages/ipfs-http-client/src/types.d.ts b/packages/ipfs-http-client/src/types.d.ts index e0be2081b5..8784c13302 100644 --- a/packages/ipfs-http-client/src/types.d.ts +++ b/packages/ipfs-http-client/src/types.d.ts @@ -2,6 +2,8 @@ import { Agent as HttpAgent } from 'http' import { Agent as HttpsAgent } from 'https' import { Multiaddr } from 'multiaddr' import type { BlockCodec } from 'multiformats/codecs/interface' +import type { MultihashHasher } from 'multiformats/hashes/interface' +import type { MultibaseCodec } from 'multiformats/bases/interface' import type { IPFS } from 'ipfs-core-types' export interface Options { @@ -12,7 +14,7 @@ export interface Options { timeout?: number | string apiPath?: string url?: URL|string|Multiaddr - ipld?: IPLDOptions + ipld?: Partial agent?: HttpAgent | HttpsAgent } diff --git 
a/packages/ipfs-http-client/test/get.spec.js b/packages/ipfs-http-client/test/get.spec.js deleted file mode 100644 index 451cbfad07..0000000000 --- a/packages/ipfs-http-client/test/get.spec.js +++ /dev/null @@ -1,79 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const { expect } = require('aegir/utils/chai') -const all = require('it-all') -const concat = require('it-concat') - -const f = require('./utils/factory')() - -describe('.get (specific go-ipfs features)', function () { - this.timeout(60 * 1000) - - const smallFile = { - cid: 'Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD', - data: 'hello world' - } - - let ipfs - - before(async () => { - ipfs = (await f.spawn()).api - await ipfs.add(smallFile.data) - }) - - after(() => f.clean()) - - it('no compression args', async () => { - const files = await all(ipfs.get(smallFile.cid)) - - expect(files).to.be.length(1) - const content = await concat(files[0].content) - expect(content.toString()).to.contain(smallFile.data.toString()) - }) - - it('archive true', async () => { - const files = await all(ipfs.get(smallFile.cid, { archive: true })) - - expect(files).to.be.length(1) - const content = await concat(files[0].content) - expect(content.toString()).to.contain(smallFile.data.toString()) - }) - - it('err with out of range compression level', async () => { - await expect(all(ipfs.get(smallFile.cid, { - compress: true, - compressionLevel: 10 - }))).to.eventually.be.rejectedWith('compression level must be between 1 and 9') - }) - - // TODO Understand why this test started failing - it.skip('with compression level', async () => { - await all(ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 })) - }) - - it('add path containing "+"s (for testing get)', async () => { - const filename = 'ti,c64x+mega++mod-pic.txt' - const subdir = 'tmp/c++files' - const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' - const path = `${subdir}/${filename}` -
const files = await all(ipfs.addAll([{ - path, - content: path - }])) - - expect(files[2].cid.toString()).to.equal(expectedCid) - }) - - it('get path containing "+"s', async () => { - const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' - const files = await all(ipfs.get(cid)) - - expect(files).to.be.an('array').with.lengthOf(3) - expect(files[0]).to.have.property('path', cid) - expect(files[1]).to.have.property('path', `${cid}/c++files`) - expect(files[2]).to.have.property('path', `${cid}/c++files/ti,c64x+mega++mod-pic.txt`) - }) -}) diff --git a/packages/ipfs-http-client/test/pubsub.spec.js b/packages/ipfs-http-client/test/pubsub.spec.js index bf47216e0e..cadc2ad234 100644 --- a/packages/ipfs-http-client/test/pubsub.spec.js +++ b/packages/ipfs-http-client/test/pubsub.spec.js @@ -20,7 +20,7 @@ describe('.pubsub', function () { this.timeout(30 * 1000) // slow CI ctl = await await f.spawn({ - args: '--enable-pubsub-experiment' + args: ['--enable-pubsub-experiment'] }) ipfs = ctl.api diff --git a/packages/ipfs-http-gateway/package.json b/packages/ipfs-http-gateway/package.json index 0110dee9af..d12478cfa6 100644 --- a/packages/ipfs-http-gateway/package.json +++ b/packages/ipfs-http-gateway/package.json @@ -32,7 +32,7 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", diff --git a/packages/ipfs-http-server/package.json b/packages/ipfs-http-server/package.json index 2f9578e842..42fb27bab7 100644 --- a/packages/ipfs-http-server/package.json +++ b/packages/ipfs-http-server/package.json @@ -21,7 +21,7 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov 
npm run test:node", @@ -54,7 +54,6 @@ "it-pipe": "^1.1.0", "it-pushable": "^1.4.2", "it-reduce": "^1.0.5", - "it-tar": "^3.0.0", "joi": "^17.2.1", "just-safe-set": "^2.2.1", "multiaddr": "^10.0.0", diff --git a/packages/ipfs-http-server/src/api/resources/files-regular.js b/packages/ipfs-http-server/src/api/resources/files-regular.js index b2359491b6..a5ee3e607a 100644 --- a/packages/ipfs-http-server/src/api/resources/files-regular.js +++ b/packages/ipfs-http-server/src/api/resources/files-regular.js @@ -1,8 +1,6 @@ 'use strict' const multipart = require('../../utils/multipart-request-parser') -// @ts-ignore no types -const tar = require('it-tar') const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') const { pipe } = require('it-pipe') @@ -12,15 +10,6 @@ const merge = require('it-merge') const { PassThrough } = require('stream') const map = require('it-map') -/** - * @param {AsyncIterable} source - */ -const toBuffer = async function * (source) { - for await (const chunk of source) { - yield chunk.slice() - } -} - exports.cat = { options: { validate: { @@ -89,8 +78,8 @@ exports.get = { query: Joi.object() .keys({ path: Joi.ipfsPath().required(), - archive: Joi.boolean().default(false), - compress: Joi.boolean().default(false), + archive: Joi.boolean(), + compress: Joi.boolean(), compressionLevel: Joi.number().integer().min(1).max(9), timeout: Joi.timeout() }) @@ -98,6 +87,10 @@ exports.get = { override: true, ignoreUndefined: true }) + .rename('compression-level', 'compressionLevel', { + override: true, + ignoreUndefined: true + }) } }, @@ -117,34 +110,20 @@ exports.get = { }, query: { path, + archive, + compress, + compressionLevel, timeout } } = request - return streamResponse(request, h, () => pipe( - ipfs.get(path, { - timeout, - signal - }), - /** - * @param {AsyncIterable} source - */ - async function * (source) { - for await (const file of source) { - const header = { - name: file.path - } - - if (file.type === 'file' && file.content != 
null) { - yield { header: { ...header, size: file.size }, body: toBuffer(file.content) } - } else { - yield { header: { ...header, type: 'directory' } } - } - } - }, - tar.pack(), - toBuffer - )) + return streamResponse(request, h, () => ipfs.get(path, { + timeout, + archive, + compress, + compressionLevel, + signal + })) } } @@ -364,7 +343,6 @@ exports.ls = { path: Joi.ipfsPath().required(), cidBase: Joi.string().default('base58btc'), stream: Joi.boolean().default(false), - recursive: Joi.boolean().default(false), timeout: Joi.timeout() }) .rename('arg', 'path', { @@ -395,7 +373,6 @@ exports.ls = { query: { path, cidBase, - recursive, stream, timeout } @@ -441,7 +418,6 @@ exports.ls = { if (!stream) { try { const links = await all(ipfs.ls(path, { - recursive, signal, timeout })) @@ -453,7 +429,6 @@ exports.ls = { } return streamResponse(request, h, () => pipe( ipfs.ls(path, { - recursive, signal, timeout }), diff --git a/packages/ipfs-http-server/test/inject/files.js b/packages/ipfs-http-server/test/inject/files.js index b6cdb05c57..eb40bdea0d 100644 --- a/packages/ipfs-http-server/test/inject/files.js +++ b/packages/ipfs-http-server/test/inject/files.js @@ -330,7 +330,10 @@ describe('/files', () => { describe('/get', () => { const defaultOptions = { signal: sinon.match.instanceOf(AbortSignal), - timeout: undefined + timeout: undefined, + archive: undefined, + compress: undefined, + compressionLevel: undefined } it('only accepts POST', () => { @@ -341,9 +344,7 @@ describe('/files', () => { ipfs.get.withArgs(`${cid}`, { ...defaultOptions, timeout: 1000 - }).returns([{ - path: 'path' - }]) + }).returns(async function * () { yield { path: 'path' } }()) const res = await http({ method: 'POST', @@ -356,7 +357,6 @@ describe('/files', () => { describe('/ls', () => { const defaultOptions = { - recursive: false, signal: sinon.match.instanceOf(AbortSignal), timeout: undefined } @@ -464,44 +464,6 @@ describe('/files', () => { }) }) - it('should list directory contents 
recursively', async () => { - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - ipfs.files.stat.withArgs(`/ipfs/${cid}`).returns({ - type: 'directory' - }) - ipfs.ls.withArgs(`${cid}`, { - ...defaultOptions, - recursive: true - }).returns([{ - name: 'link', - cid, - size: 10, - type: 'file', - depth: 1, - mode: 0o420 - }]) - - const res = await http({ - method: 'POST', - url: `/api/v0/ls?arg=${cid}&recursive=true` - }, { ipfs }) - - expect(res).to.have.property('statusCode', 200) - expect(res).to.have.deep.nested.property('result.Objects[0]', { - Hash: `${cid}`, - Links: [{ - Depth: 1, - Hash: cid.toString(), - Mode: '0420', - Mtime: undefined, - MtimeNsecs: undefined, - Name: 'link', - Size: 10, - Type: 2 - }] - }) - }) - // TODO: unskip after switch to v1 CIDs by default it.skip('should return base64 encoded CIDs', async () => { ipfs.bases.getBase.withArgs('base64').returns(base64) diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json index 0e415f2413..568d574706 100644 --- a/packages/ipfs-message-port-client/package.json +++ b/packages/ipfs-message-port-client/package.json @@ -27,7 +27,7 @@ "build": "aegir build", "test": "echo 'Only interface tests live here'", "test:interface:message-port-client": "aegir test -t browser --bail -f ./test/interface-message-port-client.js", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i ipfs-core -i rimraf -i ipfs-core-types -i esbuild" diff --git a/packages/ipfs-message-port-client/src/core.js b/packages/ipfs-message-port-client/src/core.js index 061146c3b0..671b1ad2ed 100644 --- a/packages/ipfs-message-port-client/src/core.js +++ b/packages/ipfs-message-port-client/src/core.js @@ -159,15 +159,14 @@ const decodeAddedData = ({ path, cid, mode, mtime, size }) => { * @param {EncodedIPFSEntry} encodedEntry * @returns 
{import('ipfs-core-types/src/root').IPFSEntry} */ -const decodeLsEntry = ({ depth, name, path, size, cid, type, mode, mtime }) => ({ +const decodeLsEntry = ({ name, path, size, cid, type, mode, mtime }) => ({ cid: decodeCID(cid), type, name, path, mode, mtime, - size, - depth + size }) /** diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json index efc54229a8..eb50bb5ee3 100644 --- a/packages/ipfs-message-port-protocol/package.json +++ b/packages/ipfs-message-port-protocol/package.json @@ -40,7 +40,7 @@ "test:webworker": "aegir test -t webworker", "test:chrome": "aegir test -t browser -t webworker", "test:firefox": "aegir test -t browser -t webworker -- --browsers firefox", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i rimraf -i ipfs-core-types" diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json index c186b72399..ea3043c286 100644 --- a/packages/ipfs-message-port-server/package.json +++ b/packages/ipfs-message-port-server/package.json @@ -32,7 +32,7 @@ "test:webworker": "aegir test -t webworker", "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rimraf ./dist", "dep-check": "aegir dep-check -i rimraf -i ipfs-core-types" diff --git a/packages/ipfs-message-port-server/src/core.js b/packages/ipfs-message-port-server/src/core.js index 4cb3f5505a..9aa4c46ef2 100644 --- a/packages/ipfs-message-port-server/src/core.js +++ b/packages/ipfs-message-port-server/src/core.js @@ -289,15 +289,14 @@ const encodeLsResult = entries => { /** * @param {IPFSEntry} entry */ -const 
encodeLsEntry = ({ depth, name, path, size, cid, type, mode, mtime }) => ({ +const encodeLsEntry = ({ name, path, size, cid, type, mode, mtime }) => ({ cid: encodeCID(cid), type, name, path, mode, mtime, - size, - depth + size }) /** diff --git a/packages/ipfs/.aegir.js b/packages/ipfs/.aegir.js index 73412c5c84..78aeb95abb 100644 --- a/packages/ipfs/.aegir.js +++ b/packages/ipfs/.aegir.js @@ -81,7 +81,7 @@ module.exports = { return { env: { PINNING_SERVICE_ENDPOINT: pinningService.endpoint, - PINNING_SERVIEC_KEY: pinningService.token, + PINNING_SERVICE_KEY: pinningService.token, ECHO_SERVER: `http://${echoServer.host}:${echoServer.port}`, IPFSD_SERVER: `http://127.0.0.1:${ipfsdPort}`, SIGNALA_SERVER: `/ip4/127.0.0.1/tcp/${signalAPort}/ws/p2p-webrtc-star`, @@ -98,7 +98,7 @@ module.exports = { return { env: { PINNING_SERVICE_ENDPOINT: pinningService.endpoint, - PINNING_SERVIEC_KEY: pinningService.token, + PINNING_SERVICE_KEY: pinningService.token, ECHO_SERVER: `http://${echoServer.host}:${echoServer.port}` }, echoServer, diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index 7ba3420bc5..b61553ccf7 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -25,7 +25,7 @@ }, "scripts": { "build": "aegir build", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "test": "echo 'Only interface tests live here'", "test:interface:core": "aegir test -f test/interface-core.js", "test:interface:client": "aegir test -f test/interface-client.js", @@ -56,9 +56,9 @@ "ipfs-client": "^0.5.1", "ipfs-core-types": "^0.6.1", "ipfs-http-client": "^51.0.1", - "ipfs-interop": "^6.0.0", + "ipfs-interop": "^6.0.1", "ipfs-utils": "^8.1.4", - "ipfsd-ctl": "^9.0.0", + "ipfsd-ctl": "^10.0.3", "iso-url": "^1.0.0", "libp2p-webrtc-star": "^0.23.0", "merge-options": "^3.0.4",