diff --git a/migrations/index.js b/migrations/index.js index e4e7d8f..75cce24 100644 --- a/migrations/index.js +++ b/migrations/index.js @@ -12,4 +12,5 @@ const emptyMigration = { module.exports = [ Object.assign({version: 7}, emptyMigration), + require('./migration-8') ] diff --git a/migrations/migration-8/blocks-to-multihash.js b/migrations/migration-8/blocks-to-multihash.js new file mode 100644 index 0000000..d6897cd --- /dev/null +++ b/migrations/migration-8/blocks-to-multihash.js @@ -0,0 +1,79 @@ +const path = require('path') +const CID = require('cids') +const Key = require('interface-datastore').Key +const core = require('datastore-core') +const ShardingStore = core.ShardingDatastore +const base32 = require('base32.js') +const utils = require('../../src/utils') +const log = require('debug')('ipfs-repo-migrations:migration-8') + +// The corresponding function in js-ipfs-repo defaults to not using sharding, +// but because options.sharding defaults to true, this +// function defaults to using sharding.
+async function maybeWithSharding (filestore, options) { + if (options.sharding === false) { + return filestore + } + + const shard = new core.shard.NextToLast(2) + return await ShardingStore.createOrOpen(filestore, shard) +} + +function keyToMultihash(key){ + // Key to CID + const decoder = new base32.Decoder() + const buff = decoder.finalize(key.toString().slice(1)) + const cid = new CID(Buffer.from(buff)) + + // CID to multihash + const enc = new base32.Encoder() + return new Key('/' + enc.finalize(cid.multihash), false) +} + +function keyToCid(key){ + // Key to CID + const decoder = new base32.Decoder() + const buff = decoder.write(key.toString().slice(1)).finalize() + const cid = new CID(1, 'raw', Buffer.from(buff)) + + // CID to Key + const enc = new base32.Encoder() + return new Key('/' + enc.finalize(cid.buffer), false) +} + +async function process(repoPath, options, keyFunction){ + const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, 'blocks') + + const baseStore = new StorageBackend(path.join(repoPath, 'blocks'), storageOptions) + const store = await maybeWithSharding(baseStore, storageOptions) + + try { + const batch = store.batch() + let counter = 0 + for await (const block of store.query({})) { + const newKey = keyFunction(block.key) + + // If the Key is CIDv0 then it is raw multihash and nothing is changing + if(newKey.toString() !== block.key.toString()){ + counter += 1 + + log(`Migrating Block from ${block.key.toString()} to ${newKey.toString()}`) + batch.delete(block.key) + batch.put(newKey, block.value) + } + } + + log(`Changing ${ counter } blocks`) + await batch.commit() + } finally { + await store.close() + } +} + +exports.migrate = function blocksMigrate (repoPath, options) { + return process(repoPath, options, keyToMultihash) +} + +exports.revert = function blocksRevert (repoPath, options) { + return process(repoPath, options, keyToCid) +} diff --git a/migrations/migration-8/index.js 
b/migrations/migration-8/index.js new file mode 100644 index 0000000..949f7d5 --- /dev/null +++ b/migrations/migration-8/index.js @@ -0,0 +1,38 @@ +'use strict' + +const keysEncoding = require('./keys-encoding') +const blocksToMultihash = require('./blocks-to-multihash') +const log = require('debug')('ipfs-repo-migrations:migration-8') + +async function migrate (repoPath, options) { + await keysEncoding.migrate(repoPath, options) + + try{ + await blocksToMultihash.migrate(repoPath, options) + }catch (e) { + log('During migration of Blockstore to multihash exception was raised! Reverting keys part of migration!') + await keysEncoding.revert(repoPath, options) + + throw e + } +} + +async function revert (repoPath, options) { + await keysEncoding.revert(repoPath, options) + + try{ + await blocksToMultihash.revert(repoPath, options) + }catch (e) { + log('During reversion of Blockstore to CID exception was raised! Migrating keys part of migration!') + await keysEncoding.migrate(repoPath, options) + + throw e + } +} + +module.exports = { + version: 8, + description: 'Transforms key\'s names into base32 encoding and converts Block store to use multihashes', + migrate, + revert +} diff --git a/migrations/migration-8/keys-encoding.js b/migrations/migration-8/keys-encoding.js new file mode 100644 index 0000000..029e17d --- /dev/null +++ b/migrations/migration-8/keys-encoding.js @@ -0,0 +1,62 @@ +const utils = require('../../src/utils') +const path = require('path') +const base32 = require('base32.js') +const Key = require('interface-datastore').Key +const log = require('debug')('ipfs-repo-migrations:migration-8') + +const KEY_PREFIX = 'key_' + +function encode (name) { + name = Buffer.from(name) + const encoder = new base32.Encoder({ type: 'rfc4648' }) + return (KEY_PREFIX + encoder.finalize(name)).toLowerCase() +} + +function decode (name) { + if (!name.startsWith(KEY_PREFIX)) { + throw Error('Unknown format of key\'s name!') + } + + const decoder = new base32.Decoder({ 
type: 'rfc4648' }) + const decodedNameBuff = decoder.finalize(name.replace(KEY_PREFIX, '').toUpperCase()) + return Buffer.from(decodedNameBuff).toString() +} + +async function processFolder (store, prefix, fileNameProcessor) { + const query = { + prefix: `/${ prefix }` + } + + const files = store.query(query) + for await (let file of files) { + const name = String(file.key._buf).replace(`/${ prefix }/`, '') + const encodedFileName = fileNameProcessor(name) + const newKey = new Key(`${ prefix }/${ encodedFileName }`) + + await store.delete(file.key) + log(`Translating key's name '${ file.key }' into '${ newKey }'`) + await store.put(newKey, file.value) + } +} + +async function process (repoPath, options, processor) { + const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, 'keys') + + const store = new StorageBackend(path.join(repoPath, 'keys'), storageOptions) + try { + const info = processFolder(store, 'info', processor) + const data = processFolder(store, 'pkcs8', processor) + + return await Promise.all([info, data]) + } finally { + await store.close() + } +} + +exports.migrate = async function keyEncode (repoPath, options) { + return process(repoPath, options, encode) +} + +exports.revert = async function keyDecode (repoPath, options) { + return process(repoPath, options, decode) +} diff --git a/package.json b/package.json index a80f413..5b26148 100644 --- a/package.json +++ b/package.json @@ -44,13 +44,16 @@ "docs": "aegir docs" }, "dependencies": { + "base32.js": "~0.1.0", "chalk": "^2.4.2", - "datastore-fs": "~0.9.1", - "datastore-level": "~0.12.1", + "cids": "~0.7.0", + "datastore-core": "~0.7.0", + "datastore-fs": "~0.9.0", + "datastore-level": "~0.12.0", "debug": "^4.1.0", - "interface-datastore": "~0.8.0", - "proper-lockfile": "^4.1.1", - "yargs": "^14.2.0", + "interface-datastore": "~0.7.0", + "proper-lockfile": "^3.2.0", + "yargs": "^12.0.5", "yargs-promise": "^1.1.0" }, "devDependencies": { diff --git a/test/browser.js 
b/test/browser.js index b341bf9..f0646cf 100644 --- a/test/browser.js +++ b/test/browser.js @@ -43,6 +43,10 @@ describe('Browser specific tests', () => { require('./version-test')(createRepo, repoCleanup) }) + describe('migrations tests', () => { + require('./migrations/migration-8-test')(createRepo, repoCleanup) + }) + describe('init tests', () => { require('./init-test')(createRepo, repoCleanup) }) diff --git a/test/migrations/migration-8-test.js b/test/migrations/migration-8-test.js new file mode 100644 index 0000000..ee44ee3 --- /dev/null +++ b/test/migrations/migration-8-test.js @@ -0,0 +1,131 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const chaiAsPromised = require('chai-as-promised') +chai.use(chaiAsPromised) +const expect = chai.expect + +const path = require('path') +const keysMigration = require('../../migrations/migration-8/keys-encoding') +const blocksMigration = require('../../migrations/migration-8/blocks-to-multihash') +const Key = require('interface-datastore').Key +const Datastore = require('datastore-fs') +const core = require('datastore-core') +const ShardingStore = core.ShardingDatastore + +const keysFixtures = [ + ['aAa', 'key_mfawc'], + ['bbb', 'key_mjrge'], + ['self', 'key_onswyzq'] +] + +const blocksFixtures = [ + ['AFKREIBFG77IKIKDMBDUFDCSPK7H5TE5LNPMCSXYLPML27WSTT5YA5IUNU', 'CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I'] +] + +async function bootstrapKeys (dir, encoded) { + const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: true }) + await store.open() + + let name + for (const keyNames of keysFixtures) { + name = encoded ? 
keyNames[1] : keyNames[0] + await store.put(new Key(`/pkcs8/${name}`), '') + await store.put(new Key(`/info/${name}`), '') + } + + await store.close() +} + +async function validateKeys (dir, shouldBeEncoded) { + const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: false }) + await store.open() + + let name + for (const keyNames of keysFixtures) { + name = shouldBeEncoded ? keyNames[1] : keyNames[0] + expect(await store.has(new Key(`/pkcs8/${name}`))).to.be.true(name) + expect(await store.has(new Key(`/info/${name}`))).to.be.true(name) + } + + await store.close() +} + +async function bootstrapBlocks (dir, encoded) { + const baseStore = new Datastore(path.join(dir, 'blocks'), { extension: '.data', createIfMissing: true }) + const shard = new core.shard.NextToLast(2) + const store = await ShardingStore.createOrOpen(baseStore, shard) + + let name + for (const blocksNames of blocksFixtures) { + name = encoded ? blocksNames[1] : blocksNames[0] + await store.put(new Key(name), '') + } + + await store.close() +} + +async function validateBlocks (dir, shouldBeEncoded) { + const baseStore = new Datastore(path.join(dir, 'blocks'), { extension: '.data', createIfMissing: false }) + const shard = new core.shard.NextToLast(2) + const store = await ShardingStore.createOrOpen(baseStore, shard) + + let newName, oldName + for (const blockNames of blocksFixtures) { + newName = shouldBeEncoded ? blockNames[1] : blockNames[0] + oldName = shouldBeEncoded ? 
blockNames[0] : blockNames[1] + expect(await store.has(new Key(oldName))).to.be.false(oldName) + expect(await store.has(new Key(newName))).to.be.true(newName) + } + + await store.close() +} + +module.exports = (setup, cleanup) => { + describe('migration 8', () => { + let dir + + beforeEach(async () => { + dir = await setup() + }) + afterEach(() => cleanup(dir)) + + it('should migrate keys forward', async () => { + await bootstrapKeys(dir, false) + await keysMigration.migrate(dir) + await validateKeys(dir, true) + }) + + it('should migrate keys backward', async () => { + await bootstrapKeys(dir, true) + await keysMigration.revert(dir) + await validateKeys(dir, false) + }) + + it('should fail to migrate keys backward with invalid key name', async () => { + const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: true }) + await store.open() + + await store.put(new Key('/pkcs8/mfawc'), '') + await store.put(new Key('/info/mfawc'), '') + + await store.close() + + expect(keysMigration.revert(dir)).to.eventually.rejectedWith('Unknown format of key\'s name!') + }) + + it('should migrate blocks forward', async () => { + await bootstrapBlocks(dir, false) + await blocksMigration.migrate(dir) + await validateBlocks(dir, true) + }) + // + // it('should migrate blocks backward', async () => { + // await bootstrapKeys(dir, true) + // await blocksMigration.revert(dir) + // await validateKeys(dir, false) + // }) + }) +} diff --git a/test/node.js b/test/node.js index 032889c..584275d 100644 --- a/test/node.js +++ b/test/node.js @@ -43,6 +43,10 @@ describe('Node specific tests', () => { require('./version-test')(createRepo, repoCleanup) }) + describe('migrations tests', () => { + require('./migrations/migration-8-test')(createRepo, repoCleanup) + }) + describe('init tests', () => { require('./init-test')(createRepo, repoCleanup) })