From 4129206102d8a643270306b5c56d1ba9a0ae2458 Mon Sep 17 00:00:00 2001 From: Anthony Marcar Date: Thu, 29 Nov 2018 23:57:08 +1100 Subject: [PATCH] feat(gatsby): add lokijs nodes db implementation (#9919) This PR adds a feature flag `GATSBY_DB_NODES` that can be used to change the storage engine for the gatsby data layer (`nodes`). - `redux` (default) which uses the existing redux implementation, and `sift` for querying. Or, you can use - `loki` which uses the loki in-memory database to store and query. This PR re-implements functionality in #9338, but with all tests passing and addressing previous feedback. It should also be easier to review since it builds on several refactorings. Some things to note: 1. I [submitted a PR to lokijs](https://github.com/techfort/LokiJS/pull/718) which still hasn't been merged, though the author says he'll start working on it soon. Therefore, in the interim, I've published [@moocar/lokijs](https://www.npmjs.com/package/@moocar/lokijs). 1. I haven't implemented auto indexing of query fields yet. I'll attack that next. 1. I suggest we ask the community to try out the feature flag once it's merged to get feedback. All the tests pass, but this is such a big change that we'll want to test it gradually 1. While loki uses the same mongo query language as sift, they do have different behavior. Most of my time on this PR was spent ensuring that loki behaves **exactly** like sift. See [db/loki/nodes-query.js](https://github.com/gatsbyjs/gatsby/blob/cddbe893a4ce638babb1cbe5e5da4c13b6f5e57d/packages/gatsby/src/db/loki/nodes-query.js). But there's a chance a few edge cases have slipped through the cracks. 1. the feature flag works with the tests too `GATSBY_DB_NODES=loki yarn test`. 
We should perhaps look into running this on all PRs --- packages/gatsby/package.json | 1 + packages/gatsby/src/bootstrap/index.js | 27 + .../src/db/__tests__/fixtures/ensure-loki.js | 8 + .../src/db/__tests__/node-tracking-test.js | 124 ++- packages/gatsby/src/db/__tests__/nodes.js | 347 ++++++++ .../gatsby/src/db/loki/__tests__/index.js | 12 + .../src/db/loki/__tests__/nodes-query-test.js | 56 ++ .../gatsby/src/db/loki/__tests__/nodes.js | 30 + .../gatsby/src/db/loki/custom-comparators.js | 218 +++++ packages/gatsby/src/db/loki/index.js | 126 +++ packages/gatsby/src/db/loki/nodes-query.js | 309 +++++++ packages/gatsby/src/db/loki/nodes.js | 345 +++++++- packages/gatsby/src/db/node-tracking.js | 18 +- packages/gatsby/src/db/nodes-query.js | 52 ++ packages/gatsby/src/db/nodes.js | 72 +- .../__tests__/__snapshots__/nodes.js.snap | 14 +- packages/gatsby/src/redux/__tests__/nodes.js | 360 +------- packages/gatsby/src/redux/actions.js | 1 + packages/gatsby/src/redux/index.js | 2 +- packages/gatsby/src/redux/nodes.js | 32 - packages/gatsby/src/redux/reducers/index.js | 24 +- packages/gatsby/src/redux/run-sift.js | 8 +- ...p => connection-input-fields-test.js.snap} | 4 +- .../__tests__/build-node-connections-test.js | 5 +- .../schema/__tests__/build-node-types-test.js | 23 +- .../__tests__/connection-input-fields-test.js | 272 ++++++ .../connections-filter-on-linked-nodes.js | 293 +++++++ .../infer-graphql-input-type-test.js | 811 +----------------- .../__tests__/infer-graphql-type-test.js | 4 + .../gatsby/src/schema/__tests__/run-query.js | 470 ++++++++++ .../src/schema/build-node-connections.js | 2 +- .../gatsby/src/schema/build-node-types.js | 22 +- packages/gatsby/src/schema/index.js | 4 +- .../src/schema/infer-graphql-input-fields.js | 1 + .../gatsby/src/schema/infer-graphql-type.js | 22 +- packages/gatsby/src/schema/lazy-fields.js | 64 ++ yarn.lock | 5 + 37 files changed, 2850 insertions(+), 1338 deletions(-) create mode 100644 
packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js create mode 100644 packages/gatsby/src/db/__tests__/nodes.js create mode 100644 packages/gatsby/src/db/loki/__tests__/index.js create mode 100644 packages/gatsby/src/db/loki/__tests__/nodes-query-test.js create mode 100644 packages/gatsby/src/db/loki/__tests__/nodes.js create mode 100644 packages/gatsby/src/db/loki/custom-comparators.js create mode 100644 packages/gatsby/src/db/loki/index.js create mode 100644 packages/gatsby/src/db/loki/nodes-query.js create mode 100644 packages/gatsby/src/db/nodes-query.js rename packages/gatsby/src/schema/__tests__/__snapshots__/{infer-graphql-input-type-test.js.snap => connection-input-fields-test.js.snap} (89%) create mode 100644 packages/gatsby/src/schema/__tests__/connection-input-fields-test.js create mode 100644 packages/gatsby/src/schema/__tests__/connections-filter-on-linked-nodes.js create mode 100644 packages/gatsby/src/schema/__tests__/run-query.js create mode 100644 packages/gatsby/src/schema/lazy-fields.js diff --git a/packages/gatsby/package.json b/packages/gatsby/package.json index 4b6259155b822..4e9fe529e5411 100644 --- a/packages/gatsby/package.json +++ b/packages/gatsby/package.json @@ -16,6 +16,7 @@ "@babel/polyfill": "^7.0.0", "@babel/runtime": "^7.0.0", "@babel/traverse": "^7.0.0", + "@moocar/lokijs": "^1.0.1", "@reach/router": "^1.1.1", "autoprefixer": "^8.6.5", "babel-core": "7.0.0-bridge.0", diff --git a/packages/gatsby/src/bootstrap/index.js b/packages/gatsby/src/bootstrap/index.js index e60ce6796ec5f..fdd5ec63c8beb 100644 --- a/packages/gatsby/src/bootstrap/index.js +++ b/packages/gatsby/src/bootstrap/index.js @@ -20,6 +20,7 @@ const report = require(`gatsby-cli/lib/reporter`) const getConfigFile = require(`./get-config-file`) const tracer = require(`opentracing`).globalTracer() const preferDefault = require(`./prefer-default`) +const nodeTracking = require(`../db/node-tracking`) // Show stack trace on unhandled promises. 
process.on(`unhandledRejection`, (reason, p) => { @@ -221,6 +222,32 @@ module.exports = async (args: BootstrapArgs) => { activity.end() + if (process.env.GATSBY_DB_NODES === `loki`) { + const loki = require(`../db/loki`) + // Start the nodes database (in memory loki js with interval disk + // saves). If data was saved from a previous build, it will be + // loaded here + activity = report.activityTimer(`start nodes db`, { + parentSpan: bootstrapSpan, + }) + activity.start() + const dbSaveFile = `${program.directory}/.cache/loki/loki.db` + try { + await loki.start({ + saveFile: dbSaveFile, + }) + } catch (e) { + report.error( + `Error starting DB. Perhaps try deleting ${path.dirname(dbSaveFile)}` + ) + } + activity.end() + } + + // By now, our nodes database has been loaded, so ensure that we + // have tracked all inline objects + nodeTracking.trackDbNodes() + // Copy our site files to the root of the site. activity = report.activityTimer(`copy gatsby files`, { parentSpan: bootstrapSpan, diff --git a/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js b/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js new file mode 100644 index 0000000000000..e02a612f28ce0 --- /dev/null +++ b/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js @@ -0,0 +1,8 @@ +const { backend } = require(`../../nodes`) + +module.exports = () => { + if (backend === `loki`) { + const lokiDb = require(`../../loki`) + beforeAll(lokiDb.start) + } +} diff --git a/packages/gatsby/src/db/__tests__/node-tracking-test.js b/packages/gatsby/src/db/__tests__/node-tracking-test.js index c73b760e5e390..491a346c95ed9 100644 --- a/packages/gatsby/src/db/__tests__/node-tracking-test.js +++ b/packages/gatsby/src/db/__tests__/node-tracking-test.js @@ -1,72 +1,57 @@ -const { readFile, writeFile } = require(`fs-extra`) - -jest.mock(`fs`) -jest.mock(`fs-extra`, () => { +const { store } = require(`../../redux`) +const { + boundActionCreators: { createNode }, +} = require(`../../redux/actions`) +const { 
getNode } = require(`../../db/nodes`) +const { findRootNodeAncestor, trackDbNodes } = require(`../node-tracking`) +const nodeTypes = require(`../../schema/build-node-types`) +const { run: runQuery } = require(`../nodes-query`) +require(`./fixtures/ensure-loki`)() + +function makeNode() { return { - readFile: jest.fn(() => `contents`), - writeFile: jest.fn(), - } -}) - -afterEach(() => { - readFile.mockClear() - writeFile.mockClear() -}) - -describe(`Track root nodes`, () => { - const reduxStatePath = `${process.cwd()}/.cache/redux-state.json` - const MOCK_FILE_INFO = {} - MOCK_FILE_INFO[reduxStatePath] = ` - { - "nodes": { - "id1": { - "id": "id1", - "parent": null, - "children": [], - "inlineObject": { - "field": "fieldOfFirstNode" - }, - "inlineArray": [ - 1, 2, 3 - ], - "internal": { - "type": "TestNode", - "contentDigest": "digest1", - "owner": "test" - } - } - } - } - ` - require(`fs`).__setMockFiles(MOCK_FILE_INFO) - - const { getNode } = require(`../../db/nodes`) - const { findRootNodeAncestor } = require(`../node-tracking`) - const { runQuery } = require(`../../db/nodes`) - const buildNodeTypes = require(`../../schema/build-node-types`) - const { - boundActionCreators: { createNode }, - } = require(`../../redux/actions`) - - createNode( - { - id: `id2`, - parent: null, - children: [], - inlineObject: { - field: `fieldOfSecondNode`, - }, - inlineArray: [1, 2, 3], - internal: { - type: `TestNode`, - contentDigest: `digest2`, - }, + id: `id1`, + parent: null, + children: [], + inlineObject: { + field: `fieldOfFirstNode`, + }, + inlineArray: [1, 2, 3], + internal: { + type: `TestNode`, + contentDigest: `digest1`, + owner: `test`, }, - { - name: `test`, + } +} + +describe(`track root nodes`, () => { + beforeEach(() => { + const nodes = [makeNode()] + store.dispatch({ type: `DELETE_CACHE` }) + for (const node of nodes) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) } - ) - + trackDbNodes() + createNode( + { + id: `id2`, + parent: null, + children: [], 
+ inlineObject: { + field: `fieldOfSecondNode`, + }, + inlineArray: [1, 2, 3], + internal: { + type: `TestNode`, + contentDigest: `digest2`, + }, + }, + { + name: `test`, + } + ) + }) describe(`Tracks nodes read from redux state cache`, () => { it(`Tracks inline objects`, () => { const node = getNode(`id1`) @@ -75,7 +60,6 @@ describe(`Track root nodes`, () => { expect(trackedRootNode).toEqual(node) }) - it(`Tracks inline arrays`, () => { const node = getNode(`id1`) const inlineObject = node.inlineArray @@ -83,7 +67,6 @@ describe(`Track root nodes`, () => { expect(trackedRootNode).toEqual(node) }) - it(`Doesn't track copied objects`, () => { const node = getNode(`id1`) const copiedInlineObject = { ...node.inlineObject } @@ -92,7 +75,6 @@ describe(`Track root nodes`, () => { expect(trackedRootNode).not.toEqual(node) }) }) - describe(`Tracks nodes created using createNode action`, () => { it(`Tracks inline objects`, () => { const node = getNode(`id2`) @@ -103,11 +85,11 @@ describe(`Track root nodes`, () => { }) }) - describe(`Tracks nodes returned by running sift`, () => { + describe(`Tracks nodes returned by queries`, () => { let type beforeAll(async () => { - type = (await buildNodeTypes({})).testNode.nodeObjectType + type = (await nodeTypes.buildAll({})).testNode.nodeObjectType }) it(`Tracks objects when running query without filter`, async () => { diff --git a/packages/gatsby/src/db/__tests__/nodes.js b/packages/gatsby/src/db/__tests__/nodes.js new file mode 100644 index 0000000000000..61fc4437822de --- /dev/null +++ b/packages/gatsby/src/db/__tests__/nodes.js @@ -0,0 +1,347 @@ +const { actions } = require(`../../redux/actions`) +const { getNode, getNodes } = require(`../nodes`) +const { store } = require(`../../redux`) +require(`./fixtures/ensure-loki`)() + +describe(`nodes db tests`, () => { + beforeEach(() => { + store.dispatch({ type: `DELETE_CACHE` }) + }) + it(`deletes previously transformed children nodes when the parent node is updated`, () => { + 
store.dispatch( + actions.createNode( + { + id: `hi`, + children: [], + parent: null, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi-1`, + children: [], + parent: `hi`, + internal: { + contentDigest: `hasdfljds-1`, + type: `Test-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi`), + child: getNode(`hi-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi-1-1`, + children: [], + parent: `hi-1`, + internal: { + contentDigest: `hasdfljds-1-1`, + type: `Test-1-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi-1`), + child: getNode(`hi-1-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds2`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + expect(getNodes()).toHaveLength(1) + }) + + it(`deletes previously transformed children nodes when the parent node is deleted`, () => { + store.dispatch( + actions.createNode( + { + id: `hi`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi2`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi-1`, + children: [], + parent: `hi`, + internal: { + contentDigest: `hasdfljds-1`, + type: `Test-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi`), + child: getNode(`hi-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: 
`hi-1-1`, + children: [], + parent: `hi-1`, + internal: { + contentDigest: `hasdfljds-1-1`, + type: `Test-1-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi-1`), + child: getNode(`hi-1-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.deleteNode( + { + node: getNode(`hi`), + }, + { + name: `tests`, + } + ) + ) + expect(getNodes()).toHaveLength(1) + }) + it(`deletes previously transformed children nodes when parent nodes are deleted`, () => { + store.dispatch( + actions.createNode( + { + id: `hi`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi-1`, + children: [], + parent: `hi`, + internal: { + contentDigest: `hasdfljds-1`, + type: `Test-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi`), + child: getNode(`hi-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createNode( + { + id: `hi-1-1`, + children: [], + parent: `hi-1`, + internal: { + contentDigest: `hasdfljds-1-1`, + type: `Test-1-1`, + }, + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.createParentChildLink( + { + parent: getNode(`hi-1`), + child: getNode(`hi-1-1`), + }, + { + name: `tests`, + } + ) + ) + store.dispatch( + actions.deleteNode( + { node: getNode(`hi`) }, + { + name: `tests`, + } + ) + ) + expect(getNodes()).toHaveLength(0) + }) + it(`allows deleting nodes`, () => { + actions.createNode( + { + id: `hi`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + pickle: true, + deep: { + array: [ + 0, + 1, + { + boom: true, + }, + ], + }, + }, + { + name: `tests`, + } + ) + actions.deleteNode({ + node: getNode(`hi`), + }) + expect(getNode(`hi`)).toBeUndefined() + }) + + it(`warns when using old deleteNode 
signature `, () => { + console.warn = jest.fn() + store.dispatch( + actions.createNode( + { + id: `hi`, + children: [], + parent: `test`, + internal: { + contentDigest: `hasdfljds`, + type: `Test`, + }, + }, + { + name: `tests`, + } + ) + ) + expect(getNode(`hi`)).toMatchObject({ id: `hi` }) + store.dispatch( + actions.deleteNode(`hi`, getNode(`hi`), { + name: `tests`, + }) + ) + expect(getNode(`hi`)).toBeUndefined() + const deprecationNotice = `Calling "deleteNode" with a nodeId is deprecated. Please pass an object containing a full node instead: deleteNode({ node })` + expect(console.warn).toHaveBeenCalledWith(deprecationNotice) + console.warn.mockRestore() + }) + + it(`does not crash when delete node is called on undefined`, () => { + actions.deleteNode(undefined, { + name: `tests`, + }) + expect(getNodes()).toHaveLength(0) + }) +}) diff --git a/packages/gatsby/src/db/loki/__tests__/index.js b/packages/gatsby/src/db/loki/__tests__/index.js new file mode 100644 index 0000000000000..45e0988c4f064 --- /dev/null +++ b/packages/gatsby/src/db/loki/__tests__/index.js @@ -0,0 +1,12 @@ +const { colls, getDb, start } = require(`../index`) + +describe(`db`, () => { + start() + it(`should create system collections`, () => { + const db = getDb() + const nodeMetaColl = db.getCollection(colls.nodeMeta.name) + const nodeTypesColl = db.getCollection(colls.nodeTypes.name) + expect(nodeMetaColl).toBeDefined() + expect(nodeTypesColl).toBeDefined() + }) +}) diff --git a/packages/gatsby/src/db/loki/__tests__/nodes-query-test.js b/packages/gatsby/src/db/loki/__tests__/nodes-query-test.js new file mode 100644 index 0000000000000..fbde8ade5970e --- /dev/null +++ b/packages/gatsby/src/db/loki/__tests__/nodes-query-test.js @@ -0,0 +1,56 @@ +if (process.env.GATSBY_DB_NODES === `loki`) { + const _ = require(`lodash`) + const nodeTypes = require(`../../../schema/build-node-types`) + const { store } = require(`../../../redux`) + const runQuery = require(`../nodes-query`) + const { 
getNodeTypeCollection } = require(`../nodes`) + const lokiDb = require(`../index`) + + function makeNodes() { + return [ + { + id: `1`, + internal: { type: `Test` }, + children: [], + foo: `bar`, + }, + ] + } + + async function runQueries(nodes, n) { + for (const node of nodes) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) + } + const gqlType = nodeTypes.buildNodeObjectType({ + typeName: `Test`, + nodes, + pluginFields: [], + processedTypes: {}, + }) + const queryArgs = { filter: { foo: { eq: `bar` } } } + const args = { gqlType, queryArgs } + return await Promise.all(_.map(new Array(n), () => runQuery(args))) + } + + describe(`query indexing`, () => { + beforeEach(async () => { + await lokiDb.start() + store.dispatch({ type: `DELETE_CACHE` }) + }) + it(`does not create index when query run 1 time`, async () => { + await runQueries(makeNodes(), 1) + const coll = getNodeTypeCollection(`Test`) + expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(false) + }) + + it(`creates index when query run 5 times`, async () => { + await runQueries(makeNodes(), 5) + const coll = getNodeTypeCollection(`Test`) + expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(true) + }) + }) +} else { + it(`skipping loki nodes-query-test`, () => { + expect(true).toEqual(true) + }) +} diff --git a/packages/gatsby/src/db/loki/__tests__/nodes.js b/packages/gatsby/src/db/loki/__tests__/nodes.js new file mode 100644 index 0000000000000..b520d666a05ac --- /dev/null +++ b/packages/gatsby/src/db/loki/__tests__/nodes.js @@ -0,0 +1,30 @@ +const { start, getDb, colls } = require(`../index`) +const { createNode, deleteNode } = require(`../nodes`) + +const type = `Test` +const node = { + id: `1`, + foo: `bar`, + internal: { type: type }, +} + +beforeAll(start) + +describe(`node`, () => { + it(`should create node ID index`, () => { + createNode(node) + const nodeMetaColl = getDb().getCollection(colls.nodeMeta.name) + expect(nodeMetaColl).toBeDefined() + const nodeMeta = 
nodeMetaColl.by(`id`, node.id) + const nodeTypeColl = getDb().getCollection(nodeMeta.typeCollName) + expect(nodeTypeColl).toBeDefined() + expect(nodeTypeColl.name).toEqual(`gatsby:nodeType:${type}`) + }) + + it(`should delete node ID index`, () => { + deleteNode(node) + const nodeMetaColl = getDb().getCollection(colls.nodeMeta.name) + const nodeMeta = nodeMetaColl.by(`id`, node.id) + expect(nodeMeta).toBeUndefined() + }) +}) diff --git a/packages/gatsby/src/db/loki/custom-comparators.js b/packages/gatsby/src/db/loki/custom-comparators.js new file mode 100644 index 0000000000000..78eef70924d70 --- /dev/null +++ b/packages/gatsby/src/db/loki/custom-comparators.js @@ -0,0 +1,218 @@ +// Gatsby has very specific sorting requirements. Specifically, +// undefined > null > everything else. So, if an orderby `desc` is +// specified, then nulls and undefined values are of higher rank. This +// is due to the use of lodash's `orderBy` function in +// `run-sift`. +// +// The below functions are alternate versions of the comparators used +// by loki that match lodash's behavior +// +// Note: This is quite hacky and not officially supported by Loki, but +// works quite well. +// +// The below implementation is a direct copy of Loki, except that +// undefined's rank is 11, and null's rank is 10. Whereas in loki, +// they are both of rank 1 + +function ltHelper(prop1, prop2, equal) { + var cv1, cv2, t1, t2 + + // if one of the params is falsy or strictly true or not equal to itself + // 0, 0.0, "", NaN, null, undefined, not defined, false, true + if ( + !prop1 || + !prop2 || + prop1 === true || + prop2 === true || + prop1 !== prop1 || + prop2 !== prop2 + ) { + switch (prop1) { + case undefined: + t1 = 11 + break + case null: + t1 = 10 + break + case false: + t1 = 3 + break + case true: + t1 = 4 + break + case ``: + t1 = 5 + break + // if strict equal probably 0 so sort higher, otherwise + // probably NaN so sort lower than even null + default: + t1 = prop1 === prop1 ? 
9 : 0 + break + } + + switch (prop2) { + case undefined: + t2 = 11 + break + case null: + t2 = 10 + break + case false: + t2 = 3 + break + case true: + t2 = 4 + break + case ``: + t2 = 5 + break + default: + t2 = prop2 === prop2 ? 9 : 0 + break + } + + // one or both is edge case + if (t1 !== 9 || t2 !== 9) { + return t1 === t2 ? equal : t1 < t2 + } + } + + // if both are numbers (string encoded or not), compare as numbers + cv1 = Number(prop1) + cv2 = Number(prop2) + + if (cv1 === cv1 && cv2 === cv2) { + if (cv1 < cv2) return true + if (cv1 > cv2) return false + return equal + } + + if (cv1 === cv1 && cv2 !== cv2) { + return true + } + + if (cv2 === cv2 && cv1 !== cv1) { + return false + } + + if (prop1 < prop2) return true + if (prop1 > prop2) return false + if (prop1 == prop2) return equal + + // not strict equal nor less than nor gt so must be mixed types, convert to string and use that to compare + cv1 = prop1.toString() + cv2 = prop2.toString() + + if (cv1 < cv2) { + return true + } + + if (cv1 == cv2) { + return equal + } + + return false +} + +function gtHelper(prop1, prop2, equal) { + var cv1, cv2, t1, t2 + + // 'falsy' and Boolean handling + if ( + !prop1 || + !prop2 || + prop1 === true || + prop2 === true || + prop1 !== prop1 || + prop2 !== prop2 + ) { + switch (prop1) { + case undefined: + t1 = 11 + break + case null: + t1 = 10 + break + case false: + t1 = 3 + break + case true: + t1 = 4 + break + case ``: + t1 = 5 + break + // NaN 0 + default: + t1 = prop1 === prop1 ? 9 : 0 + break + } + + switch (prop2) { + case undefined: + t2 = 11 + break + case null: + t2 = 10 + break + case false: + t2 = 3 + break + case true: + t2 = 4 + break + case ``: + t2 = 5 + break + default: + t2 = prop2 === prop2 ? 9 : 0 + break + } + + // one or both is edge case + if (t1 !== 9 || t2 !== 9) { + return t1 === t2 ? 
equal : t1 > t2 + } + } + + // if both are numbers (string encoded or not), compare as numbers + cv1 = Number(prop1) + cv2 = Number(prop2) + if (cv1 === cv1 && cv2 === cv2) { + if (cv1 > cv2) return true + if (cv1 < cv2) return false + return equal + } + + if (cv1 === cv1 && cv2 !== cv2) { + return false + } + + if (cv2 === cv2 && cv1 !== cv1) { + return true + } + + if (prop1 > prop2) return true + if (prop1 < prop2) return false + if (prop1 == prop2) return equal + + // not strict equal nor less than nor gt so must be dates or mixed types + // convert to string and use that to compare + cv1 = prop1.toString() + cv2 = prop2.toString() + + if (cv1 > cv2) { + return true + } + + if (cv1 == cv2) { + return equal + } + + return false +} + +module.exports = { + ltHelper, + gtHelper, +} diff --git a/packages/gatsby/src/db/loki/index.js b/packages/gatsby/src/db/loki/index.js new file mode 100644 index 0000000000000..09ab744376788 --- /dev/null +++ b/packages/gatsby/src/db/loki/index.js @@ -0,0 +1,126 @@ +const _ = require(`lodash`) +const fs = require(`fs-extra`) +const path = require(`path`) +const loki = require(`@moocar/lokijs`) +const uuidv4 = require(`uuid/v4`) +const customComparators = require(`./custom-comparators`) + +// Ensure sorting behavior matches old lodash `orderBy` +// implementation. See `custom-comparators.js` for why. +loki.Comparators.lt = customComparators.ltHelper +loki.Comparators.gt = customComparators.gtHelper + +// Loki is a document store with the same semantics as mongo. This +// means there are no tables or relationships. Just a bunch of +// collections, each with objects. +// +// Gatsby stores nodes in collections by splitting them up by their +// `node.internal.type`. All nodes of a particular type go in 1 +// collection. The below `colls` object contains the metadata for +// these collections, and the "meta collections" used to track them. +// +// You won't use these directly. They are used by the collection +// functions in `./nodes.js`. 
E.g `getTypeCollName()` and +// `getNodeTypeCollection` +const colls = { + // Each object has keys `id` and `typeCollName`. It's a way of + // quickly looking up the collection that a node is contained in. + // E.g { id: `someNodeId`, typeCollName: `gatsby:nodeType:myType` } + nodeMeta: { + name: `gatsby:nodeMeta`, + options: { + unique: [`id`], + indices: [`id`], + }, + }, + // The list of all node type collections. Each object has keys + // `type` and `collName` so you can quickly look up the collection + // name for a node type. + // e.g { type: `myType`, collName: `gatsby:nodeType:myType` } + nodeTypes: { + name: `gatsby:nodeTypes`, + options: { + unique: [`type`, `collName`], + indices: [`type`], + }, + }, +} + +// Must be set using `start()` +let db + +/** + * Ensures that the collections that support nodes have been + * created. See `colls` var in this file + */ +function ensureNodeCollections(db) { + _.forEach(colls, collInfo => { + const { name, options } = collInfo + db.addCollection(name, options) + }) +} + +function startFileDb(saveFile) { + return new Promise((resolve, reject) => { + const dbOptions = { + autoload: true, + autoloadCallback: err => { + if (err) { + reject(err) + } else { + resolve() + } + }, + autosave: true, + autosaveInterval: 1000, + } + db = new loki(saveFile, dbOptions) + }) +} + +async function startInMemory() { + // Use uuid purely for a random name + db = new loki(uuidv4()) +} + +/** + * Starts a loki database. If the file already exists, it will be + * loaded as the database state. If not, a new database will be + * created. If `saveFile` is omitted, an in-memory DB will be created. + * + * @param {string} saveFile on disk file that the database will be + * saved and loaded from. 
If this is omitted, an in-memory database + * will be created instead + * @returns {Promise} promise that is resolved once the database and + * the existing state has been loaded (if there was an existing + * saveFile) + */ +async function start({ saveFile } = {}) { + if (saveFile && !_.isString(saveFile)) { + throw new Error(`saveFile must be a path`) + } + if (saveFile) { + const saveDir = path.dirname(saveFile) + await fs.ensureDir(saveDir) + await startFileDb(saveFile) + } else { + await startInMemory() + } + ensureNodeCollections(db) +} + +/** + * Returns a reference to the database. If undefined, the db has not been + * initalized yet. Call `start()` + * + * @returns {Object} database, or undefined + */ +function getDb() { + return db +} + +module.exports = { + start, + getDb, + colls, +} diff --git a/packages/gatsby/src/db/loki/nodes-query.js b/packages/gatsby/src/db/loki/nodes-query.js new file mode 100644 index 0000000000000..3620b08c05cb3 --- /dev/null +++ b/packages/gatsby/src/db/loki/nodes-query.js @@ -0,0 +1,309 @@ +const _ = require(`lodash`) +const prepareRegex = require(`../../utils/prepare-regex`) +const { getNodeTypeCollection } = require(`./nodes`) +const sift = require(`sift`) +const { emitter } = require(`../../redux`) + +// Cleared on DELETE_CACHE +const fieldUsages = {} +const FIELD_INDEX_THRESHOLD = 5 + +emitter.on(`DELETE_CACHE`, () => { + for (var field in fieldUsages) { + delete fieldUsages[field] + } +}) + +// Takes a raw graphql filter and converts it into a mongo-like args +// object that can be understood by the `sift` library. E.g `eq` +// becomes `$eq` +function siftifyArgs(object) { + const newObject = {} + _.each(object, (v, k) => { + if (_.isPlainObject(v)) { + if (k === `elemMatch`) { + k = `$elemMatch` + } + newObject[k] = siftifyArgs(v) + } else { + // Compile regex first. 
+ if (k === `regex`) { + newObject[`$regex`] = prepareRegex(v) + } else if (k === `glob`) { + const Minimatch = require(`minimatch`).Minimatch + const mm = new Minimatch(v) + newObject[`$regex`] = mm.makeRe() + } else { + newObject[`$${k}`] = v + } + } + }) + return newObject +} + +// filter nodes using the `sift` library. But isn't this a loki query +// file? Yes, but we need to support all functionality provided by +// `run-sift`, and there are some operators that loki can't +// support. Like `elemMatch`, so for those fields, we fall back to +// sift +function runSift(nodes, query) { + if (nodes) { + const siftQuery = { + $elemMatch: siftifyArgs(query), + } + return sift(siftQuery, nodes) + } else { + return null + } +} + +// Takes a raw graphql filter and converts it into a mongo-like args +// object that can be understood by loki. E.g `eq` becomes +// `$eq`. gqlFilter should be the raw graphql filter returned from +// graphql-js. e.g gqlFilter: +// +// { +// internal: { +// type: { +// eq: "TestNode" +// }, +// content: { +// glob: "et" +// } +// }, +// id: { +// glob: "12*" +// } +// } +// +// would return +// +// { +// internal: { +// type: { +// $eq: "TestNode" // append $ to eq +// }, +// content: { +// $regex: new MiniMatch(v) // convert glob to regex +// } +// }, +// id: { +// $regex: // as above +// } +// } +function toMongoArgs(gqlFilter, lastFieldType) { + const mongoArgs = {} + _.each(gqlFilter, (v, k) => { + if (_.isPlainObject(v)) { + if (k === `elemMatch`) { + // loki doesn't support elemMatch, so use sift (see runSift + // comment above) + mongoArgs[`$where`] = obj => { + const result = runSift(obj, v) + return result && result.length > 0 + } + } else { + const gqlFieldType = lastFieldType.getFields()[k].type + mongoArgs[k] = toMongoArgs(v, gqlFieldType) + } + } else { + // Compile regex first. 
+ if (k === `regex`) { + mongoArgs[`$regex`] = prepareRegex(v) + } else if (k === `glob`) { + const Minimatch = require(`minimatch`).Minimatch + const mm = new Minimatch(v) + mongoArgs[`$regex`] = mm.makeRe() + } else if ( + k === `in` && + lastFieldType && + lastFieldType.constructor.name === `GraphQLList` + ) { + mongoArgs[`$containsAny`] = v + } else if ( + k === `nin` && + lastFieldType.constructor.name === `GraphQLList` + ) { + mongoArgs[`$containsNone`] = v + } else if (k === `ne` && v === null) { + mongoArgs[`$ne`] = undefined + } else if (k === `nin` && lastFieldType.name === `Boolean`) { + mongoArgs[`$nin`] = v.concat([false]) + } else { + mongoArgs[`$${k}`] = v + } + } + }) + return mongoArgs +} + +// Converts a nested mongo args object into a dotted notation. acc +// (accumulator) must be a reference to an empty object. The converted +// fields will be added to it. E.g +// +// { +// internal: { +// type: { +// $eq: "TestNode" +// }, +// content: { +// $regex: new MiniMatch(v) +// } +// }, +// id: { +// $regex: newMiniMatch(v) +// } +// } +// +// After execution, acc would be: +// +// { +// "internal.type": { +// $eq: "TestNode" +// }, +// "internal.content": { +// $regex: new MiniMatch(v) +// }, +// "id": { +// $regex: // as above +// } +// } +function dotNestedFields(acc, o, path = ``) { + if (_.isPlainObject(o)) { + if (_.isPlainObject(_.sample(o))) { + _.forEach(o, (v, k) => { + dotNestedFields(acc, v, path + `.` + k) + }) + } else { + acc[_.trimStart(path, `.`)] = o + } + } +} + +// The query language that Gatsby has used since day 1 is `sift`. Both +// sift and loki are mongo-like query languages, but they have some +// subtle differences. One is that in sift, a nested filter such as +// `{foo: {bar: {ne: true} } }` will return true if the foo field +// doesn't exist, is null, or bar is null. Whereas loki will return +// false if the foo field doesn't exist or is null. 
This ensures that +// loki queries behave like sift +function fixNeTrue(flattenedFields) { + return _.transform(flattenedFields, (result, v, k) => { + if (v[`$ne`] === true) { + const s = k.split(`.`) + if (s.length > 1) { + result[s[0]] = { + $or: [ + { + $exists: false, + }, + { + $where: obj => obj === null || obj[s[1]] !== true, + }, + ], + } + return result + } + } + result[k] = v + return result + }) +} + +// Converts graphQL args to a loki filter +function convertArgs(gqlArgs, gqlType) { + const dottedFields = {} + dotNestedFields(dottedFields, toMongoArgs(gqlArgs.filter, gqlType)) + return fixNeTrue(dottedFields) +} + +// Converts graphql Sort args into the form expected by loki, which is +// a vector where the first value is a field name, and the second is a +// boolean `isDesc`. Nested fields delimited by `___` are replaced by +// periods. E.g +// +// { +// fields: [ `frontmatter___date`, `id` ], +// order: `desc` +// } +// +// would return +// +// [ [ `frontmatter.date`, true ], [ `id`, false ] ] +// +// Note that the GraphQL Sort API provided by Gatsby doesn't allow the +// order to be specified per field. The sift implementation uses +// lodash `orderBy`, but only applies the sort order to the first +// field. So we do the same here +function toSortFields(sortArgs) { + const { fields, order } = sortArgs + const lokiSortFields = [] + for (let i = 0; i < fields.length; i++) { + const dottedField = fields[i].replace(/___/g, `.`) + const isDesc = i === 0 ? _.lowerCase(order) === `desc` : false + lokiSortFields.push([dottedField, isDesc]) + } + return lokiSortFields +} + +// Every time we run a query, we increment a counter for each of its +// fields, so that we can determine which fields are used the +// most. Any time a field is seen more than `FIELD_INDEX_THRESHOLD` +// times, we create a loki index so that future queries with that +// field will execute faster. 
+function ensureFieldIndexes(coll, lokiArgs) { + _.forEach(lokiArgs, (v, fieldName) => { + // Increment the usages of the field + _.update(fieldUsages, fieldName, n => (n ? n + 1 : 1)) + // If we have crossed the threshold, then create the index + if (_.get(fieldUsages, fieldName) === FIELD_INDEX_THRESHOLD) { + // Loki ensures that this is a noop if index already exists. E.g + // if it was previously added via a sort field + coll.ensureIndex(fieldName) + } + }) +} + +/** + * Runs the graphql query over the loki nodes db. + * + * @param {Object} args. Object with: + * + * {Object} gqlType: built during `./build-node-types.js` + * + * {Object} queryArgs: The raw graphql query as a js object. E.g `{ + * filter: { fields { slug: { eq: "/somepath" } } } }` + * + * {Object} context: The context from the QueryJob + * + * {boolean} firstOnly: Whether to return the first found match, or + * all matching results + * + * @returns {promise} A promise that will eventually be resolved with + * a collection of matching objects (even if `firstOnly` is true) + */ +async function runQuery({ gqlType, queryArgs, context = {}, firstOnly }) { + // Clone args as for some reason graphql-js removes the constructor + // from nested objects which breaks a check in sift.js. + const gqlArgs = JSON.parse(JSON.stringify(queryArgs)) + const lokiArgs = convertArgs(gqlArgs, gqlType) + const coll = getNodeTypeCollection(gqlType.name) + ensureFieldIndexes(coll, lokiArgs) + let chain = coll.chain().find(lokiArgs, firstOnly) + + if (gqlArgs.sort) { + const sortFields = toSortFields(gqlArgs.sort) + + // Create an index for each sort field. Indexing requires sorting + // so we lose nothing by ensuring an index is added for each sort + // field. 
Loki ensures this is a noop if the index already exists + for (const sortField of sortFields) { + coll.ensureIndex(sortField[0]) + } + chain = chain.compoundsort(sortFields) + } + + return chain.data() +} + +module.exports = runQuery diff --git a/packages/gatsby/src/db/loki/nodes.js b/packages/gatsby/src/db/loki/nodes.js index efe043758165c..9407c552100a7 100644 --- a/packages/gatsby/src/db/loki/nodes.js +++ b/packages/gatsby/src/db/loki/nodes.js @@ -1,12 +1,341 @@ -function notSupported() { - throw new Error(`Loki not supported yet`) +const _ = require(`lodash`) +const invariant = require(`invariant`) +const { getDb, colls } = require(`./index`) + +///////////////////////////////////////////////////////////////////// +// Node collection metadata +///////////////////////////////////////////////////////////////////// + +function makeTypeCollName(type) { + return `gatsby:nodeType:${type}` +} + +/** + * Creates a collection that will contain nodes of a certain type. The + * name of the collection for type `MyType` will be something like + * `gatsby:nodeType:MyType` (see `makeTypeCollName`) + */ +function createNodeTypeCollection(type) { + const collName = makeTypeCollName(type) + const nodeTypesColl = getDb().getCollection(colls.nodeTypes.name) + invariant(nodeTypesColl, `Collection ${colls.nodeTypes.name} should exist`) + nodeTypesColl.insert({ type, collName }) + // TODO what if `addCollection` fails? We will have inserted into + // nodeTypesColl but no collection will exist. 
Need to make this + // into a transaction + const options = { + unique: [`id`], + indices: [`id`], + disableMeta: true, + } + const coll = getDb().addCollection(collName, options) + return coll +} + +/** + * Returns the name of the collection that contains nodes of the + * specified type, where type is the node's `node.internal.type` + */ +function getTypeCollName(type) { + const nodeTypesColl = getDb().getCollection(colls.nodeTypes.name) + invariant(nodeTypesColl, `Collection ${colls.nodeTypes.name} should exist`) + let nodeTypeInfo = nodeTypesColl.by(`type`, type) + return nodeTypeInfo ? nodeTypeInfo.collName : undefined +} + +/** + * Returns a reference to the collection that contains nodes of the + * specified type, where type is the node's `node.internal.type` + */ +function getNodeTypeCollection(type) { + const typeCollName = getTypeCollName(type) + let coll + if (typeCollName) { + coll = getDb().getCollection(typeCollName) + invariant( + coll, + `Type [${type}] Collection doesn't exist for nodeTypeInfo: [${typeCollName}]` + ) + return coll + } else { + return undefined + } +} + +/** + * Deletes all empty node type collections, unless `force` is true, in + * which case it deletes the collections even if they have nodes in + * them + */ +function deleteNodeTypeCollections(force = false) { + const nodeTypesColl = getDb().getCollection(colls.nodeTypes.name) + // find() returns all objects in collection + const nodeTypes = nodeTypesColl.find() + for (const nodeType of nodeTypes) { + let coll = getDb().getCollection(nodeType.collName) + if (coll.count() === 0 || force) { + getDb().removeCollection(coll.name) + nodeTypesColl.remove(nodeType) + } + } +} + +/** + * Deletes all nodes from all the node type collections, including the + * id -> type metadata. 
There will be no nodes related data in loki
+ * after this is called
+ */
+function deleteAll() {
+  const db = getDb()
+  if (db) {
+    deleteNodeTypeCollections(true)
+    db.getCollection(colls.nodeMeta.name).clear()
+  }
+}
+
+/////////////////////////////////////////////////////////////////////
+// Queries
+/////////////////////////////////////////////////////////////////////
+
+/**
+ * Returns the node with `id` == id, or null if not found
+ */
+function getNode(id) {
+  if (!id) {
+    return null
+  }
+  // First, find out which collection the node is in
+  const nodeMetaColl = getDb().getCollection(colls.nodeMeta.name)
+  invariant(nodeMetaColl, `nodeMeta collection should exist`)
+  const nodeMeta = nodeMetaColl.by(`id`, id)
+  if (nodeMeta) {
+    // Now get the collection and query it by the `id` field, which
+    // has an index on it
+    const { typeCollName } = nodeMeta
+    const typeColl = getDb().getCollection(typeCollName)
+    invariant(
+      typeColl,
+      `type collection ${typeCollName} referenced by nodeMeta but doesn't exist`
+    )
+    return typeColl.by(`id`, id)
+  } else {
+    return undefined
+  }
+}
+
+/**
+ * Returns all nodes of a type (where `typeName ==
+ * node.internal.type`). This is an O(1) operation since nodes are
+ * already stored in separate collections by type
+ */
+function getNodesByType(typeName) {
+  invariant(typeName, `typeName is null`)
+  const collName = getTypeCollName(typeName)
+  const coll = getDb().getCollection(collName)
+  if (!coll) return []
+  return coll.data
+}
+
+/**
+ * Returns the collection of all nodes. This should be deprecated and
+ * `getNodesByType` should be used instead.
Or at least where possible
+ */
+function getNodes() {
+  const nodeTypes = getDb().getCollection(colls.nodeTypes.name).data
+  return _.flatMap(nodeTypes, nodeType => getNodesByType(nodeType.type))
+}
+
+/**
+ * Looks up the node by id, records a dependency between the node and
+ * the path, and then returns the node
+ *
+ * @param {string} id node id to lookup
+ * @param {string} path the page path to record a node dependency
+ * against
+ * @returns {Object} node or undefined if not found
+ */
+function getNodeAndSavePathDependency(id, path) {
+  invariant(id, `id is null`)
+  invariant(path, `path is null`)
+  const createPageDependency = require(`../../redux/actions/add-page-dependency`)
+  const node = getNode(id)
+  createPageDependency({ path, nodeId: id })
+  return node
+}
+
+/**
+ * Determine if node has changed (by comparing its
+ * `internal.contentDigest`)
+ *
+ * @param {string} id
+ * @param {string} digest
+ * @returns {boolean}
+ */
+function hasNodeChanged(id, digest) {
+  const node = getNode(id)
+  if (!node) {
+    return true
+  } else {
+    return node.internal.contentDigest !== digest
+  }
+}
+
+/////////////////////////////////////////////////////////////////////
+// Create/Update/Delete
+/////////////////////////////////////////////////////////////////////
+
+/**
+ * Creates a node in the DB. Will create a collection for the node
+ * type if one hasn't been created yet
+ *
+ * @param {Object} node The node to add.
Must have an `id` and + * `internal.type` + */ +function createNode(node, oldNode) { + invariant(node.internal, `node has no "internal" field`) + invariant(node.internal.type, `node has no "internal.type" field`) + invariant(node.id, `node has no "id" field`) + + const type = node.internal.type + + // Loki doesn't provide "upsert", so if the node already exists, we + // delete and then create it + if (oldNode) { + deleteNode(oldNode) + } + + let nodeTypeColl = getNodeTypeCollection(type) + if (!nodeTypeColl) { + nodeTypeColl = createNodeTypeCollection(type) + } + + const nodeMetaColl = getDb().getCollection(colls.nodeMeta.name) + invariant(nodeMetaColl, `Collection ${colls.nodeMeta.name} should exist`) + nodeMetaColl.insert({ id: node.id, typeCollName: nodeTypeColl.name }) + // TODO what if this insert fails? We will have inserted the id -> + // collName mapping, but there won't be any nodes in the type + // collection. Need to create a transaction around this + return nodeTypeColl.insert(node) +} + +/** + * Updates a node in the DB. The contents of `node` will completely + * overwrite value in the DB. Note, `node` must be a loki node. i.e it + * has `$loki` and `meta` fields. + * + * @param {Object} node The new node information. This should be all + * the node information. Not just changes + */ +function updateNode(node) { + invariant(node.internal, `node has no "internal" field`) + invariant(node.internal.type, `node has no "internal.type" field`) + invariant(node.id, `node has no "id" field`) + + const type = node.internal.type + + let coll = getNodeTypeCollection(type) + invariant(coll, `${type} collection doesn't exist. When trying to update`) + coll.update(node) +} + +/** + * Deletes a node from its type collection and removes its id -> + * collName mapping. Function is idempotent. If the node has already + * been deleted, this is a noop. + * + * @param {Object} the node to delete. 
Must have an `id` and + * `internal.type` + */ +function deleteNode(node) { + invariant(node.internal, `node has no "internal" field`) + invariant(node.internal.type, `node has no "internal.type" field`) + invariant(node.id, `node has no "id" field`) + + const type = node.internal.type + + let nodeTypeColl = getNodeTypeCollection(type) + if (!nodeTypeColl) { + invariant( + nodeTypeColl, + `${type} collection doesn't exist. When trying to delete` + ) + } + + if (nodeTypeColl.by(`id`, node.id)) { + const nodeMetaColl = getDb().getCollection(colls.nodeMeta.name) + invariant(nodeMetaColl, `Collection ${colls.nodeMeta.name} should exist`) + nodeMetaColl.findAndRemove({ id: node.id }) + // TODO What if this `remove()` fails? We will have removed the id + // -> collName mapping, but not the actual node in the + // collection. Need to make this into a transaction + nodeTypeColl.remove(node) + } + // idempotent. Do nothing if node wasn't already in DB +} + +/** + * deprecated + */ +function deleteNodes(nodes) { + for (const node of nodes) { + deleteNode(node) + } } +///////////////////////////////////////////////////////////////////// +// Reducer +///////////////////////////////////////////////////////////////////// + +function reducer(state = new Map(), action) { + switch (action.type) { + case `DELETE_CACHE`: + deleteAll() + return null + + case `CREATE_NODE`: { + createNode(action.payload, action.oldNode) + return null + } + + case `ADD_FIELD_TO_NODE`: + case `ADD_CHILD_NODE_TO_PARENT_NODE`: + updateNode(action.payload) + return null + + case `DELETE_NODE`: { + deleteNode(action.payload) + return null + } + + case `DELETE_NODES`: { + deleteNodes(action.payload) + return null + } + + default: + return null + } +} + +///////////////////////////////////////////////////////////////////// +// Exports +///////////////////////////////////////////////////////////////////// + module.exports = { - getNodes: notSupported(), - getNode: notSupported(), - getNodesByType: 
notSupported(), - hasNodeChanged: notSupported(), - loadNodeContent: notSupported(), - getNodeAndSavePathDependency: notSupported(), + getNodeTypeCollection, + + getNodes, + getNode, + getNodesByType, + hasNodeChanged, + getNodeAndSavePathDependency, + + createNode, + updateNode, + deleteNode, + + deleteNodeTypeCollections, + deleteAll, + + reducer, } diff --git a/packages/gatsby/src/db/node-tracking.js b/packages/gatsby/src/db/node-tracking.js index 8d9905cbbfe5c..fa0e5587facb5 100644 --- a/packages/gatsby/src/db/node-tracking.js +++ b/packages/gatsby/src/db/node-tracking.js @@ -1,5 +1,5 @@ const _ = require(`lodash`) -const { getNode, getNodes } = require(`../db/nodes`) +const { getNode, getNodes } = require(`./nodes`) /** * Map containing links between inline objects or arrays @@ -57,7 +57,8 @@ const findRootNodeAncestor = (obj, predicate = null) => { while ( (!predicate || !predicate(rootNode)) && (rootNodeId = getRootNodeId(rootNode) || rootNode.parent) && - (getNode(rootNode.parent) !== undefined || getNode(rootNodeId)) && + ((rootNode.parent && getNode(rootNode.parent) !== undefined) || + getNode(rootNodeId)) && whileCount < 101 ) { if (rootNodeId) { @@ -77,14 +78,15 @@ const findRootNodeAncestor = (obj, predicate = null) => { return !predicate || predicate(rootNode) ? 
rootNode : null } +function trackDbNodes() { + _.each(getNodes(), node => { + trackInlineObjectsInRootNode(node) + }) +} + /** * @callback nodePredicate * @param {Node} node Node that is examined */ - exports.findRootNodeAncestor = findRootNodeAncestor - -// Track nodes that are already in store -_.each(getNodes(), node => { - trackInlineObjectsInRootNode(node) -}) +exports.trackDbNodes = trackDbNodes diff --git a/packages/gatsby/src/db/nodes-query.js b/packages/gatsby/src/db/nodes-query.js new file mode 100644 index 0000000000000..09273499d9ba4 --- /dev/null +++ b/packages/gatsby/src/db/nodes-query.js @@ -0,0 +1,52 @@ +const backend = process.env.GATSBY_DB_NODES || `redux` +const lokiRunQuery = require(`./loki/nodes-query`) +const siftRunQuery = require(`../redux/run-sift`) +const lazyFields = require(`../schema/lazy-fields`) + +function chooseQueryEngine(args) { + const { queryArgs, gqlType } = args + const { filter } = queryArgs + if (backend === `loki` && !lazyFields.contains(filter, gqlType)) { + return lokiRunQuery + } else { + return siftRunQuery + } +} + +/** + * Runs the query over all nodes of type. It must first select the + * appropriate query engine. Sift, or Loki. Sift is used by default, + * or if the query includes "lazy fields", those that need to be + * resolved before being queried. These could be either plugin fields, + * i.e those declared by plugins during the + * `setFieldsOnGraphQLNodeType` API, or they could be linked + * fields. See `../redux/run-sift.js` for more. + * + * If the query does *not* include lazy fields, and environment + * variable `GATSBY_DB_NODES` = `loki` then we can perform a much + * faster pure data query using loki. See `loki/nodes-query.js` for + * more. + * + * @param {Object} args. Object with: + * + * {Object} gqlType: built during `./build-node-types.js` + * + * {Object} queryArgs: The raw graphql query as a js object. 
E.g `{ + * filter: { fields { slug: { eq: "/somepath" } } } }` + * + * {Object} context: The context from the QueryJob + * + * {boolean} firstOnly: Whether to return the first found match, or + * all matching result. + * + * @returns {promise} A promise that will eventually be resolved with + * a collection of matching objects (even if `firstOnly` is true, in + * which case it will be a collection of length 1 or zero) + */ +function run(args) { + const queryFunction = chooseQueryEngine(args) + + return queryFunction(args) +} + +module.exports.run = run diff --git a/packages/gatsby/src/db/nodes.js b/packages/gatsby/src/db/nodes.js index 343474c9ec42f..281c3049597c3 100644 --- a/packages/gatsby/src/db/nodes.js +++ b/packages/gatsby/src/db/nodes.js @@ -1,3 +1,6 @@ +const _ = require(`lodash`) +const { store } = require(`../redux`) + const backend = process.env.GATSBY_DB_NODES || `redux` let nodesDb switch (backend) { @@ -13,52 +16,35 @@ switch (backend) { ) } -module.exports = nodesDb - -///////////////////////////////////////////////////////////////////// -// Run Query -///////////////////////////////////////////////////////////////////// - -function chooseQueryEngine(queryArgs) { - if (backend === `loki`) { - throw new Error(`loki not supported yet`) - } else { - return require(`../redux/run-sift`) - } -} +module.exports = { ...nodesDb } +module.exports.backend = backend /** - * Runs the query over all nodes of type. It must first select the - * appropriate query engine. Sift, or Loki. Sift is used if the query - * includes plugin fields, i.e those declared by plugins during the - * `setFieldsOnGraphQLNodeType` API. If it does, then we must iterate - * through all nodes calling the plugin field to make sure it's - * realized, then we can perform the query. See `query-sift.js` for - * more. - * - * If the query does *not* include plugin fields, then we can perform - * a much faster pure data query using loki. See `query-loki.js` for - * more. 
- * - * @param {Object} args. Object with: + * Get content for a node from the plugin that created it. * - * {Object} gqlType: built during `./build-node-types.js` - * - * {Object} queryArgs: The raw graphql query as a js object. E.g `{ - * filter: { fields { slug: { eq: "/somepath" } } } }` - * - * {Object} context: The context from the QueryJob - * - * {boolean} firstOnly: Whether to return the first found match, or - * all matching result. - * - * @returns {promise} A promise that will eventually be resolved with - * a collection of matching objects (even if `firstOnly` is true) + * @param {Object} node + * @returns {promise} */ -function runQuery(args) { - const queryFunction = chooseQueryEngine(args.queryArgs) +module.exports.loadNodeContent = node => { + if (_.isString(node.internal.content)) { + return Promise.resolve(node.internal.content) + } else { + return new Promise(resolve => { + // Load plugin's loader function + const plugin = store + .getState() + .flattenedPlugins.find(plug => plug.name === node.internal.owner) + const { loadNodeContent } = require(plugin.resolve) + if (!loadNodeContent) { + throw new Error( + `Could not find function loadNodeContent for plugin ${plugin.name}` + ) + } - return queryFunction(args) + return loadNodeContent(node).then(content => { + // TODO update node's content field here. 
+ resolve(content) + }) + }) + } } - -module.exports.runQuery = runQuery diff --git a/packages/gatsby/src/redux/__tests__/__snapshots__/nodes.js.snap b/packages/gatsby/src/redux/__tests__/__snapshots__/nodes.js.snap index a73ef6c730b9b..18ba526c49176 100644 --- a/packages/gatsby/src/redux/__tests__/__snapshots__/nodes.js.snap +++ b/packages/gatsby/src/redux/__tests__/__snapshots__/nodes.js.snap @@ -24,6 +24,7 @@ Map { exports[`Create and update nodes allows creating nodes 1`] = ` Object { + "oldNode": undefined, "payload": Object { "children": Array [], "id": "hi", @@ -126,16 +127,3 @@ exports[`Create and update nodes throws error if a node sets a value on "fields" \\"name\\": \\"pluginA\\" }" `; - -exports[`Create and update nodes warns when using old deleteNode signature 1`] = ` -Object { - "children": Array [], - "id": "hi", - "internal": Object { - "contentDigest": "hasdfljds", - "owner": "tests", - "type": "Test", - }, - "parent": "test", -} -`; diff --git a/packages/gatsby/src/redux/__tests__/nodes.js b/packages/gatsby/src/redux/__tests__/nodes.js index 95e380749d1ed..f2954a7974678 100644 --- a/packages/gatsby/src/redux/__tests__/nodes.js +++ b/packages/gatsby/src/redux/__tests__/nodes.js @@ -1,9 +1,15 @@ +const Redux = require(`redux`) const { actions } = require(`../actions`) -const { store } = require(`../index`) -const { getNode } = require(`../nodes`) const nodeReducer = require(`../reducers/nodes`) const nodeTouchedReducer = require(`../reducers/nodes-touched`) +jest.mock(`../../db/nodes`) +jest.mock(`../nodes`) + +const store = Redux.createStore( + Redux.combineReducers({ nodeReducer, nodeTouchedReducer }), + {} +) describe(`Create and update nodes`, () => { beforeEach(() => { store.dispatch({ @@ -84,349 +90,6 @@ describe(`Create and update nodes`, () => { expect(state.get(`hi`).deep2.boom).toEqual(`foo`) }) - it(`deletes previously transformed children nodes when the parent node is updated`, () => { - store.dispatch( - actions.createNode( - { - id: 
`hi`, - children: [], - parent: null, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.createNode( - { - id: `hi-1`, - children: [], - parent: `hi`, - internal: { - contentDigest: `hasdfljds-1`, - type: `Test-1`, - }, - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.createParentChildLink( - { - parent: store.getState().nodes.get(`hi`), - child: store.getState().nodes.get(`hi-1`), - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.createNode( - { - id: `hi-1-1`, - children: [], - parent: `hi-1`, - internal: { - contentDigest: `hasdfljds-1-1`, - type: `Test-1-1`, - }, - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.createParentChildLink( - { - parent: store.getState().nodes.get(`hi-1`), - child: store.getState().nodes.get(`hi-1-1`), - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.createNode( - { - id: `hi`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds2`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - expect(store.getState().nodes.size).toEqual(1) - }) - - it(`deletes previously transformed children nodes when the parent node is deleted`, () => { - store.dispatch( - actions.createNode( - { - id: `hi`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createNode( - { - id: `hi2`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createNode( - { - id: `hi-1`, - children: [], - parent: `hi`, - internal: { - contentDigest: `hasdfljds-1`, - type: `Test-1`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createParentChildLink( - { - parent: store.getState().nodes.get(`hi`), - child: getNode(`hi-1`), - }, - { - name: `tests`, 
- } - ) - ) - store.dispatch( - actions.createNode( - { - id: `hi-1-1`, - children: [], - parent: `hi-1`, - internal: { - contentDigest: `hasdfljds-1-1`, - type: `Test-1-1`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createParentChildLink( - { - parent: getNode(`hi-1`), - child: getNode(`hi-1-1`), - }, - { - name: `tests`, - } - ) - ) - - store.dispatch( - actions.deleteNode( - { - node: getNode(`hi`), - }, - { - name: `tests`, - } - ) - ) - expect(store.getState().nodes.size).toEqual(1) - }) - - it(`deletes previously transformed children nodes when parent nodes are deleted`, () => { - store.dispatch( - actions.createNode( - { - id: `hi`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createNode( - { - id: `hi-1`, - children: [], - parent: `hi`, - internal: { - contentDigest: `hasdfljds-1`, - type: `Test-1`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createParentChildLink( - { - parent: getNode(`hi`), - child: getNode(`hi-1`), - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createNode( - { - id: `hi-1-1`, - children: [], - parent: `hi-1`, - internal: { - contentDigest: `hasdfljds-1-1`, - type: `Test-1-1`, - }, - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.createParentChildLink( - { - parent: getNode(`hi-1`), - child: getNode(`hi-1-1`), - }, - { - name: `tests`, - } - ) - ) - store.dispatch( - actions.deleteNode( - { node: getNode(`hi`) }, - { - name: `tests`, - } - ) - ) - expect(store.getState().nodes.size).toEqual(0) - }) - - it(`allows deleting nodes`, () => { - actions.createNode( - { - id: `hi`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - pickle: true, - deep: { - array: [ - 0, - 1, - { - boom: true, - }, - ], - }, - }, - { - name: `tests`, - } - ) - actions.deleteNode({ - node: getNode(`hi`), - 
}) - expect(getNode(`hi`)).toBeUndefined() - }) - - it(`warns when using old deleteNode signature `, () => { - console.warn = jest.fn() - store.dispatch( - actions.createNode( - { - id: `hi`, - children: [], - parent: `test`, - internal: { - contentDigest: `hasdfljds`, - type: `Test`, - }, - }, - { - name: `tests`, - } - ) - ) - - expect(getNode(`hi`)).toMatchSnapshot() - store.dispatch( - actions.deleteNode(`hi`, getNode(`hi`), { - name: `tests`, - }) - ) - - expect(getNode(`hi`)).toBeUndefined() - - const deprecationNotice = `Calling "deleteNode" with a nodeId is deprecated. Please pass an object containing a full node instead: deleteNode({ node })` - expect(console.warn).toHaveBeenCalledWith(deprecationNotice) - - console.warn.mockRestore() - }) - it(`nodes that are added are also "touched"`, () => { const action = actions.createNode( { @@ -586,11 +249,4 @@ describe(`Create and update nodes`, () => { expect(callActionCreator).toThrowErrorMatchingSnapshot() }) - - it(`does not crash when delete node is called on undefined`, () => { - actions.deleteNode(undefined, { - name: `tests`, - }) - expect(store.getState().nodes.size).toEqual(0) - }) }) diff --git a/packages/gatsby/src/redux/actions.js b/packages/gatsby/src/redux/actions.js index 6474c00817843..02a9d0276d072 100644 --- a/packages/gatsby/src/redux/actions.js +++ b/packages/gatsby/src/redux/actions.js @@ -601,6 +601,7 @@ actions.createNode = ( updateNodeAction = { type: `CREATE_NODE`, plugin, + oldNode, ...actionOptions, payload: node, } diff --git a/packages/gatsby/src/redux/index.js b/packages/gatsby/src/redux/index.js index 203b0438aac81..103afb0fb62bb 100644 --- a/packages/gatsby/src/redux/index.js +++ b/packages/gatsby/src/redux/index.js @@ -77,7 +77,7 @@ const saveState = state => { pickedState.staticQueryComponents ) pickedState.components = mapToObject(pickedState.components) - pickedState.nodes = mapToObject(pickedState.nodes) + pickedState.nodes = pickedState.nodes ? 
mapToObject(pickedState.nodes) : [] const stringified = stringify(pickedState, null, 2) fs.writeFile( `${process.cwd()}/.cache/redux-state.json`, diff --git a/packages/gatsby/src/redux/nodes.js b/packages/gatsby/src/redux/nodes.js index 05e937019e132..32fedcba4b3e5 100644 --- a/packages/gatsby/src/redux/nodes.js +++ b/packages/gatsby/src/redux/nodes.js @@ -1,5 +1,3 @@ -const _ = require(`lodash`) -const Promise = require(`bluebird`) const { store } = require(`./index`) /** @@ -46,36 +44,6 @@ exports.hasNodeChanged = (id, digest) => { } } -/** - * Get content for a node from the plugin that created it. - * - * @param {Object} node - * @returns {promise} - */ -exports.loadNodeContent = node => { - if (_.isString(node.internal.content)) { - return Promise.resolve(node.internal.content) - } else { - return new Promise(resolve => { - // Load plugin's loader function - const plugin = store - .getState() - .flattenedPlugins.find(plug => plug.name === node.internal.owner) - const { loadNodeContent } = require(plugin.resolve) - if (!loadNodeContent) { - throw new Error( - `Could not find function loadNodeContent for plugin ${plugin.name}` - ) - } - - return loadNodeContent(node).then(content => { - // TODO update node's content field here. - resolve(content) - }) - }) - } -} - /** * Get node and save path dependency. 
* diff --git a/packages/gatsby/src/redux/reducers/index.js b/packages/gatsby/src/redux/reducers/index.js index 5e53ff8667722..9eb38dd86ce11 100644 --- a/packages/gatsby/src/redux/reducers/index.js +++ b/packages/gatsby/src/redux/reducers/index.js @@ -1,6 +1,28 @@ +const reduxNodes = require(`./nodes`) +const lokiNodes = require(`../../db/loki/nodes`).reducer + +const backend = process.env.GATSBY_DB_NODES || `redux` + +function getNodesReducer() { + let nodesReducer + switch (backend) { + case `redux`: + nodesReducer = reduxNodes + break + case `loki`: + nodesReducer = lokiNodes + break + default: + throw new Error( + `Unsupported DB nodes backend (value of env var GATSBY_DB_NODES)` + ) + } + return nodesReducer +} + module.exports = { program: require(`./program`), - nodes: require(`./nodes`), + nodes: getNodesReducer(), nodesTouched: require(`./nodes-touched`), lastAction: require(`./last-action`), plugins: require(`./plugins`), diff --git a/packages/gatsby/src/redux/run-sift.js b/packages/gatsby/src/redux/run-sift.js index 558c7f9d48769..dff3d3681f50f 100644 --- a/packages/gatsby/src/redux/run-sift.js +++ b/packages/gatsby/src/redux/run-sift.js @@ -275,7 +275,13 @@ function handleMany(siftArgs, nodes, sort) { .map(field => field.replace(/___/g, `.`)) .map(field => v => _.get(v, field)) - result = _.orderBy(result, convertedFields, sort.order) + // Gatsby's sort interface only allows one sort order (e.g `desc`) + // to be specified. However, multiple sort fields can be + // provided. This is inconsistent. The API should allow the + // setting of an order per field. Until the API can be changed + // (probably v3), we apply the sort order to the first field only, + // implying asc order for the remaining fields. 
+ result = _.orderBy(result, convertedFields, [sort.order]) } return result } diff --git a/packages/gatsby/src/schema/__tests__/__snapshots__/infer-graphql-input-type-test.js.snap b/packages/gatsby/src/schema/__tests__/__snapshots__/connection-input-fields-test.js.snap similarity index 89% rename from packages/gatsby/src/schema/__tests__/__snapshots__/infer-graphql-input-type-test.js.snap rename to packages/gatsby/src/schema/__tests__/__snapshots__/connection-input-fields-test.js.snap index e3a3efd24dee7..436d81ea4fb0b 100644 --- a/packages/gatsby/src/schema/__tests__/__snapshots__/infer-graphql-input-type-test.js.snap +++ b/packages/gatsby/src/schema/__tests__/__snapshots__/connection-input-fields-test.js.snap @@ -1,9 +1,9 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`GraphQL Input args can query object arrays 1`] = ` +exports[`connection input fields can query object arrays 1`] = ` Object { "data": Object { - "allNode": Object { + "allTest": Object { "edges": Array [ Object { "node": Object { diff --git a/packages/gatsby/src/schema/__tests__/build-node-connections-test.js b/packages/gatsby/src/schema/__tests__/build-node-connections-test.js index f92ad69a82d37..8878f4652d929 100644 --- a/packages/gatsby/src/schema/__tests__/build-node-connections-test.js +++ b/packages/gatsby/src/schema/__tests__/build-node-connections-test.js @@ -2,8 +2,9 @@ const { graphql, GraphQLObjectType, GraphQLSchema } = require(`graphql`) const _ = require(`lodash`) const createPageDependency = require(`../../redux/actions/add-page-dependency`) jest.mock(`../../redux/actions/add-page-dependency`) -const buildNodeTypes = require(`../build-node-types`) +const nodeTypes = require(`../build-node-types`) const nodeConnections = require(`../build-node-connections`) +require(`../../db/__tests__/fixtures/ensure-loki`)() describe(`build-node-connections`, () => { let schema, store, types, connections @@ -48,7 +49,7 @@ describe(`build-node-connections`, () => { }, ].forEach(n => 
store.dispatch({ type: `CREATE_NODE`, payload: n })) - types = await buildNodeTypes({}) + types = await nodeTypes.buildAll({}) connections = await nodeConnections.buildAll(_.values(types)) schema = new GraphQLSchema({ diff --git a/packages/gatsby/src/schema/__tests__/build-node-types-test.js b/packages/gatsby/src/schema/__tests__/build-node-types-test.js index 3edbc5eb5d67e..a30c752bba6a9 100644 --- a/packages/gatsby/src/schema/__tests__/build-node-types-test.js +++ b/packages/gatsby/src/schema/__tests__/build-node-types-test.js @@ -5,13 +5,14 @@ const { GraphQLString, } = require(`graphql`) const _ = require(`lodash`) +require(`../../db/__tests__/fixtures/ensure-loki`)() jest.mock(`../../utils/api-runner-node`) const apiRunnerNode = require(`../../utils/api-runner-node`) const createPageDependency = require(`../../redux/actions/add-page-dependency`) jest.mock(`../../redux/actions/add-page-dependency`) -const buildNodeTypes = require(`../build-node-types`) +const nodeTypes = require(`../build-node-types`) describe(`build-node-types`, () => { let schema, store, types @@ -30,10 +31,7 @@ describe(`build-node-types`, () => { pluginField: { type: GraphQLString, description: `test description`, - resolve: parent => { - console.log(`in resolver: ${parent}`) - return `pluginFieldValue` - }, + resolve: parent => `pluginFieldValue`, }, }, ] @@ -70,7 +68,7 @@ describe(`build-node-types`, () => { }, ].forEach(n => store.dispatch({ type: `CREATE_NODE`, payload: n })) - types = await buildNodeTypes({}) + types = await nodeTypes.buildAll({}) schema = new GraphQLSchema({ query: new GraphQLObjectType({ name: `RootQueryType`, @@ -166,6 +164,19 @@ describe(`build-node-types`, () => { expect(result.parent.pluginField).toEqual(`pluginFieldValue`) }) + it(`should allow filtering on plugin fields`, async () => { + const result = await runQuery( + ` + { + parent(pluginField: { eq: "pluginFieldValue"}) { + pluginField + } + } + ` + ) + 
expect(result.parent.pluginField).toEqual(`pluginFieldValue`) + }) + it(`should create root query type page dependency`, async () => { await runQuery(` { parent(id: { eq: "p1" }) { id } } `) diff --git a/packages/gatsby/src/schema/__tests__/connection-input-fields-test.js b/packages/gatsby/src/schema/__tests__/connection-input-fields-test.js new file mode 100644 index 0000000000000..29a458c3f7321 --- /dev/null +++ b/packages/gatsby/src/schema/__tests__/connection-input-fields-test.js @@ -0,0 +1,272 @@ +const _ = require(`lodash`) +const { graphql } = require(`graphql`) +const nodeTypes = require(`../build-node-types`) +const nodeConnections = require(`../build-node-connections`) +const { buildNodesSchema } = require(`../index`) +const { clearUnionTypes } = require(`../infer-graphql-type`) +const { store } = require(`../../redux`) +require(`../../db/__tests__/fixtures/ensure-loki`)() + +function makeNodes() { + return [ + { + id: `0`, + internal: { type: `Test` }, + children: [], + index: 0, + name: `The Mad Max`, + string: `a`, + float: 1.5, + hair: 1, + date: `2006-07-22T22:39:53.000Z`, + anArray: [1, 2, 3, 4], + key: { + withEmptyArray: [], + }, + anotherKey: { + withANested: { + nestedKey: `foo`, + emptyArray: [], + anotherEmptyArray: [], + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of dash and adventure`, + blue: 100, + }, + anObjectArray: [ + { aString: `some string`, aNumber: 2, aBoolean: true }, + { aString: `some string`, aNumber: 2, anArray: [1, 2] }, + ], + boolean: true, + }, + { + id: `1`, + internal: { type: `Test` }, + children: [], + index: 1, + name: `The Mad Wax`, + string: `b`, + float: 2.5, + hair: 2, + anArray: [1, 2, 5, 4], + anotherKey: { + withANested: { + nestedKey: `foo`, + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of slash and adventure`, + blue: 10010, + circle: `happy`, + }, + boolean: false, + data: { + tags: [ + { + tag: { + document: [ + { + data: { + tag: 
`Design System`, + }, + number: 3, + }, + ], + }, + }, + ], + }, + }, + { + id: `2`, + internal: { type: `Test` }, + children: [], + index: 2, + name: `The Mad Wax`, + string: `c`, + float: 3.5, + hair: 0, + date: `2006-07-29T22:39:53.000Z`, + anotherKey: { + withANested: { + nestedKey: `bar`, + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of shave and adventure`, + blue: 10010, + circle: `happy`, + }, + data: { + tags: [ + { + tag: { + document: [ + { + data: { + tag: `Gatsby`, + }, + }, + ], + }, + }, + { + tag: { + document: [ + { + data: { + tag: `Design System`, + }, + number: 5, + }, + ], + }, + }, + ], + }, + }, + ] +} + +async function queryResult(nodesData, query, { types = [] } = {}) { + store.dispatch({ type: `DELETE_CACHE` }) + for (const node of nodesData) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) + } + clearUnionTypes() + const typesGQL = await nodeTypes.buildAll({}) + const connections = nodeConnections.buildAll(_.values(typesGQL)) + + // Pull off just the graphql node from each type object. 
+ const nodes = _.mapValues(typesGQL, `node`) + + const schema = buildNodesSchema({ ...connections, ...nodes }) + + return graphql(schema, query) +} + +describe(`connection input fields`, () => { + it(`returns list of distinct values in a field`, async () => { + let result = await queryResult( + makeNodes(), + ` + { + allTest { + totalCount + names: distinct(field: name) + array: distinct(field: anArray) + blue: distinct(field: frontmatter___blue) + # Only one node has this field + circle: distinct(field: frontmatter___circle) + nestedField: distinct(field: anotherKey___withANested___nestedKey) + } + } + ` + ) + + expect(result.errors).not.toBeDefined() + + expect(result.data.allTest.names.length).toEqual(2) + expect(result.data.allTest.names[0]).toEqual(`The Mad Max`) + + expect(result.data.allTest.array.length).toEqual(5) + expect(result.data.allTest.array[0]).toEqual(`1`) + + expect(result.data.allTest.blue.length).toEqual(2) + expect(result.data.allTest.blue[0]).toEqual(`100`) + + expect(result.data.allTest.circle.length).toEqual(1) + expect(result.data.allTest.circle[0]).toEqual(`happy`) + + expect(result.data.allTest.nestedField.length).toEqual(2) + expect(result.data.allTest.nestedField[0]).toEqual(`bar`) + expect(result.data.allTest.nestedField[1]).toEqual(`foo`) + }) + + it(`handles the group connection field`, async () => { + let result = await queryResult( + makeNodes(), + ` { + allTest { + blue: group(field: frontmatter___blue) { + field + fieldValue + totalCount + } + anArray: group(field: anArray) { + field + fieldValue + totalCount + } + } + }` + ) + expect(result.errors).not.toBeDefined() + + expect(result.data.allTest.blue).toHaveLength(2) + expect(result.data.allTest.blue[0].fieldValue).toEqual(`100`) + expect(result.data.allTest.blue[0].field).toEqual(`frontmatter.blue`) + expect(result.data.allTest.blue[0].totalCount).toEqual(1) + + expect(result.data.allTest.anArray).toHaveLength(5) + 
expect(result.data.allTest.anArray[0].fieldValue).toEqual(`1`) + expect(result.data.allTest.anArray[0].field).toEqual(`anArray`) + expect(result.data.allTest.anArray[0].totalCount).toEqual(2) + }) + + it(`handles the nested group connection field`, async () => { + let result = await queryResult( + makeNodes(), + ` { + allTest { + nestedKey: group(field: anotherKey___withANested___nestedKey) { + field + fieldValue + totalCount + } + } + }` + ) + + expect(result.errors).not.toBeDefined() + expect(result.data.allTest.nestedKey).toHaveLength(2) + expect(result.data.allTest.nestedKey[0].fieldValue).toEqual(`bar`) + expect(result.data.allTest.nestedKey[0].field).toEqual( + `anotherKey.withANested.nestedKey` + ) + expect(result.data.allTest.nestedKey[0].totalCount).toEqual(1) + expect(result.data.allTest.nestedKey[1].fieldValue).toEqual(`foo`) + expect(result.data.allTest.nestedKey[1].field).toEqual( + `anotherKey.withANested.nestedKey` + ) + expect(result.data.allTest.nestedKey[1].totalCount).toEqual(2) + }) + + it(`can query object arrays`, async () => { + let result = await queryResult( + makeNodes(), + ` + { + allTest { + edges { + node { + anObjectArray { + aString + aNumber + aBoolean + } + } + } + } + } + ` + ) + expect(result.errors).not.toBeDefined() + + expect(result).toMatchSnapshot() + }) +}) diff --git a/packages/gatsby/src/schema/__tests__/connections-filter-on-linked-nodes.js b/packages/gatsby/src/schema/__tests__/connections-filter-on-linked-nodes.js new file mode 100644 index 0000000000000..711eb2dabc10e --- /dev/null +++ b/packages/gatsby/src/schema/__tests__/connections-filter-on-linked-nodes.js @@ -0,0 +1,293 @@ +const _ = require(`lodash`) +const { graphql } = require(`graphql`) +const nodeTypes = require(`../build-node-types`) +const nodeConnections = require(`../build-node-connections`) +const { buildNodesSchema } = require(`../index`) +const { clearUnionTypes } = require(`../infer-graphql-type`) +const { getExampleValues } = 
require(`../data-tree-utils`) +const { + inferInputObjectStructureFromNodes, +} = require(`../infer-graphql-input-fields`) +const { store } = require(`../../redux`) +require(`../../db/__tests__/fixtures/ensure-loki`)() + +function makeNodes() { + return [ + { id: `child_1`, internal: { type: `Child` }, hair: `brown`, children: [] }, + { + id: `child_2`, + internal: { type: `Child` }, + children: [], + hair: `blonde`, + height: 101, + }, + { + id: `linked_A`, + internal: { type: `Linked_A` }, + children: [], + array: [{ linked___NODE: `linked_B` }], + single: { linked___NODE: `linked_B` }, + }, + { id: `linked_B`, internal: { type: `Linked_B` }, children: [] }, + ] +} + +async function queryResult(nodesData, query, { types = [] } = {}) { + for (const node of nodesData) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) + } + const typesGQL = await nodeTypes.buildAll({}) + const connections = nodeConnections.buildAll(_.values(typesGQL)) + + // Pull off just the graphql node from each type object. 
+ const nodes = _.mapValues(typesGQL, `node`) + + const schema = buildNodesSchema({ ...connections, ...nodes }) + + return graphql(schema, query) +} + +describe(`filtering on linked nodes`, () => { + beforeEach(() => { + store.dispatch({ type: `DELETE_CACHE` }) + }) + clearUnionTypes() + it(`filters on linked nodes via id`, async () => { + let result = await queryResult( + makeNodes().concat([ + { + id: `child_2_link`, + internal: { type: `Test` }, + children: [], + linked___NODE: `child_2`, + foo: `bar`, + }, + { + id: `child_1_linked`, + internal: { type: `Test` }, + children: [], + linked___NODE: `child_1`, + foo: `baz`, + }, + ]), + ` + { + allTest(filter: { linked: { hair: { eq: "blonde" } } }) { + edges { node { linked { hair, height }, foo } } + } + } + ` + ) + expect(result.data.allTest.edges.length).toEqual(1) + expect(result.data.allTest.edges[0].node.linked.hair).toEqual(`blonde`) + expect(result.data.allTest.edges[0].node.linked.height).toEqual(101) + expect(result.data.allTest.edges[0].node.foo).toEqual(`bar`) + }) + + it(`returns nested linked fields`, async () => { + let result = await queryResult( + [ + { + id: `child_2`, + internal: { type: `Child` }, + children: [], + hair: `blonde`, + height: 101, + }, + { + id: `child_1_link`, + internal: { type: `Test` }, + children: [], + nested: { + linked___NODE: `child_2`, + }, + foo: `bar`, + }, + ], + ` + { + allTest(filter: { nested: { linked: { hair: { eq: "blonde" } } } }) { + edges { node { nested { linked { hair, height } }, foo } } + } + } + ` + ) + expect(result.data.allTest.edges[0].node.nested.linked.hair).toEqual( + `blonde` + ) + expect(result.data.allTest.edges[0].node.nested.linked.height).toEqual(101) + expect(result.data.allTest.edges[0].node.foo).toEqual(`bar`) + }) + + it(`returns all matching linked nodes`, async () => { + let result = await queryResult( + makeNodes().concat([ + { + id: `child_2_link`, + internal: { type: `Test` }, + children: [], + linked___NODE: `child_2`, + foo: 
`bar`, + }, + { + id: `child_2_link2`, + internal: { type: `Test` }, + children: [], + linked___NODE: `child_2`, + foo: `baz`, + }, + ]), + ` + { + allTest(filter: { linked: { hair: { eq: "blonde" } } }) { + edges { node { linked { hair, height }, foo } } + } + } + ` + ) + expect(result.data.allTest.edges[0].node.linked.hair).toEqual(`blonde`) + expect(result.data.allTest.edges[0].node.linked.height).toEqual(101) + expect(result.data.allTest.edges[0].node.foo).toEqual(`bar`) + expect(result.data.allTest.edges[1].node.foo).toEqual(`baz`) + }) + + it(`handles elemMatch operator`, async () => { + let result = await queryResult( + makeNodes().concat([ + { + id: `1`, + internal: { type: `Test` }, + children: [], + linked___NODE: [`child_1`, `child_2`], + foo: `bar`, + }, + { + id: `2`, + internal: { type: `Test` }, + children: [], + linked___NODE: [`child_1`], + foo: `baz`, + }, + { + id: `3`, + internal: { type: `Test` }, + children: [], + linked___NODE: [`child_2`], + foo: `foo`, + }, + { + id: `4`, + internal: { type: `Test` }, + children: [], + array: [{ linked___NODE: [`child_1`, `child_2`] }], + foo: `lorem`, + }, + { + id: `5`, + internal: { type: `Test` }, + children: [], + array: [ + { linked___NODE: [`child_1`] }, + { linked___NODE: [`child_2`] }, + ], + foo: `ipsum`, + }, + { + id: `6`, + internal: { type: `Test` }, + children: [], + array: [{ linked___NODE: [`child_1`] }], + foo: `sit`, + }, + { + id: `7`, + internal: { type: `Test` }, + children: [], + array: [{ linked___NODE: [`child_2`] }], + foo: `dolor`, + }, + { + id: `8`, + internal: { type: `Test` }, + children: [], + foo: `ipsum`, + }, + ]), + ` + { + eq:allTest(filter: { linked: { elemMatch: { hair: { eq: "brown" } } } }) { + edges { node { foo } } + } + in:allTest(filter: { linked: { elemMatch: { hair: { in: ["brown", "blonde"] } } } }) { + edges { node { foo } } + } + insideInlineArrayEq:allTest(filter: { array: { elemMatch: { linked: { elemMatch: { hair: { eq: "brown" } } } } } }) { + edges { 
node { foo } } + } + insideInlineArrayIn:allTest(filter: { array: { elemMatch: { linked: { elemMatch: { hair: { in: ["brown", "blonde"] } } } } } }) { + edges { node { foo } } + } + } + ` + ) + + const itemToEdge = item => { + return { + node: { + foo: item, + }, + } + } + + expect(result.data.eq.edges).toEqual([`bar`, `baz`].map(itemToEdge)) + expect(result.data.in.edges).toEqual([`bar`, `baz`, `foo`].map(itemToEdge)) + expect(result.data.insideInlineArrayEq.edges).toEqual( + [`lorem`, `ipsum`, `sit`].map(itemToEdge) + ) + expect(result.data.insideInlineArrayIn.edges).toEqual( + [`lorem`, `ipsum`, `sit`, `dolor`].map(itemToEdge) + ) + }) + + it(`doesn't mutate node object`, async () => { + const allTestNodes = makeNodes() + await queryResult( + allTestNodes.concat([ + { + id: `1`, + internal: { type: `Test` }, + children: [], + test: [ + { + linked___NODE: `linked_A`, + }, + ], + }, + ]), + ` + { + allTest { + edges { node { hair } } + } + } + ` + ) + const originalTestNode = allTestNodes.find( + node => node.internal.type === `Linked_A` + ) + delete originalTestNode.children + + expect(getExampleValues({ typeName: `Linked_A` })).toEqual(originalTestNode) + }) + + it(`skips fields with missing nodes`, async () => { + const fields = inferInputObjectStructureFromNodes({ + nodes: [], + exampleValue: { + movie___NODE: `foobar`, + }, + }).inferredFields + + expect(Object.keys(fields)).toHaveLength(0) + }) +}) diff --git a/packages/gatsby/src/schema/__tests__/infer-graphql-input-type-test.js b/packages/gatsby/src/schema/__tests__/infer-graphql-input-type-test.js index a9926b3a4e33d..fae2c8441c5ab 100644 --- a/packages/gatsby/src/schema/__tests__/infer-graphql-input-type-test.js +++ b/packages/gatsby/src/schema/__tests__/infer-graphql-input-type-test.js @@ -1,4 +1,3 @@ -const _ = require(`lodash`) const { graphql, GraphQLString, GraphQLObjectType } = require(`graphql`) const { inferObjectStructureFromNodes } = require(`../infer-graphql-type`) @@ -7,18 +6,14 @@ const { 
buildNodesSchema } = require(`../index`) const { inferInputObjectStructureFromNodes, } = require(`../infer-graphql-input-fields`) -const { - getExampleValues, - clearTypeExampleValues, -} = require(`../data-tree-utils`) - -let mockNodes -jest.unmock(`../../db/nodes`) -const nodesDb = require(`../../db/nodes`) -nodesDb.getNodesByType = () => mockNodes +const { clearTypeExampleValues } = require(`../data-tree-utils`) +const { store } = require(`../../redux`) +require(`../../db/__tests__/fixtures/ensure-loki`)() function queryResult(nodes, query, { types = [] } = {}) { - mockNodes = nodes + for (const node of nodes) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) + } const nodeObjectType = new GraphQLObjectType({ name: `Node`, fields: inferObjectStructureFromNodes({ @@ -42,123 +37,12 @@ beforeEach(() => { }) describe(`GraphQL Input args`, () => { - const nodes = [ - { - index: 0, - name: `The Mad Max`, - string: `a`, - float: 1.5, - hair: 1, - date: `2006-07-22T22:39:53.000Z`, - anArray: [1, 2, 3, 4], - key: { - withEmptyArray: [], - }, - anotherKey: { - withANested: { - nestedKey: `foo`, - emptyArray: [], - anotherEmptyArray: [], - }, - }, - frontmatter: { - date: `2006-07-22T22:39:53.000Z`, - title: `The world of dash and adventure`, - blue: 100, - }, - anObjectArray: [ - { aString: `some string`, aNumber: 2, aBoolean: true }, - { aString: `some string`, aNumber: 2, anArray: [1, 2] }, - ], - boolean: true, - }, - { - index: 1, - name: `The Mad Wax`, - string: `b`, - float: 2.5, - hair: 2, - anArray: [1, 2, 5, 4], - anotherKey: { - withANested: { - nestedKey: `foo`, - }, - }, - frontmatter: { - date: `2006-07-22T22:39:53.000Z`, - title: `The world of slash and adventure`, - blue: 10010, - circle: `happy`, - }, - boolean: false, - data: { - tags: [ - { - tag: { - document: [ - { - data: { - tag: `Design System`, - }, - number: 3, - }, - ], - }, - }, - ], - }, - }, - { - index: 2, - name: `The Mad Wax`, - string: `c`, - float: 3.5, - hair: 0, - date: 
`2006-07-29T22:39:53.000Z`, - anotherKey: { - withANested: { - nestedKey: `bar`, - }, - }, - frontmatter: { - date: `2006-07-22T22:39:53.000Z`, - title: `The world of shave and adventure`, - blue: 10010, - circle: `happy`, - }, - data: { - tags: [ - { - tag: { - document: [ - { - data: { - tag: `Gatsby`, - }, - }, - ], - }, - }, - { - tag: { - document: [ - { - data: { - tag: `Design System`, - }, - number: 5, - }, - ], - }, - }, - ], - }, - }, - ] - + beforeEach(() => { + store.dispatch({ type: `DELETE_CACHE` }) + }) it(`filters out null example values`, async () => { let result = await queryResult( - [{ foo: null, bar: `baz` }], + [{ id: `1`, internal: { type: `Bar` }, foo: null, bar: `baz` }], ` { allNode(foo: { eq: "bar" }) { @@ -175,7 +59,7 @@ describe(`GraphQL Input args`, () => { it(`filters out empty objects`, async () => { let result = await queryResult( - [{ foo: {}, bar: `baz` }], + [{ id: `1`, internal: { type: `Bar` }, foo: {}, bar: `baz` }], ` { allNode(foo: { eq: "bar" }) { @@ -192,7 +76,7 @@ describe(`GraphQL Input args`, () => { it(`filters out empty arrays`, async () => { let result = await queryResult( - [{ foo: [], bar: `baz` }], + [{ id: `1`, internal: { type: `Bar` }, foo: [], bar: `baz` }], ` { allNode(foo: { eq: "bar" }) { @@ -209,7 +93,14 @@ describe(`GraphQL Input args`, () => { it(`filters out sparse arrays`, async () => { let result = await queryResult( - [{ foo: [undefined, null, null], bar: `baz` }], + [ + { + id: `1`, + internal: { type: `Bar` }, + foo: [undefined, null, null], + bar: `baz`, + }, + ], ` { allNode(foo: { eq: "bar" }) { @@ -234,7 +125,14 @@ describe(`GraphQL Input args`, () => { }) let result = await queryResult( - [{ linked___NODE: `baz`, foo: `bar` }], + [ + { + id: `1`, + internal: { type: `Bar` }, + linked___NODE: `baz`, + foo: `bar`, + }, + ], ` { allNode(linked___NODE: { eq: "baz" }) { @@ -301,657 +199,4 @@ describe(`GraphQL Input args`, () => { expect(fields.float.type.name.endsWith(`Float`)).toBe(true) 
expect(fields.longint.type.name.endsWith(`Float`)).toBe(true) }) - - it(`handles eq operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { eq: 2 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(1) - expect(result.data.allNode.edges[0].node.hair).toEqual(2) - }) - - it(`handles eq operator with false value`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {boolean: { eq: false }}) { - edges { node { name }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(1) - expect(result.data.allNode.edges[0].node.name).toEqual(`The Mad Wax`) - }) - - it(`handles eq operator with 0`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { eq: 0 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(1) - expect(result.data.allNode.edges[0].node.hair).toEqual(0) - }) - - it(`handles ne operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { ne: 2 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.hair).toEqual(1) - }) - - it(`handles lt operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { lt: 2 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.hair).toEqual(1) - expect(result.data.allNode.edges[1].node.hair).toEqual(0) - }) - - it(`handles lte operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { lte: 1 }}) { - 
edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.hair).toEqual(1) - expect(result.data.allNode.edges[1].node.hair).toEqual(0) - }) - - it(`handles gt operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { gt: 0 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.hair).toEqual(1) - expect(result.data.allNode.edges[1].node.hair).toEqual(2) - }) - - it(`handles gte operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {hair: { gte: 1 }}) { - edges { node { hair }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.hair).toEqual(1) - expect(result.data.allNode.edges[1].node.hair).toEqual(2) - }) - - it(`handles the regex operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {name: { regex: "/^the.*wax/i" }}) { - edges { node { name }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.name).toEqual(`The Mad Wax`) - }) - - it(`handles the in operator for scalars`, async () => { - let result = await queryResult( - nodes, - ` - { - string:allNode(filter: { string: { in: ["b", "c"] }}) { - edges { node { index }} - } - int:allNode(filter: { index: { in: [0, 2] }}) { - edges { node { index }} - } - float:allNode(filter: { float: { in: [1.5, 2.5] }}) { - edges { node { index }} - } - boolean:allNode(filter: { boolean: { in: [true, null] }}) { - edges { node { index }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.string.edges.length).toEqual(2) - 
expect(result.data.string.edges[0].node.index).toEqual(1) - expect(result.data.string.edges[1].node.index).toEqual(2) - expect(result.data.int.edges.length).toEqual(2) - expect(result.data.int.edges[0].node.index).toEqual(0) - expect(result.data.int.edges[1].node.index).toEqual(2) - expect(result.data.float.edges.length).toEqual(2) - expect(result.data.float.edges[0].node.index).toEqual(0) - expect(result.data.float.edges[1].node.index).toEqual(1) - expect(result.data.boolean.edges.length).toEqual(2) - expect(result.data.boolean.edges[0].node.index).toEqual(0) - expect(result.data.boolean.edges[1].node.index).toEqual(2) - }) - - it(`handles the in operator for array`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {anArray: { in: [5] }}) { - edges { node { name }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(1) - expect(result.data.allNode.edges[0].node.name).toEqual(`The Mad Wax`) - }) - - it(`handles the elemMatch operator for array of objects`, async () => { - let result = await queryResult( - nodes, - ` - { - test1:allNode(filter: {data: {tags: {elemMatch: {tag: {document: {elemMatch: {data: {tag: {eq: "Gatsby"}}}}}}}}}) { - edges { node { index }} - } - test2:allNode(filter: {data: {tags: {elemMatch: {tag: {document: {elemMatch: {data: {tag: {eq: "Design System"}}}}}}}}}) { - edges { node { index }} - } - test3:allNode(filter: {data: {tags: {elemMatch: {tag: {document: {elemMatch: {number: {lt: 4}}}}}}}}) { - edges { node { index }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.test1.edges.length).toEqual(1) - expect(result.data.test1.edges[0].node.index).toEqual(2) - expect(result.data.test2.edges.length).toEqual(2) - expect(result.data.test2.edges[0].node.index).toEqual(1) - expect(result.data.test2.edges[1].node.index).toEqual(2) - expect(result.data.test3.edges.length).toEqual(1) - 
expect(result.data.test3.edges[0].node.index).toEqual(1) - }) - - it(`handles the nin operator for array`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {anArray: { nin: [5] }}) { - edges { node { anArray }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - - result.data.allNode.edges.forEach(edge => { - expect(edge.node.anArray).not.toEqual(expect.arrayContaining([5])) - }) - }) - - it(`handles the nin operator for scalars`, async () => { - let result = await queryResult( - nodes, - ` - { - string:allNode(filter: { string: { nin: ["b", "c"] }}) { - edges { node { string }} - } - int:allNode(filter: { index: { nin: [0, 2] }}) { - edges { node { index }} - } - float:allNode(filter: { float: { nin: [1.5] }}) { - edges { node { float }} - } - boolean:allNode(filter: { boolean: { nin: [true, null] }}) { - edges { node { boolean }} - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - - expect(result.data.string.edges.length).toEqual(1) - result.data.string.edges.forEach(edge => { - expect(edge.node.string).not.toEqual(`b`) - expect(edge.node.string).not.toEqual(`c`) - }) - - expect(result.data.int.edges.length).toEqual(1) - result.data.int.edges.forEach(edge => { - expect(edge.node.index).not.toEqual(0) - expect(edge.node.index).not.toEqual(2) - }) - - expect(result.data.float.edges.length).toEqual(2) - result.data.float.edges.forEach(edge => { - expect(edge.node.float).not.toEqual(1.5) - }) - - expect(result.data.boolean.edges.length).toEqual(1) - result.data.boolean.edges.forEach(edge => { - expect(edge.node.boolean).not.toEqual(null) - expect(edge.node.boolean).not.toEqual(true) - }) - }) - - it(`handles the glob operator`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(limit: 10, filter: {name: { glob: "*Wax" }}) { - edges { node { name }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - 
expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.name).toEqual(`The Mad Wax`) - }) - - it(`filters date fields`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode(filter: {date: { ne: null }}) { - edges { node { index }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(2) - expect(result.data.allNode.edges[0].node.index).toEqual(0) - expect(result.data.allNode.edges[1].node.index).toEqual(2) - }) - - it(`sorts results`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode( - limit: 10, - sort: { - fields: [frontmatter___blue], - order: DESC - } - ) { - edges { node { name }} - } - } - ` - ) - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.edges.length).toEqual(3) - expect(result.data.allNode.edges[0].node.name).toEqual(`The Mad Wax`) - }) - - it(`returns list of distinct values in a field`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode { - totalCount - names: distinct(field: name) - array: distinct(field: anArray) - blue: distinct(field: frontmatter___blue) - # Only one node has this field - circle: distinct(field: frontmatter___circle) - nestedField: distinct(field: anotherKey___withANested___nestedKey) - } - } - ` - ) - - expect(result.errors).not.toBeDefined() - - expect(result.data.allNode.names.length).toEqual(2) - expect(result.data.allNode.names[0]).toEqual(`The Mad Max`) - - expect(result.data.allNode.array.length).toEqual(5) - expect(result.data.allNode.array[0]).toEqual(`1`) - - expect(result.data.allNode.blue.length).toEqual(2) - expect(result.data.allNode.blue[0]).toEqual(`100`) - - expect(result.data.allNode.circle.length).toEqual(1) - expect(result.data.allNode.circle[0]).toEqual(`happy`) - - expect(result.data.allNode.nestedField.length).toEqual(2) - expect(result.data.allNode.nestedField[0]).toEqual(`bar`) - 
expect(result.data.allNode.nestedField[1]).toEqual(`foo`) - }) - - it(`handles the group connection field`, async () => { - let result = await queryResult( - nodes, - ` { - allNode { - blue: group(field: frontmatter___blue) { - field - fieldValue - totalCount - } - anArray: group(field: anArray) { - field - fieldValue - totalCount - } - } - }` - ) - expect(result.errors).not.toBeDefined() - - expect(result.data.allNode.blue).toHaveLength(2) - expect(result.data.allNode.blue[0].fieldValue).toEqual(`100`) - expect(result.data.allNode.blue[0].field).toEqual(`frontmatter.blue`) - expect(result.data.allNode.blue[0].totalCount).toEqual(1) - - expect(result.data.allNode.anArray).toHaveLength(5) - expect(result.data.allNode.anArray[0].fieldValue).toEqual(`1`) - expect(result.data.allNode.anArray[0].field).toEqual(`anArray`) - expect(result.data.allNode.anArray[0].totalCount).toEqual(2) - }) - - it(`handles the nested group connection field`, async () => { - let result = await queryResult( - nodes, - ` { - allNode { - nestedKey: group(field: anotherKey___withANested___nestedKey) { - field - fieldValue - totalCount - } - } - }` - ) - - expect(result.errors).not.toBeDefined() - expect(result.data.allNode.nestedKey).toHaveLength(2) - expect(result.data.allNode.nestedKey[0].fieldValue).toEqual(`bar`) - expect(result.data.allNode.nestedKey[0].field).toEqual( - `anotherKey.withANested.nestedKey` - ) - expect(result.data.allNode.nestedKey[0].totalCount).toEqual(1) - expect(result.data.allNode.nestedKey[1].fieldValue).toEqual(`foo`) - expect(result.data.allNode.nestedKey[1].field).toEqual( - `anotherKey.withANested.nestedKey` - ) - expect(result.data.allNode.nestedKey[1].totalCount).toEqual(2) - }) - - it(`can query object arrays`, async () => { - let result = await queryResult( - nodes, - ` - { - allNode { - edges { - node { - anObjectArray { - aString - aNumber - aBoolean - } - } - } - } - } - ` - ) - expect(result.errors).not.toBeDefined() - - expect(result).toMatchSnapshot() - 
}) -}) - -describe(`filtering on linked nodes`, () => { - let types - const allNodes = [ - { id: `child_1`, internal: { type: `Child` }, hair: `brown` }, - { - id: `child_2`, - internal: { type: `Child` }, - hair: `blonde`, - height: 101, - }, - { - id: `linked_A`, - internal: { type: `Linked_A` }, - array: [{ linked___NODE: `linked_B` }], - single: { linked___NODE: `linked_B` }, - }, - { id: `linked_B`, internal: { type: `Linked_B` } }, - ] - const typeMap = _.groupBy(allNodes, node => node.internal.type) - - const { store } = require(`../../redux`) - allNodes.forEach(node => { - store.dispatch({ - type: `CREATE_NODE`, - payload: node, - }) - }) - - types = _.toPairs(typeMap).map(([type, nodes]) => { - return { - name: type, - nodeObjectType: new GraphQLObjectType({ - name: type, - fields: () => - inferObjectStructureFromNodes({ - nodes, - types, - }), - }), - } - }) - - it(`filters on linked nodes via id`, async () => { - let result = await queryResult( - [ - { linked___NODE: `child_2`, foo: `bar` }, - { linked___NODE: `child_1`, foo: `baz` }, - ], - ` - { - allNode(filter: { linked: { hair: { eq: "blonde" } } }) { - edges { node { linked { hair, height }, foo } } - } - } - `, - { types } - ) - expect(result.data.allNode.edges.length).toEqual(1) - expect(result.data.allNode.edges[0].node.linked.hair).toEqual(`blonde`) - expect(result.data.allNode.edges[0].node.linked.height).toEqual(101) - expect(result.data.allNode.edges[0].node.foo).toEqual(`bar`) - }) - - it(`returns all matching linked nodes`, async () => { - let result = await queryResult( - [ - { linked___NODE: `child_2`, foo: `bar` }, - { linked___NODE: `child_2`, foo: `baz` }, - ], - ` - { - allNode(filter: { linked: { hair: { eq: "blonde" } } }) { - edges { node { linked { hair, height }, foo } } - } - } - `, - { types } - ) - expect(result.data.allNode.edges[0].node.linked.hair).toEqual(`blonde`) - expect(result.data.allNode.edges[0].node.linked.height).toEqual(101) - 
expect(result.data.allNode.edges[0].node.foo).toEqual(`bar`) - expect(result.data.allNode.edges[1].node.foo).toEqual(`baz`) - }) - - it(`handles elemMatch operator`, async () => { - let result = await queryResult( - [ - { linked___NODE: [`child_1`, `child_2`], foo: `bar` }, - { linked___NODE: [`child_1`], foo: `baz` }, - { linked___NODE: [`child_2`], foo: `foo` }, - { array: [{ linked___NODE: [`child_1`, `child_2`] }], foo: `lorem` }, - { - array: [ - { linked___NODE: [`child_1`] }, - { linked___NODE: [`child_2`] }, - ], - foo: `ipsum`, - }, - { array: [{ linked___NODE: [`child_1`] }], foo: `sit` }, - { array: [{ linked___NODE: [`child_2`] }], foo: `dolor` }, - { foo: `ipsum` }, - ], - ` - { - eq:allNode(filter: { linked: { elemMatch: { hair: { eq: "brown" } } } }) { - edges { node { foo } } - } - in:allNode(filter: { linked: { elemMatch: { hair: { in: ["brown", "blonde"] } } } }) { - edges { node { foo } } - } - insideInlineArrayEq:allNode(filter: { array: { elemMatch: { linked: { elemMatch: { hair: { eq: "brown" } } } } } }) { - edges { node { foo } } - } - insideInlineArrayIn:allNode(filter: { array: { elemMatch: { linked: { elemMatch: { hair: { in: ["brown", "blonde"] } } } } } }) { - edges { node { foo } } - } - } - `, - { types } - ) - - const itemToEdge = item => { - return { - node: { - foo: item, - }, - } - } - - expect(result.data.eq.edges).toEqual([`bar`, `baz`].map(itemToEdge)) - expect(result.data.in.edges).toEqual([`bar`, `baz`, `foo`].map(itemToEdge)) - expect(result.data.insideInlineArrayEq.edges).toEqual( - [`lorem`, `ipsum`, `sit`].map(itemToEdge) - ) - expect(result.data.insideInlineArrayIn.edges).toEqual( - [`lorem`, `ipsum`, `sit`, `dolor`].map(itemToEdge) - ) - }) - - it(`doesn't mutate node object`, async () => { - await queryResult( - [ - { - test: [ - { - linked___NODE: `linked_A`, - }, - ], - }, - ], - ` - { - allNode { - edges { node { hair } } - } - } - `, - { types } - ) - const originalNode = allNodes.find( - node => node.internal.type 
=== `Linked_A` - ) - - expect(getExampleValues({ typeName: `Linked_A` })).toEqual(originalNode) - }) - - it(`skips fields with missing nodes`, async () => { - const fields = inferInputObjectStructureFromNodes({ - nodes: [], - exampleValue: { - movie___NODE: `foobar`, - }, - }).inferredFields - - expect(Object.keys(fields)).toHaveLength(0) - }) }) diff --git a/packages/gatsby/src/schema/__tests__/infer-graphql-type-test.js b/packages/gatsby/src/schema/__tests__/infer-graphql-type-test.js index 28444eda7e2a7..24886bfb7fca8 100644 --- a/packages/gatsby/src/schema/__tests__/infer-graphql-type-test.js +++ b/packages/gatsby/src/schema/__tests__/infer-graphql-type-test.js @@ -13,6 +13,7 @@ const { clearUnionTypes, } = require(`../infer-graphql-type`) const { clearTypeNames } = require(`../create-type-name`) +require(`../../db/__tests__/fixtures/ensure-loki`)() function queryResult(nodes, fragment, { types = [], ignoreFields } = {}) { const schema = new GraphQLSchema({ @@ -522,6 +523,8 @@ describe(`GraphQL type inferance`, () => { beforeEach(() => { ;({ store } = require(`../../redux`)) + store.dispatch({ type: `DELETE_CACHE` }) + const { setFileNodeRootType } = require(`../types/type-file`) const fileType = { name: `File`, @@ -612,6 +615,7 @@ describe(`GraphQL type inferance`, () => { beforeEach(() => { ;({ store } = require(`../../redux`)) + store.dispatch({ type: `DELETE_CACHE` }) types = [ { name: `Child`, diff --git a/packages/gatsby/src/schema/__tests__/run-query.js b/packages/gatsby/src/schema/__tests__/run-query.js new file mode 100644 index 0000000000000..d905cd73050f8 --- /dev/null +++ b/packages/gatsby/src/schema/__tests__/run-query.js @@ -0,0 +1,470 @@ +const { GraphQLObjectType } = require(`graphql`) +const nodesQuery = require(`../../db/nodes-query`) +const { inferObjectStructureFromNodes } = require(`../infer-graphql-type`) +const { store } = require(`../../redux`) +require(`../../db/__tests__/fixtures/ensure-loki`)() + +const makeNodes = () => [ + { + id: 
`0`, + internal: { type: `Test` }, + index: 0, + name: `The Mad Max`, + string: `a`, + float: 1.5, + hair: 1, + date: `2006-07-22T22:39:53.000Z`, + anArray: [1, 2, 3, 4], + key: { + withEmptyArray: [], + }, + anotherKey: { + withANested: { + nestedKey: `foo`, + emptyArray: [], + anotherEmptyArray: [], + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of dash and adventure`, + tags: [`moo`, `foo`], + blue: 100, + }, + anObjectArray: [ + { aString: `some string`, aNumber: 2, aBoolean: true }, + { aString: `some string`, aNumber: 2, anArray: [1, 2] }, + ], + boolean: true, + }, + { + id: `1`, + internal: { type: `Test` }, + index: 1, + name: `The Mad Wax`, + string: `b`, + float: 2.5, + hair: 2, + anArray: [1, 2, 5, 4], + waxOnly: { + foo: true, + }, + anotherKey: { + withANested: { + nestedKey: `foo`, + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of slash and adventure`, + blue: 10010, + circle: `happy`, + }, + boolean: false, + data: { + tags: [ + { + tag: { + document: [ + { + data: { + tag: `Design System`, + }, + number: 3, + }, + ], + }, + }, + ], + }, + }, + { + id: `2`, + internal: { type: `Test` }, + index: 2, + name: `The Mad Wax`, + string: `c`, + float: 3.5, + hair: 0, + date: `2006-07-29T22:39:53.000Z`, + waxOnly: null, + anotherKey: { + withANested: { + nestedKey: `bar`, + }, + }, + frontmatter: { + date: `2006-07-22T22:39:53.000Z`, + title: `The world of shave and adventure`, + blue: 10010, + circle: `happy`, + }, + data: { + tags: [ + { + tag: { + document: [ + { + data: { + tag: `Gatsby`, + }, + }, + ], + }, + }, + { + tag: { + document: [ + { + data: { + tag: `Design System`, + }, + number: 5, + }, + ], + }, + }, + ], + }, + }, +] + +function makeGqlType(nodes) { + return new GraphQLObjectType({ + name: `Test`, + fields: inferObjectStructureFromNodes({ + nodes, + types: [{ name: `Test` }], + }), + }) +} + +function resetDb(nodes) { + store.dispatch({ type: `DELETE_CACHE` }) + for 
(const node of nodes) { + store.dispatch({ type: `CREATE_NODE`, payload: node }) + } +} + +async function runQuery(queryArgs) { + const nodes = makeNodes() + resetDb(nodes) + const gqlType = makeGqlType(nodes) + const context = {} + const args = { + gqlType, + context, + firstOnly: false, + queryArgs, + } + return await nodesQuery.run(args) +} + +async function runFilter(filter) { + return await runQuery({ filter }) +} + +describe(`Filter fields`, () => { + it(`handles eq operator`, async () => { + let result = await runFilter({ hair: { eq: 2 } }) + + expect(result.length).toEqual(1) + expect(result[0].hair).toEqual(2) + }) + + it(`handles eq operator with false value`, async () => { + let result = await runFilter({ boolean: { eq: false } }) + + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`handles eq operator with 0`, async () => { + let result = await runFilter({ hair: { eq: 0 } }) + + expect(result.length).toEqual(1) + expect(result[0].hair).toEqual(0) + }) + + it(`handles ne operator`, async () => { + let result = await runFilter({ hair: { ne: 2 } }) + + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + }) + + it(`handles nested ne: true operator`, async () => { + let result = await runFilter({ waxOnly: { foo: { ne: true } } }) + + expect(result.length).toEqual(2) + }) + + it(`handles lt operator`, async () => { + let result = await runFilter({ hair: { lt: 2 } }) + + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(0) + }) + + it(`handles lte operator`, async () => { + let result = await runFilter({ hair: { lte: 1 } }) + + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(0) + }) + + it(`handles gt operator`, async () => { + let result = await runFilter({ hair: { gt: 0 } }) + + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(2) + }) + + 
it(`handles gte operator`, async () => { + let result = await runFilter({ hair: { gte: 1 } }) + + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(2) + }) + + it(`handles the regex operator`, async () => { + let result = await runFilter({ name: { regex: `/^the.*wax/i` } }) + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`handles the in operator for strings`, async () => { + let result = await runFilter({ string: { in: [`b`, `c`] } }) + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(1) + }) + + it(`handles the in operator for ints`, async () => { + let result = await runFilter({ index: { in: [0, 2] } }) + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(2) + }) + + it(`handles the in operator for floats`, async () => { + let result = await runFilter({ float: { in: [1.5, 2.5] } }) + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(1) + }) + + it(`handles the in operator for booleans`, async () => { + let result = await runFilter({ boolean: { in: [true] } }) + expect(result.length).toEqual(1) // 2 + expect(result[0].index).toEqual(0) + // expect(result[1].index).toEqual(2) + }) + + it(`handles the in operator for array`, async () => { + let result = await runFilter({ anArray: { in: [5] } }) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`handles the nested in operator for array of strings`, async () => { + let result = await runFilter({ frontmatter: { tags: { in: [`moo`] } } }) + expect(result).toHaveLength(1) + expect(result[0].name).toEqual(`The Mad Max`) + }) + + it(`handles the elemMatch operator for array of objects`, async () => { + let result = await runFilter({ + data: { + tags: { + elemMatch: { + tag: { + document: { + elemMatch: { + data: { + tag: { eq: `Gatsby` }, + }, + }, + }, + }, 
+ }, + }, + }, + }) + + expect(result.length).toEqual(1) + expect(result[0].index).toEqual(2) + }) + + it(`handles the elemMatch operator for array of objects (2)`, async () => { + let result = await runFilter({ + data: { + tags: { + elemMatch: { + tag: { + document: { + elemMatch: { + data: { + tag: { eq: `Design System` }, + }, + }, + }, + }, + }, + }, + }, + }) + + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(1) + expect(result[1].index).toEqual(2) + }) + + it(`handles the elemMatch operator for array of objects (number)`, async () => { + let result = await runFilter({ + data: { + tags: { + elemMatch: { + tag: { + document: { + elemMatch: { + number: { lt: 4 }, + }, + }, + }, + }, + }, + }, + }) + + expect(result.length).toEqual(1) + expect(result[0].index).toEqual(1) + }) + + it(`handles the nin operator for array`, async () => { + let result = await runFilter({ anArray: { nin: [5] } }) + + expect(result.length).toEqual(2) + + result.forEach(edge => { + expect(edge.anArray).not.toEqual(expect.arrayContaining([5])) + }) + }) + + it(`handles the nin operator for strings`, async () => { + let result = await runFilter({ string: { nin: [`b`, `c`] } }) + + expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.string).not.toEqual(`b`) + expect(edge.string).not.toEqual(`c`) + }) + }) + + it(`handles the nin operator for ints`, async () => { + let result = await runFilter({ index: { nin: [0, 2] } }) + + expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.index).not.toEqual(0) + expect(edge.index).not.toEqual(2) + }) + }) + + it(`handles the nin operator for floats`, async () => { + let result = await runFilter({ float: { nin: [1.5] } }) + + expect(result.length).toEqual(2) + result.forEach(edge => { + expect(edge.float).not.toEqual(1.5) + }) + }) + + it(`handles the nin operator for booleans`, async () => { + let result = await runFilter({ boolean: { nin: [true, null] } }) + + 
expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.boolean).not.toEqual(null) + expect(edge.boolean).not.toEqual(true) + }) + }) + + it(`handles the glob operator`, async () => { + let result = await runFilter({ name: { glob: `*Wax` } }) + + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`filters date fields`, async () => { + let result = await runFilter({ date: { ne: null } }) + + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(2) + }) +}) + +describe(`collection fields`, () => { + it(`sorts results`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`frontmatter___blue`], + order: `desc`, + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`sorts results with desc has null fields first`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`waxOnly`], + order: `desc`, + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts results with asc has null fields last`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`waxOnly`], + order: `asc`, + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`applies order (asc/desc) to all sort fields`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`frontmatter___blue`, `id`], + order: `desc`, + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) // blue = 10010, id = 1 + expect(result[1].id).toEqual(`2`) // blue = 10010, id = 2 + expect(result[2].id).toEqual(`0`) // blue = 100, id = 0 + }) +}) diff --git a/packages/gatsby/src/schema/build-node-connections.js 
b/packages/gatsby/src/schema/build-node-connections.js index 5bbd2d44888a2..6ea3524d86970 100644 --- a/packages/gatsby/src/schema/build-node-connections.js +++ b/packages/gatsby/src/schema/build-node-connections.js @@ -12,7 +12,7 @@ const buildSortArg = require(`./create-sort-field`) const buildConnectionFields = require(`./build-connection-fields`) const createPageDependency = require(`../redux/actions/add-page-dependency`) const { connectionFromArray } = require(`graphql-skip-limit`) -const { runQuery } = require(`../db/nodes`) +const { run: runQuery } = require(`../db/nodes-query`) function handleQueryResult({ results, queryArgs, path }) { if (results && results.length) { diff --git a/packages/gatsby/src/schema/build-node-types.js b/packages/gatsby/src/schema/build-node-types.js index 6376251882d24..3045878bf878d 100644 --- a/packages/gatsby/src/schema/build-node-types.js +++ b/packages/gatsby/src/schema/build-node-types.js @@ -21,7 +21,8 @@ const { getNodes, getNode } = require(`../db/nodes`) const pageDependencyResolver = require(`./page-dependency-resolver`) const { setFileNodeRootType } = require(`./types/type-file`) const { clearTypeExampleValues } = require(`./data-tree-utils`) -const { runQuery } = require(`../db/nodes`) +const { run: runQuery } = require(`../db/nodes-query`) +const lazyFields = require(`./lazy-fields`) import type { ProcessedNodeType } from "./infer-graphql-type" @@ -171,7 +172,7 @@ function buildNodeObjectType({ }) } -async function buildProcessedType(nodes, typeName, processedTypes, span) { +async function buildProcessedType({ nodes, typeName, processedTypes, span }) { const intermediateType = {} intermediateType.name = typeName @@ -188,6 +189,9 @@ async function buildProcessedType(nodes, typeName, processedTypes, span) { const pluginInputFields = inferInputObjectStructureFromFields({ fields: mergedFieldsFromPlugins, }) + _.each(pluginInputFields.inferredFields, (fieldConfig, fieldName) => { + lazyFields.add(typeName, fieldName) + }) 
const gqlType = buildNodeObjectType({ typeName, @@ -227,7 +231,7 @@ function groupNodesByType(nodes) { ) } -module.exports = async ({ parentSpan }) => { +async function buildAll({ parentSpan }) { const spanArgs = parentSpan ? { childOf: parentSpan } : {} const span = tracer.startSpan(`build schema`, spanArgs) @@ -243,12 +247,12 @@ module.exports = async ({ parentSpan }) => { await Promise.all( _.map(types, async (nodes, typeName) => { const fieldName = _.camelCase(typeName) - const processedType = await buildProcessedType( + const processedType = await buildProcessedType({ nodes, typeName, processedTypes, - span - ) + span, + }) processedTypes[fieldName] = processedType // Special case to construct linked file type used by type inferring if (typeName === `File`) { @@ -262,3 +266,9 @@ module.exports = async ({ parentSpan }) => { return processedTypes } + +module.exports = { + buildProcessedType, + buildNodeObjectType, + buildAll, +} diff --git a/packages/gatsby/src/schema/index.js b/packages/gatsby/src/schema/index.js index 99ba20836be44..802ed401b7714 100644 --- a/packages/gatsby/src/schema/index.js +++ b/packages/gatsby/src/schema/index.js @@ -3,7 +3,7 @@ const _ = require(`lodash`) const { GraphQLSchema, GraphQLObjectType } = require(`graphql`) const { mergeSchemas } = require(`graphql-tools`) -const buildNodeTypes = require(`./build-node-types`) +const nodeTypes = require(`./build-node-types`) const nodeConnections = require(`./build-node-connections`) const { store } = require(`../redux`) const invariant = require(`invariant`) @@ -21,7 +21,7 @@ module.exports.buildNodesSchema = buildNodesSchema module.exports.build = async ({ parentSpan }) => { clearUnionTypes() - const typesGQL = await buildNodeTypes({ parentSpan }) + const typesGQL = await nodeTypes.buildAll({ parentSpan }) const connections = nodeConnections.buildAll(_.values(typesGQL)) // Pull off just the graphql node from each type object. 
diff --git a/packages/gatsby/src/schema/infer-graphql-input-fields.js b/packages/gatsby/src/schema/infer-graphql-input-fields.js index cffcd9d5b639a..efcb3d425cdbf 100644 --- a/packages/gatsby/src/schema/infer-graphql-input-fields.js +++ b/packages/gatsby/src/schema/infer-graphql-input-fields.js @@ -210,6 +210,7 @@ function inferGraphQLInputFields({ const EXCLUDE_KEYS = { parent: 1, children: 1, + $loki: 1, } type InferInputOptions = { diff --git a/packages/gatsby/src/schema/infer-graphql-type.js b/packages/gatsby/src/schema/infer-graphql-type.js index df32260b360df..4e9c24ebd5802 100644 --- a/packages/gatsby/src/schema/infer-graphql-type.js +++ b/packages/gatsby/src/schema/infer-graphql-type.js @@ -26,6 +26,7 @@ const DateType = require(`./types/type-date`) const FileType = require(`./types/type-file`) const is32BitInteger = require(`../utils/is-32-bit-integer`) const unionTypes = new Map() +const lazyFields = require(`./lazy-fields`) import type { GraphQLOutputType } from "graphql" import type { @@ -113,21 +114,24 @@ function inferGraphQLType({ return { type: GraphQLBoolean } case `string`: return { type: GraphQLString } - case `object`: + case `object`: { + const typeName = createTypeName(fieldName) return { type: new GraphQLObjectType({ - name: createTypeName(fieldName), + name: typeName, fields: _inferObjectStructureFromNodes( { ...otherArgs, selector, nodes, types, + typeName, }, exampleValue ), }), } + } case `number`: return is32BitInteger(exampleValue) ? { type: GraphQLInt } @@ -304,18 +308,20 @@ type inferTypeOptions = { types: ProcessedNodeType[], ignoreFields?: string[], selector?: string, + typeName?: string, } const EXCLUDE_KEYS = { id: 1, parent: 1, children: 1, + $loki: 1, } // Call this for the top level node + recursively for each sub-object. // E.g. This gets called for Markdown and then for its frontmatter subobject. 
function _inferObjectStructureFromNodes( - { nodes, types, selector, ignoreFields }: inferTypeOptions, + { nodes, types, selector, ignoreFields, typeName }: inferTypeOptions, exampleValue: ?Object ): GraphQLFieldConfigMap<*, *> { const config = store.getState().config @@ -325,12 +331,15 @@ function _inferObjectStructureFromNodes( // Ensure nodes have internal key with object. nodes = nodes.map(n => (n.internal ? n : { ...n, internal: {} })) - const typeName: string = nodes[0].internal.type + const rootTypeName: string = nodes[0].internal.type + if (!typeName) { + typeName = rootTypeName + } let resolvedExample: Object = exampleValue != null ? exampleValue - : getExampleValues({ nodes, typeName, ignoreFields }) + : getExampleValues({ nodes, typeName: rootTypeName, ignoreFields }) const inferredFields = {} _.each(resolvedExample, (value, key) => { @@ -341,7 +350,7 @@ function _inferObjectStructureFromNodes( // Several checks to see if a field is pointing to custom type // before we try automatic inference. const nextSelector = selector ? `${selector}.${key}` : key - const fieldSelector = `${typeName}.${nextSelector}` + const fieldSelector = `${rootTypeName}.${nextSelector}` let fieldName = key let inferredField @@ -356,6 +365,7 @@ function _inferObjectStructureFromNodes( } else if (key.includes(`___NODE`)) { ;[fieldName] = key.split(`___`) inferredField = inferFromFieldName(value, nextSelector, types) + lazyFields.add(typeName, fieldName) } // Replace unsupported values diff --git a/packages/gatsby/src/schema/lazy-fields.js b/packages/gatsby/src/schema/lazy-fields.js new file mode 100644 index 0000000000000..5d3646a95ae15 --- /dev/null +++ b/packages/gatsby/src/schema/lazy-fields.js @@ -0,0 +1,64 @@ +// A normal Graphql field resolver will accept a node as an argument +// and return a field from that node. Whereas a lazy field will need +// to perform some side effects or non-deterministic behavior to +// return its value. 
Therefore, when a query filter includes a lazy +// field, we need to evaluate the field resolvers on all nodes before +// running the query. Examples of lazy fields include: +// +// - a markdown `wordcount` field (lazily calculates word count on its +// content) +// - image sharp processing field (lazily generates optimized images) +// +// Lazy fields are declared using the exported `add` function. This +// should be done during schema generation when fields are being +// created. Then at query time, we can use the exported `contains` +// function to figure out if a type/field pair is lazy, and therefore +// use sift for querying instead of loki + +const _ = require(`lodash`) +const { GraphQLList, GraphQLObjectType } = require(`graphql`) + +// Note: fields are never deleted from here. So a long running +// `develop` session, where nodes are being deleted might mean that +// fields exist here that aren't on any DB nodes anymore. This isn't +// ideal, BUT, the worst case is that queries will be executed by +// sift, rather than loki, so not a big deal +const typeFields = new Map() + +function contains(filters, fieldType) { + return _.some(filters, (fieldFilter, fieldName) => { + // If a field has been previously flagged as a lazy field, then + // return true + const storedFields = typeFields.get(fieldType.name) + if (storedFields && storedFields.has(fieldName)) { + return true + } else { + // Otherwise, the filter field might be an array of linked + // nodes, in which case we might filter via an elemMatch + // field. Or, it might be a nested linked object. 
In either + // case, we recurse + const gqlFieldType = fieldType.getFields()[fieldName]?.type + if (gqlFieldType) { + if (gqlFieldType instanceof GraphQLList && fieldFilter.elemMatch) { + return contains(fieldFilter.elemMatch, gqlFieldType.ofType) + } else if (gqlFieldType instanceof GraphQLObjectType) { + return contains(fieldFilter, gqlFieldType) + } + } + } + return false + }) +} + +function add(typeName, fieldName) { + if (typeFields.get(typeName)) { + typeFields.get(typeName).add(fieldName) + } else { + typeFields.set(typeName, new Set([fieldName])) + } +} + +module.exports = { + contains, + add, +} diff --git a/yarn.lock b/yarn.lock index da1a4d0b5d641..51e3230108825 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1620,6 +1620,11 @@ npmlog "^4.1.2" write-file-atomic "^2.3.0" +"@moocar/lokijs@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@moocar/lokijs/-/lokijs-1.0.1.tgz#545227c173030dd0e6b1e6bdcef528012a6c325c" + integrity sha512-7kqLSxGjYTJ+a+DkJ71bJSF3LLuOShSFCXfv5Eg2qVpCQp/E1JTlAp+rHgVy2HAu8QLuePKx57xURwt6o1EuFA== + "@mrmlnc/readdir-enhanced@^2.2.1": version "2.2.1" resolved "https://registry.yarnpkg.com/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz#524af240d1a360527b730475ecfa1344aa540dde"