Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SRS caching #1197

Merged
merged 19 commits into from
Oct 31, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,11 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
- Use cached prover keys in `compile()` when running in Node.js https://github.com/o1-labs/o1js/pull/1187
- Caching is configurable by passing a custom `Cache` (new export) to `compile()`
- By default, prover keys are stored in an OS-dependent cache directory; `~/.cache/pickles` on Mac and Linux
- Use cached setup points (SRS and Lagrange bases) when running in Node.js https://github.com/o1-labs/o1js/pull/1197
- Also, speed up SRS generation by using multiple threads
- Together with caching of prover keys, this reduces compilation time by roughly
- **86%** when everything is cached
- **34%** when nothing is cached

## [0.13.1](https://github.com/o1-labs/o1js/compare/c2f392fe5...045faa7)

Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
"dev": "npx tsc -p tsconfig.node.json && node src/build/copy-to-dist.js",
"make": "make -C ../../.. snarkyjs",
"make:no-types": "npm run clean && make -C ../../.. snarkyjs_no_types",
"wasm": "./src/bindings/scripts/update-wasm-and-types.sh",
"bindings": "cd ../../.. && ./scripts/update-snarkyjs-bindings.sh && cd src/lib/snarkyjs",
"build": "node src/build/copy-artifacts.js && rimraf ./dist/node && npm run dev && node src/build/buildNode.js",
"build:test": "npx tsc -p tsconfig.test.json && cp src/snarky.d.ts dist/node/snarky.d.ts",
Expand Down
2 changes: 1 addition & 1 deletion src/bindings
2 changes: 2 additions & 0 deletions src/examples/simple_zkapp.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,9 @@ let zkapp = new SimpleZkapp(zkappAddress);

if (doProofs) {
console.log('compile');
console.time('compile');
await SimpleZkapp.compile();
console.timeEnd('compile');
}

console.log('deploy');
Expand Down
2 changes: 1 addition & 1 deletion src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ export {
Undefined,
Void,
} from './lib/proof_system.js';
export { Cache } from './lib/proof-system/cache.js';
export { Cache, CacheHeader } from './lib/proof-system/cache.js';

export {
Token,
Expand Down
6 changes: 6 additions & 0 deletions src/lib/ml/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,12 @@ const MlArray = {
map<T, S>([, ...arr]: MlArray<T>, map: (t: T) => S): MlArray<S> {
return [0, ...arr.map(map)];
},
// Convert a plain JS array into an OCaml-style array: tag `0` followed by
// the elements, with `map` applied to each one on the way in.
mapTo<T, S>(arr: T[], map: (t: T) => S): MlArray<S> {
  const mapped = arr.map(map);
  return [0, ...mapped];
},
// Convert an OCaml-style array back into a plain JS array, applying `map`
// to each element; the leading tag is discarded by the destructuring pattern.
mapFrom<T, S>([, ...arr]: MlArray<T>, map: (t: T) => S): S[] {
  const elements = arr.map(map);
  return elements;
},
};

const MlTuple = Object.assign(
Expand Down
90 changes: 86 additions & 4 deletions src/lib/proof-system/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,26 @@ import {
} from '../util/fs.js';
import { jsEnvironment } from '../../bindings/crypto/bindings/env.js';

export { Cache, CacheHeader, cacheHeaderVersion };
// external API
export { Cache, CacheHeader };

// internal API
export { readCache, writeCache, withVersion, cacheHeaderVersion };

/**
* Interface for storing and retrieving values, for caching.
* `read()` and `write()` can just throw errors on failure.
*
* The data that will be passed to the cache for writing is exhaustively described by the {@link CacheHeader} type.
* It represents one of the following:
* - The SRS. These are deterministic lists of curve points (one list per curve) that need to be generated just once,
* to be used for polynomial commitments.
* - Lagrange basis commitments. Similar to the SRS, this will be created once for every power-of-2 circuit size.
* - Prover and verifier keys for every compiled circuit.
*
* Per smart contract or ZkProgram, several different keys are created:
* - a step prover key (`step-pk`) and verification key (`step-vk`) _for every method_.
* - a wrap prover key (`wrap-pk`) and verification key (`wrap-vk`) for the entire contract.
*/
type Cache = {
/**
Expand All @@ -20,20 +35,30 @@ type Cache = {
* @param header A small header to identify what is read from the cache.
*/
read(header: CacheHeader): Uint8Array | undefined;

/**
* Write a value to the cache.
*
* @param header A small header to identify what is written to the cache. This will be used by `read()` to retrieve the data.
* @param value The value to write to the cache, as a byte array.
*/
write(header: CacheHeader, value: Uint8Array): void;

/**
* Indicates whether the cache is writable.
*/
canWrite: boolean;

/**
* If `debug` is toggled, `read()` and `write()` errors are logged to the console.
*
* By default, cache errors are silent, because they don't necessarily represent an error condition,
* but could just be a cache miss, or file system permissions incompatible with writing data.
*/
debug?: boolean;
};

const cacheHeaderVersion = 0.1;
const cacheHeaderVersion = 1;

type CommonHeader = {
/**
Expand All @@ -54,6 +79,7 @@ type CommonHeader = {
*/
dataType: 'string' | 'bytes';
};

type StepKeyHeader<Kind> = {
kind: Kind;
programName: string;
Expand All @@ -62,9 +88,10 @@ type StepKeyHeader<Kind> = {
hash: string;
};
// Header for wrap-level prover/verifier keys: one per contract/program (not per method).
type WrapKeyHeader<Kind> = { kind: Kind; programName: string; hash: string };
// Header with no program-specific data, used for program-independent artifacts
// such as the SRS and Lagrange basis commitments.
type PlainHeader<Kind> = { kind: Kind };

/**
* A header that is passed to the caching layer, to support richer caching strategies.
* A header that is passed to the caching layer, to support rich caching strategies.
*
* Both `uniqueId` and `programId` can safely be used as a file path.
*/
Expand All @@ -73,9 +100,57 @@ type CacheHeader = (
| StepKeyHeader<'step-vk'>
| WrapKeyHeader<'wrap-pk'>
| WrapKeyHeader<'wrap-vk'>
| PlainHeader<'srs'>
| PlainHeader<'lagrange-basis'>
) &
CommonHeader;

/**
 * Stamp a cache header with a format version, folding the version into
 * `uniqueId` so entries written under a different format version never match.
 *
 * @param header header without a version field
 * @param version format version to stamp; defaults to the current `cacheHeaderVersion`
 * @returns a complete {@link CacheHeader} with `version` set and `uniqueId` suffixed by it
 */
function withVersion(
  header: Omit<CacheHeader, 'version'>,
  version = cacheHeaderVersion
): CacheHeader {
  return {
    ...header,
    version,
    uniqueId: `${header.uniqueId}-${version}`,
  } as CacheHeader;
}

// default methods to interact with a cache

/**
 * Read a value from a cache, optionally post-processing the raw bytes.
 *
 * All failures are non-fatal: a cache miss, a throwing `cache.read()`, and a
 * throwing `transform` all result in `undefined` (logged only if `cache.debug`
 * is set), so callers can always fall back to recomputing the value.
 *
 * @param cache the cache to read from
 * @param header identifies which entry to read
 * @param transform optional decoder applied to the raw bytes
 * @returns the (possibly transformed) cached value, or `undefined` on any failure
 */
function readCache(cache: Cache, header: CacheHeader): Uint8Array | undefined;
function readCache<T>(
  cache: Cache,
  header: CacheHeader,
  transform: (x: Uint8Array) => T
): T | undefined;
function readCache<T>(
  cache: Cache,
  header: CacheHeader,
  transform?: (x: Uint8Array) => T
): T | undefined {
  try {
    let bytes = cache.read(header);
    if (bytes === undefined) {
      if (cache.debug) console.trace('cache miss');
      return undefined;
    }
    // without a transform, the overload guarantees T = Uint8Array
    return transform === undefined ? (bytes as any as T) : transform(bytes);
  } catch (e) {
    if (cache.debug) console.log('Failed to read cache', e);
    return undefined;
  }
}

/**
 * Write a value to a cache, swallowing any error the cache throws.
 *
 * @param cache the cache to write to
 * @param header identifies the entry being written; `read()` uses it to find the data again
 * @param value raw bytes to store
 * @returns `true` if the write succeeded, `false` if the cache is read-only or the write threw
 */
function writeCache(cache: Cache, header: CacheHeader, value: Uint8Array) {
  if (cache.canWrite) {
    try {
      cache.write(header, value);
      return true;
    } catch (err) {
      if (cache.debug) console.log('Failed to write cache', err);
    }
  }
  return false;
}

const None: Cache = {
read() {
throw Error('not available');
Expand All @@ -86,7 +161,7 @@ const None: Cache = {
canWrite: false,
};

const FileSystem = (cacheDirectory: string): Cache => ({
const FileSystem = (cacheDirectory: string, debug?: boolean): Cache => ({
read({ persistentId, uniqueId, dataType }) {
if (jsEnvironment !== 'node') throw Error('file system not available');

Expand Down Expand Up @@ -116,6 +191,7 @@ const FileSystem = (cacheDirectory: string): Cache => ({
});
},
canWrite: jsEnvironment === 'node',
debug,
});

const FileSystemDefault = FileSystem(cacheDir('o1js'));
Expand All @@ -124,12 +200,18 @@ const Cache = {
/**
* Store data on the file system, in a directory of your choice.
*
* Data will be stored in two files per cache entry: a data file and a `.header` file.
* The header file just contains a unique string which is used to determine whether we can use the cached data.
*
* Note: this {@link Cache} only caches data in Node.js.
*/
FileSystem,
/**
* Store data on the file system, in a standard cache directory depending on the OS.
*
* Data will be stored in two files per cache entry: a data file and a `.header` file.
* The header file just contains a unique string which is used to determine whether we can use the cached data.
*
* Note: this {@link Cache} only caches data in Node.js.
*/
FileSystemDefault,
Expand Down
28 changes: 13 additions & 15 deletions src/lib/proof_system.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,13 @@ import { hashConstant } from './hash.js';
import { MlArray, MlBool, MlResult, MlTuple, MlUnit } from './ml/base.js';
import { MlFieldArray, MlFieldConstArray } from './ml/fields.js';
import { FieldConst, FieldVar } from './field.js';
import { Cache } from './proof-system/cache.js';
import { Cache, readCache, writeCache } from './proof-system/cache.js';
import {
decodeProverKey,
encodeProverKey,
parseHeader,
} from './proof-system/prover-keys.js';
import { setSrsCache, unsetSrsCache } from '../bindings/crypto/bindings/srs.js';

// public API
export {
Expand Down Expand Up @@ -591,25 +592,20 @@ async function compileProgram({
0,
function read_(mlHeader) {
let header = parseHeader(proofSystemTag.name, methodIntfs, mlHeader);
try {
let bytes = cache.read(header);
if (bytes === undefined) return MlResult.unitError();
return MlResult.ok(decodeProverKey(mlHeader, bytes));
} catch (e: any) {
return MlResult.unitError();
}
let result = readCache(cache, header, (bytes) =>
decodeProverKey(mlHeader, bytes)
);
if (result === undefined) return MlResult.unitError();
return MlResult.ok(result);
},
function write_(mlHeader, value) {
if (!cache.canWrite) return MlResult.unitError();

let header = parseHeader(proofSystemTag.name, methodIntfs, mlHeader);
try {
let bytes = encodeProverKey(value);
cache.write(header, bytes);
return MlResult.ok(undefined);
} catch (e: any) {
return MlResult.unitError();
}
let didWrite = writeCache(cache, header, encodeProverKey(value));

if (!didWrite) return MlResult.unitError();
return MlResult.ok(undefined);
},
MlBool(cache.canWrite),
];
Expand All @@ -619,6 +615,7 @@ async function compileProgram({
withThreadPool(async () => {
let result: ReturnType<typeof Pickles.compile>;
let id = snarkContext.enter({ inCompile: true });
setSrsCache(cache);
try {
result = Pickles.compile(MlArray.to(rules), {
publicInputSize: publicInputType.sizeInFields(),
Expand All @@ -628,6 +625,7 @@ async function compileProgram({
});
} finally {
snarkContext.leave(id);
unsetSrsCache();
}
let { getVerificationKey, provers, verify, tag } = result;
CompiledTag.store(proofSystemTag, tag);
Expand Down
Loading