diff --git a/docker-compose.provernet.yml b/docker-compose.provernet.yml index 83e6cd1913f..372d2602e9e 100644 --- a/docker-compose.provernet.yml +++ b/docker-compose.provernet.yml @@ -65,7 +65,8 @@ services: ARCHIVER_POLLING_INTERVAL_MS: 1000 ARCHIVER_VIEM_POLLING_INTERVAL_MS: 1000 PROVER_VIEM_POLLING_INTERVAL_MS: 1000 - PROVER_AGENT_ENABLED: false + PROVER_AGENT_COUNT: 0 + PROVER_BROKER_HOST: http://aztec-prover-broker PROVER_PUBLISHER_PRIVATE_KEY: "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97" PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" PROVER_MINIMUM_ESCROW_AMOUNT: 1000000000 @@ -76,6 +77,8 @@ services: depends_on: aztec-node: condition: service_healthy + aztec-prover-broker: + condition: service_healthy healthcheck: test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] interval: 3s @@ -84,6 +87,21 @@ services: command: [ "start", "--prover-node", "--archiver" ] restart: on-failure:5 + aztec-prover-broker: + image: "aztecprotocol/${IMAGE:-aztec:master}" + ports: + - "8084:80" + environment: + LOG_LEVEL: verbose + AZTEC_PORT: 80 + healthcheck: + test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] + interval: 3s + timeout: 30s + start_period: 120s + command: [ "start", "--prover-broker" ] + restart: on-failure:5 + # Prover agent that connects to the prover-node for fetching proving jobs and executing them # Multiple instances can be run, or PROVER_AGENT_CONCURRENCY can be increased to run multiple workers in a single instance aztec-prover-agent: @@ -93,13 +111,11 @@ services: environment: LOG_LEVEL: verbose ETHEREUM_HOST: http://ethereum:8545 - AZTEC_NODE_URL: http://aztec-prover # Deprecated, use PROVER_JOB_SOURCE_URL - PROVER_JOB_SOURCE_URL: http://aztec-prover + PROVER_BROKER_HOST: http://aztec-prover-broker L1_CHAIN_ID: 31337 AZTEC_PORT: 80 PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" PROVER_TEST_DELAY_MS: "${PROVER_TEST_DELAY_MS:-0}" - PROVER_AGENT_CONCURRENCY: 2 BB_SKIP_CLEANUP: "${BB_SKIP_CLEANUP:-0}" # 
Persist tmp dirs for debugging PROVER_ID: "${PROVER_ID:-0x01}" volumes: @@ -107,9 +123,12 @@ services: - ./cache/bb-crs/:/root/.bb-crs:rw - ./workdir/bb-prover/:/usr/src/yarn-project/bb:rw depends_on: - aztec-prover: + aztec-prover-broker: condition: service_healthy - command: [ "start", "--prover" ] + command: [ "start", "--prover-agent" ] + deploy: + mode: replicated + replicas: 2 restart: on-failure:5 healthcheck: test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 4594b7a7740..5ca3bb5a248 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -79,10 +79,19 @@ else PROVER_NODE_ADDR="http://${SERVICE_NAME}-prover-node.${NAMESPACE}:${PROVER_NODE_PORT}" fi +if [ "${PROVER_BROKER_EXTERNAL_HOST}" != "" ]; then + PROVER_BROKER_ADDR="${PROVER_BROKER_EXTERNAL_HOST}" +elif [ "${NETWORK_PUBLIC}" = "true" ]; then + PROVER_BROKER_ADDR=$(get_service_address "prover-broker" "${PROVER_BROKER_PORT}") +else + PROVER_BROKER_ADDR="http://${SERVICE_NAME}-prover-broker.${NAMESPACE}:${PROVER_BROKER_PORT}" +fi + # Write addresses to file for sourcing echo "export ETHEREUM_HOST=${ETHEREUM_ADDR}" >> /shared/config/service-addresses echo "export BOOT_NODE_HOST=${BOOT_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_NODE_HOST=${PROVER_NODE_ADDR}" >> /shared/config/service-addresses +echo "export PROVER_BROKER_HOST=${PROVER_BROKER_ADDR}" >> /shared/config/service-addresses echo "Addresses configured:" cat /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 8afb0c4636d..3db484690a0 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -165,6 +165,8 @@ Service Address Setup Container 
value: "{{ .Values.proverNode.externalHost }}" - name: PROVER_NODE_PORT value: "{{ .Values.proverNode.service.nodePort }}" + - name: PROVER_BROKER_PORT + value: "{{ .Values.proverBroker.service.nodePort }}" - name: SERVICE_NAME value: {{ include "aztec-network.fullname" . }} volumeMounts: diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 34f9648f3ba..ef080501868 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -50,11 +50,11 @@ spec: - -c - | source /shared/config/service-addresses - until curl -s -X POST ${PROVER_NODE_HOST}/status; do - echo "Waiting for Prover node ${PROVER_NODE_HOST} ..." + until curl -s -X POST ${PROVER_BROKER_HOST}/status; do + echo "Waiting for broker ${PROVER_BROKER_HOST} ..." sleep 5 done - echo "Prover node is ready!" + echo "Broker is ready!" {{- if .Values.telemetry.enabled }} until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do echo "Waiting for OpenTelemetry collector..." 
@@ -77,8 +77,7 @@ spec: - "-c" - | source /shared/config/service-addresses && \ - PROVER_JOB_SOURCE_URL=${PROVER_NODE_HOST} \ - node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-agent env: - name: AZTEC_PORT value: "{{ .Values.proverAgent.service.nodePort }}" @@ -90,12 +89,12 @@ spec: value: "{{ .Values.proverAgent.debug }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.proverAgent.realProofs }}" - - name: PROVER_AGENT_ENABLED - value: "true" - - name: PROVER_AGENT_CONCURRENCY - value: {{ .Values.proverAgent.concurrency | quote }} - - name: HARDWARE_CONCURRENCY - value: {{ .Values.proverAgent.bb.hardwareConcurrency | quote }} + - name: PROVER_AGENT_COUNT + value: "1" + - name: PROVER_AGENT_POLL_INTERVAL_MS + value: "{{ .Values.proverAgent.pollIntervalMs }}" + - name: PROVER_AGENT_PROOF_TYPES + value: {{ join "," .Values.proverAgent.proofTypes | quote }} - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml new file mode 100644 index 00000000000..214b6720fce --- /dev/null +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -0,0 +1,104 @@ +{{- if .Values.proverBroker.enabled }} +apiVersion: apps/v1 +kind: ReplicaSet +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-broker + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.proverBroker.replicas }} + selector: + matchLabels: + {{- include "aztec-network.selectorLabels" . | nindent 6 }} + app: prover-broker + template: + metadata: + labels: + {{- include "aztec-network.selectorLabels" . 
| nindent 8 }} + app: prover-broker + spec: + serviceAccountName: {{ include "aztec-network.fullname" . }}-node + {{- if .Values.network.public }} + hostNetwork: true + {{- end }} + volumes: + - name: config + emptyDir: {} + - name: scripts + configMap: + name: {{ include "aztec-network.fullname" . }}-scripts + initContainers: + {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} + - name: wait-for-prover-node + image: {{ .Values.images.aztec.image }} + command: + - /bin/bash + - -c + - | + source /shared/config/service-addresses + {{- if .Values.telemetry.enabled }} + until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do + echo "Waiting for OpenTelemetry collector..." + sleep 5 + done + echo "OpenTelemetry collector is ready!" + {{- end }} + volumeMounts: + - name: config + mountPath: /shared/config + containers: + - name: prover-broker + image: "{{ .Values.images.aztec.image }}" + imagePullPolicy: {{ .Values.images.aztec.pullPolicy }} + volumeMounts: + - name: config + mountPath: /shared/config + command: + - "/bin/bash" + - "-c" + - | + source /shared/config/service-addresses && \ + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-broker + env: + - name: AZTEC_PORT + value: "{{ .Values.proverBroker.service.nodePort }}" + - name: LOG_LEVEL + value: "{{ .Values.proverBroker.logLevel }}" + - name: LOG_JSON + value: "1" + - name: DEBUG + value: "{{ .Values.proverBroker.debug }}" + - name: PROVER_BROKER_POLL_INTERVAL_MS + value: "{{ .Values.proverBroker.pollIntervalMs }}" + - name: PROVER_BROKER_JOB_TIMEOUT_MS + value: "{{ .Values.proverBroker.jobTimeoutMs }}" + - name: PROVER_BROKER_JOB_MAX_RETRIES + value: "{{ .Values.proverBroker.jobMaxRetries }}" + - name: PROVER_BROKER_DATA_DIRECTORY + value: "{{ .Values.proverBroker.dataDirectory }}" + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace 
}},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} + - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} + resources: + {{- toYaml .Values.proverBroker.resources | nindent 12 }} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-broker + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + type: ClusterIP + selector: + {{- include "aztec-network.selectorLabels" . | nindent 4 }} + app: prover-broker + ports: + - port: {{ .Values.proverBroker.service.nodePort }} + name: node +{{ end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 6b7506149a2..bf13dad1821 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -39,6 +39,17 @@ spec: sleep 5 done echo "Ethereum node is ready!" + + if [ "${PROVER_BROKER_ENABLED}" == "false" ]; then + until curl -s -X POST ${PROVER_BROKER_HOST}/status; do + echo "Waiting for broker ${PROVER_BROKER_HOST} ..." + sleep 5 + done + echo "Broker is ready!" + else + echo "Using built-in job broker" + fi + {{- if .Values.telemetry.enabled }} until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do echo "Waiting for OpenTelemetry collector..." 
@@ -54,6 +65,10 @@ spec: volumeMounts: - name: config mountPath: /shared/config + env: + - name: PROVER_BROKER_ENABLED + value: "{{ .Values.proverNode.proverBroker.enabled }}" + - name: configure-prover-env image: "{{ .Values.images.aztec.image }}" imagePullPolicy: {{ .Values.images.aztec.pullPolicy }} @@ -107,15 +122,26 @@ spec: value: "{{ .Values.proverNode.debug }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.proverNode.realProofs }}" - - name: PROVER_AGENT_ENABLED - value: "{{ .Values.proverNode.proverAgentEnabled }}" + - name: PROVER_AGENT_COUNT + value: "{{ .Values.proverNode.proverAgent.count }}" + - name: PROVER_AGENT_POLL_INTERVAL_MS + value: "{{ .Values.proverNode.proverAgent.pollIntervalMs }}" + - name: PROVER_AGENT_PROOF_TYPES + value: {{ join "," .Values.proverNode.proverAgent.proofTypes | quote }} + - name: PROVER_BROKER_ENABLED + value: "{{ .Values.proverNode.proverBroker.enabled }}" + - name: PROVER_BROKER_POLL_INTERVAL_MS + value: "{{ .Values.proverNode.proverBroker.pollIntervalMs }}" + - name: PROVER_BROKER_JOB_TIMEOUT_MS + value: "{{ .Values.proverNode.proverBroker.jobTimeoutMs }}" + - name: PROVER_BROKER_JOB_MAX_RETRIES + value: "{{ .Values.proverNode.proverBroker.jobMaxRetries }}" + - name: PROVER_BROKER_DATA_DIRECTORY + value: "{{ .Values.proverNode.proverBroker.dataDirectory }}" - name: PROVER_PUBLISHER_PRIVATE_KEY value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - # get private proofs from the boot node - - name: PROVER_JOB_SOURCE_URL - value: "http://$(POD_IP):{{ .Values.proverNode.service.nodePort }}" - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . 
| quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 0be51cd0d26..245b51f9435 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -118,7 +118,16 @@ proverNode: logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" realProofs: false - proverAgentEnabled: false + proverAgent: + count: 0 + pollIntervalMs: 1000 + proofTypes: [] + proverBroker: + enabled: false + jobTimeoutMs: 30000 + pollIntervalMs: 1000 + jobMaxRetries: 3 + dataDirectory: "" resources: requests: memory: "2Gi" @@ -206,17 +215,32 @@ proverAgent: nodePort: 8083 enabled: true replicas: 1 + pollIntervalMs: 1000 + proofTypes: ["foo", "bar", "baz"] gke: spotEnabled: false logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" realProofs: false - concurrency: 1 bb: hardwareConcurrency: "" nodeSelector: {} resources: {} +proverBroker: + service: + nodePort: 8084 + enabled: true + replicas: 1 + jobTimeoutMs: 30000 + pollIntervalMs: 1000 + jobMaxRetries: 3 + dataDirectory: "" + logLevel: "debug" + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + nodeSelector: {} + resources: {} + jobs: deployL1Verifier: enable: false diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 936e134eeb9..a2c686cdd6c 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -165,6 +165,7 @@ export class AztecNodeService implements AztecNode { // now create the merkle trees and the world state synchronizer const worldStateSynchronizer = await 
createWorldStateSynchronizer(config, archiver, telemetry); const proofVerifier = config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(); + log.info(`Aztec node accepting ${config.realProofs ? 'real' : 'test'} proofs`); // create the tx pool and the p2p client, which will need the l2 block source const p2pClient = await createP2PClient(config, archiver, proofVerifier, worldStateSynchronizer, telemetry); diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 53b0ab01949..90b0a970092 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -1,16 +1,21 @@ import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver'; import { sequencerClientConfigMappings } from '@aztec/aztec-node'; import { botConfigMappings } from '@aztec/bot'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMapping, type EnvVar, booleanConfigHelper, - filterConfigMappings, isBooleanConfigValue, + omitConfigMappings, } from '@aztec/foundation/config'; import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p'; import { proofVerifierConfigMappings } from '@aztec/proof-verifier'; -import { proverClientConfigMappings } from '@aztec/prover-client'; import { proverNodeConfigMappings } from '@aztec/prover-node'; import { allPxeConfigMappings } from '@aztec/pxe'; import { telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; @@ -239,15 +244,6 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions('sequencer', sequencerClientConfigMappings), ], - 'PROVER AGENT': [ - { - flag: '--prover', - description: 'Starts Aztec Prover Agent with options', - defaultValue: undefined, - envVar: undefined, - }, - ...getOptions('prover', 
proverClientConfigMappings), - ], 'PROVER NODE': [ { flag: '--prover-node', @@ -263,10 +259,36 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions( 'proverNode', + omitConfigMappings(proverNodeConfigMappings, [ + // filter out options passed separately + ...(Object.keys(archiverConfigMappings) as (keyof ArchiverConfig)[]), + ...(Object.keys(proverBrokerConfigMappings) as (keyof ProverBrokerConfig)[]), + ...(Object.keys(proverAgentConfigMappings) as (keyof ProverAgentConfig)[]), + ]), + ), + ], + 'PROVER BROKER': [ + { + flag: '--prover-broker', + description: 'Starts Aztec proving job broker', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions( + 'proverBroker', // filter out archiver options from prover node options as they're passed separately in --archiver - filterConfigMappings(proverNodeConfigMappings, Object.keys(archiverConfigMappings) as (keyof ArchiverConfig)[]), + proverBrokerConfigMappings, ), ], + 'PROVER AGENT': [ + { + flag: '--prover-agent', + description: 'Starts Aztec Prover Agent with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('proverAgent', proverAgentConfigMappings), + ], 'P2P BOOTSTRAP': [ { flag: '--p2p-bootstrap', diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 57650fede28..91d803851e4 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -96,9 +96,12 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge } else if (options.p2pBootstrap) { const { startP2PBootstrap } = await import('./cmds/start_p2p_bootstrap.js'); await startP2PBootstrap(options, userLog, debugLogger); - } else if (options.prover) { + } else if (options.proverAgent) { const { startProverAgent } = await import('./cmds/start_prover_agent.js'); await startProverAgent(options, signalHandlers, services, userLog); + } else if (options.proverBroker) { + const { startProverBroker } = 
await import('./cmds/start_prover_broker.js'); + await startProverBroker(options, signalHandlers, services, userLog); } else if (options.txe) { const { startTXE } = await import('./cmds/start_txe.js'); await startTXE(options, debugLogger); diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 2cbad090b25..3ae24df0ad9 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -1,14 +1,11 @@ -import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; -import { ProverAgentApiSchema, type ServerCircuitProver } from '@aztec/circuit-types'; +import { type ProverAgentConfig, proverAgentConfigMappings } from '@aztec/circuit-types'; +import { times } from '@aztec/foundation/collection'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; -import { type ProverClientConfig, proverClientConfigMappings } from '@aztec/prover-client'; -import { ProverAgent, createProvingJobSourceClient } from '@aztec/prover-client/prover-agent'; -import { - type TelemetryClientConfig, - createAndStartTelemetryClient, - telemetryClientConfigMappings, -} from '@aztec/telemetry-client/start'; +import { buildServerCircuitProver } from '@aztec/prover-client'; +import { InlineProofStore, ProvingAgent, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; +import { getProverNodeAgentConfigFromEnv } from '@aztec/prover-node'; +import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; import { extractRelevantOptions } from '../util.js'; @@ -16,36 +13,39 @@ export async function startProverAgent( options: any, signalHandlers: (() => Promise)[], services: NamespacedApiHandlers, - logger: LogFn, + userLog: LogFn, ) { - const proverConfig = extractRelevantOptions(options, proverClientConfigMappings, 
'prover'); - const proverJobSourceUrl = proverConfig.proverJobSourceUrl ?? proverConfig.nodeUrl; - if (!proverJobSourceUrl) { - throw new Error('Starting prover without PROVER_JOB_SOURCE_URL is not supported'); + if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { + userLog(`Starting a prover agent with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); + process.exit(1); } - logger(`Connecting to prover at ${proverJobSourceUrl}`); - const source = createProvingJobSourceClient(proverJobSourceUrl); + const config = { + ...getProverNodeAgentConfigFromEnv(), // get default config from env + ...extractRelevantOptions(options, proverAgentConfigMappings, 'proverAgent'), // override with command line options + }; - const telemetryConfig = extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'); - const telemetry = await createAndStartTelemetryClient(telemetryConfig); + if (config.realProofs && (!config.bbBinaryPath || !config.acvmBinaryPath)) { + process.exit(1); + } - let circuitProver: ServerCircuitProver; - if (proverConfig.realProofs) { - if (!proverConfig.acvmBinaryPath || !proverConfig.bbBinaryPath) { - throw new Error('Cannot start prover without simulation or native prover options'); - } - circuitProver = await BBNativeRollupProver.new(proverConfig, telemetry); - } else { - circuitProver = new TestCircuitProver(telemetry, undefined, proverConfig); + if (!config.proverBrokerUrl) { + process.exit(1); } - const { proverAgentConcurrency, proverAgentPollInterval } = proverConfig; - const agent = new ProverAgent(circuitProver, proverAgentConcurrency, proverAgentPollInterval); - agent.start(source); + const broker = createProvingJobBrokerClient(config.proverBrokerUrl); + + const telemetry = await createAndStartTelemetryClient( + extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'), + ); + const prover = await buildServerCircuitProver(config, telemetry); + const proofStore = 
new InlineProofStore(); + const agents = times(config.proverAgentCount, () => new ProvingAgent(broker, proofStore, prover)); - logger(`Started prover agent with concurrency limit of ${proverAgentConcurrency}`); + await Promise.all(agents.map(agent => agent.start())); - services.prover = [agent, ProverAgentApiSchema]; - signalHandlers.push(() => agent.stop()); + signalHandlers.push(async () => { + await Promise.all(agents.map(agent => agent.stop())); + await telemetry.stop(); + }); } diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts new file mode 100644 index 00000000000..197d48971c9 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -0,0 +1,32 @@ +import { type ProverBrokerConfig, type ProvingJobBroker, proverBrokerConfigMappings } from '@aztec/circuit-types'; +import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; +import { type LogFn } from '@aztec/foundation/log'; +import { ProvingJobBrokerSchema, createAndStartProvingBroker } from '@aztec/prover-client/broker'; +import { getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node'; + +import { extractRelevantOptions } from '../util.js'; + +export async function startProverBroker( + options: any, + signalHandlers: (() => Promise)[], + services: NamespacedApiHandlers, + userLog: LogFn, +): Promise { + if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { + userLog(`Starting a prover broker with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); + process.exit(1); + } + + const config: ProverBrokerConfig = { + ...getProverNodeBrokerConfigFromEnv(), // get default config from env + ...extractRelevantOptions(options, proverBrokerConfigMappings, 'proverBroker'), // override with command line options + }; + + const broker = await createAndStartProvingBroker(config); + services.proverBroker = [broker, ProvingJobBrokerSchema]; + 
signalHandlers.push(() => broker.stop()); + + await broker.start(); + + return broker; +} diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index 031298e6890..0d6fa266edc 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -1,7 +1,8 @@ -import { ProverNodeApiSchema, ProvingJobSourceSchema, createAztecNodeClient } from '@aztec/circuit-types'; +import { ProverNodeApiSchema, type ProvingJobBroker, createAztecNodeClient } from '@aztec/circuit-types'; import { NULL_KEY } from '@aztec/ethereum'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; +import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; import { type ProverNodeConfig, createProverNode, @@ -13,6 +14,7 @@ import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@a import { mnemonicToAccount } from 'viem/accounts'; import { extractRelevantOptions } from '../util.js'; +import { startProverBroker } from './start_prover_broker.js'; export async function startProverNode( options: any, @@ -35,14 +37,6 @@ export async function startProverNode( process.exit(1); } - if (options.prover || options.proverAgentEnabled) { - userLog(`Running prover node with local prover agent.`); - proverConfig.proverAgentEnabled = true; - } else { - userLog(`Running prover node without local prover agent. 
Connect one or more prover agents to this node.`); - proverConfig.proverAgentEnabled = false; - } - if (!proverConfig.publisherPrivateKey || proverConfig.publisherPrivateKey === NULL_KEY) { if (!options.l1Mnemonic) { userLog(`--l1-mnemonic is required to start a Prover Node without --node.publisherPrivateKey`); @@ -67,12 +61,28 @@ export async function startProverNode( const telemetry = await createAndStartTelemetryClient( extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'), ); - const proverNode = await createProverNode(proverConfig, { telemetry }); + let broker: ProvingJobBroker; + if (proverConfig.proverBrokerUrl) { + broker = createProvingJobBrokerClient(proverConfig.proverBrokerUrl); + } else if (options.proverBroker) { + broker = await startProverBroker(options, signalHandlers, services, userLog); + } else { + userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`); + process.exit(1); + } + + if (proverConfig.proverAgentCount === 0) { + userLog( + `Running prover node without local prover agent. 
Connect one or more prover agents to this node or pass --proverAgent.proverAgentCount`, + ); + } + + const proverNode = await createProverNode(proverConfig, { telemetry, broker }); services.proverNode = [proverNode, ProverNodeApiSchema]; - if (!options.prover) { - services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobSourceSchema]; + if (!proverConfig.proverBrokerUrl) { + services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema]; } signalHandlers.push(proverNode.stop.bind(proverNode)); diff --git a/yarn-project/bb-prover/src/config.ts b/yarn-project/bb-prover/src/config.ts index 7b58a67fd92..3e8002fb89a 100644 --- a/yarn-project/bb-prover/src/config.ts +++ b/yarn-project/bb-prover/src/config.ts @@ -6,6 +6,8 @@ export interface BBConfig { } export interface ACVMConfig { + /** The path to the ACVM binary */ acvmBinaryPath: string; + /** The working directory to use for simulation/proving */ acvmWorkingDirectory: string; } diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index c5980197894..c717ceae649 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -20,3 +20,4 @@ export * from './server_circuit_prover.js'; export * from './service.js'; export * from './sync-status.js'; export * from './world_state.js'; +export * from './prover-broker.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-agent.ts b/yarn-project/circuit-types/src/interfaces/prover-agent.ts index 19142530d43..7a2cb519a9e 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-agent.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-agent.ts @@ -1,7 +1,63 @@ +import { type ConfigMappingsType, booleanConfigHelper, numberConfigHelper } from '@aztec/foundation/config'; import { type ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; +import { 
ProvingRequestType } from './proving-job.js'; + +export const ProverAgentConfig = z.object({ + /** The number of prover agents to start */ + proverAgentCount: z.number(), + /** The types of proofs the prover agent can generate */ + proverAgentProofTypes: z.array(z.nativeEnum(ProvingRequestType)), + /** How often the prover agents poll for jobs */ + proverAgentPollIntervalMs: z.number(), + /** The URL where this agent takes jobs from */ + proverBrokerUrl: z.string().optional(), + /** Whether to construct real proofs */ + realProofs: z.boolean(), + /** Artificial delay to introduce to all operations to the test prover. */ + proverTestDelayMs: z.number(), +}); + +export type ProverAgentConfig = z.infer; + +export const proverAgentConfigMappings: ConfigMappingsType = { + proverAgentCount: { + env: 'PROVER_AGENT_COUNT', + description: 'The number of prover agents to start', + ...numberConfigHelper(1), + }, + proverAgentPollIntervalMs: { + env: 'PROVER_AGENT_POLL_INTERVAL_MS', + description: 'The interval agents poll for jobs at', + ...numberConfigHelper(100), + }, + proverAgentProofTypes: { + env: 'PROVER_AGENT_PROOF_TYPES', + description: 'The types of proofs the prover agent can generate', + parseEnv: (val: string) => + val + .split(',') + .map(v => ProvingRequestType[v as any]) + .filter(v => typeof v === 'number'), + }, + proverBrokerUrl: { + env: 'PROVER_BROKER_HOST', + description: 'The URL where this agent takes jobs from', + }, + realProofs: { + env: 'PROVER_REAL_PROOFS', + description: 'Whether to construct real proofs', + ...booleanConfigHelper(false), + }, + proverTestDelayMs: { + env: 'PROVER_TEST_DELAY_MS', + description: 'Artificial delay to introduce to all operations to the test prover.', + ...numberConfigHelper(0), + }, +}; + + export interface ProverAgentApi { setMaxConcurrency(maxConcurrency: number): Promise; diff --git a/yarn-project/circuit-types/src/interfaces/prover-broker.ts b/yarn-project/circuit-types/src/interfaces/prover-broker.ts new 
file mode 100644 index 00000000000..5f11be3347e --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/prover-broker.ts @@ -0,0 +1,124 @@ +import { + type ProofUri, + type ProvingJob, + type ProvingJobId, + type ProvingJobSettledResult, + type ProvingJobStatus, + type ProvingRequestType, +} from '@aztec/circuit-types'; +import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; + +import { z } from 'zod'; + +export const ProverBrokerConfig = z.object({ + /** If starting a prover broker locally, the max number of retries per proving job */ + proverBrokerJobMaxRetries: z.number(), + /** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */ + proverBrokerJobTimeoutMs: z.number(), + /** If starting a prover broker locally, the interval the broker checks for timed out jobs */ + proverBrokerPollIntervalMs: z.number(), + /** If starting a prover broker locally, the directory to store broker data */ + proverBrokerDataDirectory: z.string().optional(), +}); + +export type ProverBrokerConfig = z.infer; + +export const proverBrokerConfigMappings: ConfigMappingsType = { + proverBrokerJobTimeoutMs: { + env: 'PROVER_BROKER_JOB_TIMEOUT_MS', + description: 'Jobs are retried if not kept alive for this long', + ...numberConfigHelper(30_000), + }, + proverBrokerPollIntervalMs: { + env: 'PROVER_BROKER_POLL_INTERVAL_MS', + description: 'The interval to check job health status', + ...numberConfigHelper(1_000), + }, + proverBrokerJobMaxRetries: { + env: 'PROVER_BROKER_JOB_MAX_RETRIES', + description: 'If starting a prover broker locally, the max number of retries per proving job', + ...numberConfigHelper(3), + }, + proverBrokerDataDirectory: { + env: 'PROVER_BROKER_DATA_DIRECTORY', + description: 'If starting a prover broker locally, the directory to store broker data', + }, +}; + +/** + * An interface for the proving orchestrator. 
The producer uses this to enqueue jobs for agents + */ +export interface ProvingJobProducer { + /** + * Enqueues a proving job + * @param job - The job to enqueue + */ + enqueueProvingJob(job: ProvingJob): Promise; + + /** + * Cancels a proving job and clears all of its resources + * @param id - The ID of the job to cancel + */ + removeAndCancelProvingJob(id: ProvingJobId): Promise; + + /** + * Returns the current status of the proving job + * @param id - The ID of the job to get the status of + */ + getProvingJobStatus(id: ProvingJobId): Promise; + + /** + * Waits for the job to settle and returns the result + * @param id - The ID of the job to get the status of + */ + waitForJobToSettle(id: ProvingJobId): Promise; +} + +export type ProvingJobFilter = { + allowList: ProvingRequestType[]; +}; + +export type GetProvingJobResponse = { + job: ProvingJob; + time: number; +}; + +/** + * An interface for proving agents to request jobs and report results + */ +export interface ProvingJobConsumer { + /** + * Gets a proving job to work on + * @param filter - Optional filter for the type of job to get + */ + getProvingJob(filter?: ProvingJobFilter): Promise; + + /** + * Marks a proving job as successful + * @param id - The ID of the job to report success for + * @param result - The result of the job + */ + reportProvingJobSuccess(id: ProvingJobId, result: ProofUri): Promise; + + /** + * Marks a proving job as errored + * @param id - The ID of the job to report an error for + * @param err - The error that occurred while processing the job + * @param retry - Whether to retry the job + */ + reportProvingJobError(id: ProvingJobId, err: string, retry?: boolean): Promise; + + /** + * Sends a heartbeat to the broker to indicate that the agent is still working on the given proving job + * @param id - The ID of the job to report progress for + * @param startedAt - The unix epoch when the job was started + * @param filter - Optional filter for the type of job to get + */ + 
reportProvingJobProgress( + id: ProvingJobId, + startedAt: number, + filter?: ProvingJobFilter, + ): Promise; +} + +export interface ProvingJobBroker extends ProvingJobProducer, ProvingJobConsumer {} diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index 2f2953b5dd7..bf1ef3d6485 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -7,42 +7,37 @@ import { z } from 'zod'; import { type TxHash } from '../tx/tx_hash.js'; import { type EpochProver } from './epoch-prover.js'; import { type MerkleTreeReadOperations } from './merkle_tree_operations.js'; -import { type ProvingJobSource } from './proving-job-source.js'; +import { type ProvingJobConsumer } from './prover-broker.js'; +import { type ProvingJobStatus } from './proving-job.js'; + +export type ActualProverConfig = { + /** Whether to construct real proofs */ + realProofs: boolean; + /** Artificial delay to introduce to all operations to the test prover. */ + proverTestDelayMs: number; +}; /** * The prover configuration. */ -export type ProverConfig = { +export type ProverConfig = ActualProverConfig & { /** The URL to the Aztec node to take proving jobs from */ nodeUrl?: string; - /** Whether to construct real proofs */ - realProofs: boolean; - /** Whether this prover has a local prover agent */ - proverAgentEnabled: boolean; - /** The interval agents poll for jobs at */ - proverAgentPollInterval: number; - /** The maximum number of proving jobs to be run in parallel */ - proverAgentConcurrency: number; - /** Jobs are retried if not kept alive for this long */ - proverJobTimeoutMs: number; - /** The interval to check job health status */ - proverJobPollIntervalMs: number; - /** Artificial delay to introduce to all operations to the test prover. 
*/ - proverTestDelayMs: number; /** Identifier of the prover */ - proverId?: Fr; + proverId: Fr; + /** Where to store temporary data */ + cacheDir?: string; + + proverAgentCount: number; }; export const ProverConfigSchema = z.object({ nodeUrl: z.string().optional(), realProofs: z.boolean(), - proverAgentEnabled: z.boolean(), - proverAgentPollInterval: z.number(), - proverAgentConcurrency: z.number(), - proverJobTimeoutMs: z.number(), - proverJobPollIntervalMs: z.number(), - proverId: schemas.Fr.optional(), + proverId: schemas.Fr, proverTestDelayMs: z.number(), + cacheDir: z.string().optional(), + proverAgentCount: z.number(), }) satisfies ZodFor; export const proverConfigMappings: ConfigMappingsType = { @@ -55,59 +50,68 @@ export const proverConfigMappings: ConfigMappingsType = { description: 'Whether to construct real proofs', ...booleanConfigHelper(), }, - proverAgentEnabled: { - env: 'PROVER_AGENT_ENABLED', - description: 'Whether this prover has a local prover agent', - ...booleanConfigHelper(true), - }, - proverAgentPollInterval: { - env: 'PROVER_AGENT_POLL_INTERVAL_MS', - description: 'The interval agents poll for jobs at', - ...numberConfigHelper(100), - }, - proverAgentConcurrency: { - env: 'PROVER_AGENT_CONCURRENCY', - description: 'The maximum number of proving jobs to be run in parallel', - ...numberConfigHelper(1), - }, - proverJobTimeoutMs: { - env: 'PROVER_JOB_TIMEOUT_MS', - description: 'Jobs are retried if not kept alive for this long', - ...numberConfigHelper(60_000), - }, - proverJobPollIntervalMs: { - env: 'PROVER_JOB_POLL_INTERVAL_MS', - description: 'The interval to check job health status', - ...numberConfigHelper(1_000), - }, proverId: { env: 'PROVER_ID', parseEnv: (val: string) => parseProverId(val), description: 'Identifier of the prover', + defaultValue: Fr.ZERO, }, proverTestDelayMs: { env: 'PROVER_TEST_DELAY_MS', description: 'Artificial delay to introduce to all operations to the test prover.', ...numberConfigHelper(0), }, + cacheDir: { 
+ env: 'PROVER_CACHE_DIR', + description: 'Where to store cache data generated while proving', + defaultValue: '/tmp/aztec-prover', + }, + proverAgentCount: { + env: 'PROVER_AGENT_COUNT', + description: 'The number of prover agents to start', + ...numberConfigHelper(1), + }, }; function parseProverId(str: string) { return Fr.fromString(str.startsWith('0x') ? str : Buffer.from(str, 'utf8').toString('hex')); } +/** + * A database where the proving orchestrator can store intermediate results + */ +export interface ProverCache { + /** + * Saves the status of a proving job + * @param jobId - The job ID + * @param status - The status of the proof + */ + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise; + + /** + * Retrieves the status of a proving job (if known) + * @param jobId - The job ID + */ + getProvingJobStatus(jobId: string): Promise; + + /** + * Closes the cache + */ + close(): Promise; +} + /** * The interface to the prover client. * Provides the ability to generate proofs and build rollups. 
*/ export interface EpochProverManager { - createEpochProver(db: MerkleTreeReadOperations): EpochProver; + createEpochProver(db: MerkleTreeReadOperations, cache?: ProverCache): EpochProver; start(): Promise; stop(): Promise; - getProvingJobSource(): ProvingJobSource; + getProvingJobSource(): ProvingJobConsumer; updateProverConfig(config: Partial): Promise; } diff --git a/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts b/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts index eb1388a54d7..57b7d2192be 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts @@ -1,7 +1,6 @@ import { BaseOrMergeRollupPublicInputs, NESTED_RECURSIVE_PROOF_LENGTH, - PrivateBaseRollupInputs, VerificationKeyData, makeRecursiveProof, } from '@aztec/circuits.js'; @@ -9,9 +8,9 @@ import { type JsonRpcTestContext, createJsonRpcTestSetup } from '@aztec/foundati import { type ProvingJobSource, ProvingJobSourceSchema } from './proving-job-source.js'; import { + type ProofUri, type ProvingJob, - type ProvingRequest, - type ProvingRequestResult, + type ProvingJobResult, type ProvingRequestResultFor, ProvingRequestType, makePublicInputsAndRecursiveProof, @@ -66,17 +65,18 @@ describe('ProvingJobSourceSchema', () => { }); class MockProvingJobSource implements ProvingJobSource { - getProvingJob(): Promise | undefined> { + getProvingJob(): Promise { return Promise.resolve({ id: 'a-job-id', - request: { type: ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs: PrivateBaseRollupInputs.empty() }, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + inputsUri: 'inputs-uri' as ProofUri, }); } heartbeat(jobId: string): Promise { expect(typeof jobId).toEqual('string'); return Promise.resolve(); } - resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise { + resolveProvingJob(jobId: string, result: ProvingJobResult): Promise { expect(typeof jobId).toEqual('string'); 
const baseRollupResult = result as ProvingRequestResultFor; expect(baseRollupResult.result.inputs).toBeInstanceOf(BaseOrMergeRollupPublicInputs); diff --git a/yarn-project/circuit-types/src/interfaces/proving-job-source.ts b/yarn-project/circuit-types/src/interfaces/proving-job-source.ts index ebeaa05301a..c54f5964e51 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job-source.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job-source.ts @@ -2,21 +2,14 @@ import { type ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; -import { - JobIdSchema, - type ProvingJob, - ProvingJobSchema, - type ProvingRequest, - type ProvingRequestResult, - ProvingRequestResultSchema, -} from './proving-job.js'; +import { ProvingJob, ProvingJobId, ProvingJobResult } from './proving-job.js'; export interface ProvingJobSource { /** * Gets the next proving job. `heartbeat` must be called periodically to keep the job alive. * @returns The proving job, or undefined if there are no jobs available. */ - getProvingJob(): Promise | undefined>; + getProvingJob(): Promise; /** * Keeps the job alive. If this isn't called regularly then the job will be @@ -30,7 +23,7 @@ export interface ProvingJobSource { * @param jobId - The ID of the job to resolve. * @param result - The result of the proving job. */ - resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise; + resolveProvingJob(jobId: string, result: ProvingJobResult): Promise; /** * Rejects a proving job. 
@@ -41,8 +34,8 @@ export interface ProvingJobSource { } export const ProvingJobSourceSchema: ApiSchemaFor = { - getProvingJob: z.function().args().returns(ProvingJobSchema.optional()), - heartbeat: z.function().args(JobIdSchema).returns(z.void()), - resolveProvingJob: z.function().args(JobIdSchema, ProvingRequestResultSchema).returns(z.void()), - rejectProvingJob: z.function().args(JobIdSchema, z.string()).returns(z.void()), + getProvingJob: z.function().args().returns(ProvingJob.optional()), + heartbeat: z.function().args(ProvingJobId).returns(z.void()), + resolveProvingJob: z.function().args(ProvingJobId, ProvingJobResult).returns(z.void()), + rejectProvingJob: z.function().args(ProvingJobId, z.string()).returns(z.void()), }; diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts index 7c0643192c8..f2013799dac 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -126,14 +126,12 @@ export function mapProvingRequestTypeToCircuitName(type: ProvingRequestType): Ci export type AvmProvingRequest = z.infer; -export type ProvingRequest = z.infer; - export const AvmProvingRequestSchema = z.object({ type: z.literal(ProvingRequestType.PUBLIC_VM), inputs: AvmCircuitInputs.schema, }); -export const ProvingRequestSchema = z.discriminatedUnion('type', [ +export const ProvingJobInputs = z.discriminatedUnion('type', [ AvmProvingRequestSchema, z.object({ type: z.literal(ProvingRequestType.BASE_PARITY), inputs: BaseParityInputs.schema }), z.object({ type: z.literal(ProvingRequestType.ROOT_PARITY), inputs: RootParityInputs.schema }), @@ -147,47 +145,23 @@ export const ProvingRequestSchema = z.discriminatedUnion('type', [ z.object({ type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), inputs: PrivateKernelEmptyInputData.schema }), z.object({ type: z.literal(ProvingRequestType.TUBE_PROOF), inputs: TubeInputs.schema }), ]); - 
-export type JobId = z.infer; - -export const JobIdSchema = z.string(); - -export type ProvingJob = { id: JobId; request: T }; - -export const ProvingJobSchema = z.object({ id: JobIdSchema, request: ProvingRequestSchema }); - -type ProvingRequestResultsMap = { - [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof; - [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey; - [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BASE_PARITY]: PublicInputsAndRecursiveProof; - [ProvingRequestType.ROOT_PARITY]: PublicInputsAndRecursiveProof< - ParityPublicInputs, - typeof NESTED_RECURSIVE_PROOF_LENGTH - >; - [ProvingRequestType.TUBE_PROOF]: ProofAndVerificationKey; +export type ProvingJobInputs = z.infer; +export type ProvingJobInputsMap = { + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PrivateKernelEmptyInputData; + [ProvingRequestType.PUBLIC_VM]: AvmCircuitInputs; + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PrivateBaseRollupInputs; + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicBaseRollupInputs; + [ProvingRequestType.MERGE_ROLLUP]: MergeRollupInputs; + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: EmptyBlockRootRollupInputs; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: BlockRootRollupInputs; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: BlockMergeRollupInputs; + [ProvingRequestType.ROOT_ROLLUP]: RootRollupInputs; + [ProvingRequestType.BASE_PARITY]: BaseParityInputs; + [ProvingRequestType.ROOT_PARITY]: RootParityInputs; + [ProvingRequestType.TUBE_PROOF]: TubeInputs; }; -export type 
ProvingRequestResultFor = { type: T; result: ProvingRequestResultsMap[T] }; - -export type ProvingRequestResult = { - [K in keyof ProvingRequestResultsMap]: { type: K; result: ProvingRequestResultsMap[K] }; -}[keyof ProvingRequestResultsMap]; - -export function makeProvingRequestResult( - type: ProvingRequestType, - result: ProvingRequestResult['result'], -): ProvingRequestResult { - return { type, result } as ProvingRequestResult; -} - -export const ProvingRequestResultSchema = z.discriminatedUnion('type', [ +export const ProvingJobResult = z.discriminatedUnion('type', [ z.object({ type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), result: schemaForPublicInputsAndRecursiveProof(KernelCircuitPublicInputs.schema), @@ -236,148 +210,73 @@ export const ProvingRequestResultSchema = z.discriminatedUnion('type', [ type: z.literal(ProvingRequestType.TUBE_PROOF), result: schemaForRecursiveProofAndVerificationKey(TUBE_PROOF_LENGTH), }), -]) satisfies ZodFor; +]); +export type ProvingJobResult = z.infer; +export type ProvingJobResultsMap = { + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof; + [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey; + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BASE_PARITY]: PublicInputsAndRecursiveProof; + [ProvingRequestType.ROOT_PARITY]: PublicInputsAndRecursiveProof< + ParityPublicInputs, + typeof NESTED_RECURSIVE_PROOF_LENGTH + >; + [ProvingRequestType.TUBE_PROOF]: ProofAndVerificationKey; +}; -export const V2ProvingJobId = 
z.string().brand('ProvingJobId'); -export type V2ProvingJobId = z.infer; +export type ProvingRequestResultFor = { type: T; result: ProvingJobResultsMap[T] }; -export const V2ProvingJob = z.discriminatedUnion('type', [ - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.PUBLIC_VM), - inputs: AvmCircuitInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.BASE_PARITY), - inputs: BaseParityInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.ROOT_PARITY), - inputs: RootParityInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.PRIVATE_BASE_ROLLUP), - inputs: PrivateBaseRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.PUBLIC_BASE_ROLLUP), - inputs: PublicBaseRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.MERGE_ROLLUP), - inputs: MergeRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.BLOCK_ROOT_ROLLUP), - inputs: BlockRootRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP), - inputs: EmptyBlockRootRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.BLOCK_MERGE_ROLLUP), - inputs: BlockMergeRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.ROOT_ROLLUP), - inputs: RootRollupInputs.schema, - }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), - inputs: PrivateKernelEmptyInputData.schema, 
- }), - z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.literal(ProvingRequestType.TUBE_PROOF), - inputs: TubeInputs.schema, - }), -]); -export type V2ProvingJob = z.infer; +export const ProvingJobId = z.string(); -export const V2ProofOutput = z.discriminatedUnion('type', [ - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), - value: schemaForPublicInputsAndRecursiveProof(KernelCircuitPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_VM), - value: schemaForRecursiveProofAndVerificationKey(AVM_PROOF_LENGTH_IN_FIELDS), - }), - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_BASE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_BASE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.MERGE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_MERGE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(RootRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BASE_PARITY), - value: schemaForPublicInputsAndRecursiveProof(ParityPublicInputs.schema, RECURSIVE_PROOF_LENGTH), - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_PARITY), - value: 
schemaForPublicInputsAndRecursiveProof(ParityPublicInputs.schema, NESTED_RECURSIVE_PROOF_LENGTH), - }), - z.object({ - type: z.literal(ProvingRequestType.TUBE_PROOF), - value: schemaForRecursiveProofAndVerificationKey(TUBE_PROOF_LENGTH), - }), -]); +export const ProofUri = z.string().brand('ProvingJobUri'); +export type ProofUri = z.infer; + +export type ProvingJobId = z.infer; +export const ProvingJob = z.object({ + id: ProvingJobId, + type: z.nativeEnum(ProvingRequestType), + blockNumber: z.number().optional(), + inputsUri: ProofUri, +}); + +export type ProvingJob = z.infer; + +export function makeProvingRequestResult( + type: ProvingRequestType, + result: ProvingJobResult['result'], +): ProvingJobResult { + return { type, result } as ProvingJobResult; +} + +export const ProvingJobFulfilledResult = z.object({ + status: z.literal('fulfilled'), + value: ProofUri, +}); +export type ProvingJobFulfilledResult = z.infer; -export type V2ProofOutput = z.infer; +export const ProvingJobRejectedResult = z.object({ + status: z.literal('rejected'), + reason: z.string(), +}); +export type ProvingJobRejectedResult = z.infer; + +export const ProvingJobSettledResult = z.discriminatedUnion('status', [ + ProvingJobFulfilledResult, + ProvingJobRejectedResult, +]); +export type ProvingJobSettledResult = z.infer; -export const V2ProvingJobStatus = z.discriminatedUnion('status', [ +export const ProvingJobStatus = z.discriminatedUnion('status', [ z.object({ status: z.literal('in-queue') }), z.object({ status: z.literal('in-progress') }), z.object({ status: z.literal('not-found') }), - z.object({ status: z.literal('resolved'), value: V2ProofOutput }), - z.object({ status: z.literal('rejected'), error: z.string() }), + ProvingJobFulfilledResult, + ProvingJobRejectedResult, ]); -export type V2ProvingJobStatus = z.infer; - -export const V2ProvingJobResult = z.union([z.object({ value: V2ProofOutput }), z.object({ error: z.string() })]); -export type V2ProvingJobResult = z.infer; +export type 
ProvingJobStatus = z.infer; diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index c6388c91e39..1fa0ac6865c 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -35,15 +35,14 @@ export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export ETHEREUM_HOST="http://127.0.0.1:8545" -export PROVER_AGENT_ENABLED="true" +export PROVER_AGENT_COUNT="1" export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" -export PROVER_JOB_SOURCE_URL="http://127.0.0.1:$PORT" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" # Start the Prover Node with the prover and archiver -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --prover --archiver +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --prover-broker --archiver diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index c704be6b0aa..d77451f317f 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -56,8 +56,8 @@ describe('benchmarks/proving', () => { { // do 
setup with fake proofs realProofs: false, - proverAgentConcurrency: 4, - proverAgentPollInterval: 10, + proverAgentCount: 4, + proverAgentPollIntervalMs: 10, minTxsPerBlock: 1, }, {}, @@ -141,7 +141,7 @@ describe('benchmarks/proving', () => { ctx.logger.info('Stopping fake provers'); await ctx.aztecNode.setConfig({ - proverAgentConcurrency: 1, + proverAgentCount: 1, realProofs: true, minTxsPerBlock: 2, }); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index e2d230a40a2..1d200dc9c4c 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -266,7 +266,7 @@ export class FullProverTest { dataDirectory: undefined, proverId: new Fr(81), realProofs: this.realProofs, - proverAgentConcurrency: 2, + proverAgentCount: 2, publisherPrivateKey: `0x${proverNodePrivateKey!.toString('hex')}`, proverNodeMaxPendingJobs: 100, proverNodePollingIntervalMs: 100, diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index abbb9da5177..8fded19aa9a 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -701,7 +701,7 @@ export async function createAndSyncProverNode( dataDirectory: undefined, proverId: new Fr(42), realProofs: false, - proverAgentConcurrency: 2, + proverAgentCount: 2, publisherPrivateKey: proverNodePrivateKey, proverNodeMaxPendingJobs: 10, proverNodePollingIntervalMs: 200, diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 165d98d7b6c..ce7c17fb3ef 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -97,15 +97,22 @@ export type EnvVar = | 'PEER_ID_PRIVATE_KEY' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' - | 'PROVER_AGENT_CONCURRENCY' | 'PROVER_AGENT_ENABLED' 
+ | 'PROVER_AGENT_CONCURRENCY' + | 'PROVER_AGENT_COUNT' + | 'PROVER_AGENT_PROOF_TYPES' | 'PROVER_AGENT_POLL_INTERVAL_MS' + | 'PROVER_BROKER_HOST' + | 'PROVER_BROKER_ENABLED' + | 'PROVER_BROKER_JOB_TIMEOUT_MS' + | 'PROVER_BROKER_POLL_INTERVAL_MS' + | 'PROVER_BROKER_JOB_MAX_RETRIES' + | 'PROVER_BROKER_DATA_DIRECTORY' | 'PROVER_COORDINATION_NODE_URL' | 'PROVER_DISABLED' | 'PROVER_ID' | 'PROVER_JOB_POLL_INTERVAL_MS' | 'PROVER_JOB_TIMEOUT_MS' - | 'PROVER_JOB_SOURCE_URL' | 'PROVER_NODE_POLLING_INTERVAL_MS' | 'PROVER_NODE_MAX_PENDING_JOBS' | 'PROVER_PUBLISH_RETRY_INTERVAL_MS' @@ -113,6 +120,7 @@ export type EnvVar = | 'PROVER_REAL_PROOFS' | 'PROVER_REQUIRED_CONFIRMATIONS' | 'PROVER_TEST_DELAY_MS' + | 'PROVER_CACHE_DIR' | 'PXE_BLOCK_POLLING_INTERVAL_MS' | 'PXE_L2_STARTING_BLOCK' | 'PXE_PROVER_ENABLED' diff --git a/yarn-project/foundation/src/config/index.ts b/yarn-project/foundation/src/config/index.ts index 48cbe0301a7..4485aae5059 100644 --- a/yarn-project/foundation/src/config/index.ts +++ b/yarn-project/foundation/src/config/index.ts @@ -46,7 +46,7 @@ export function getConfigFromMappings(configMappings: ConfigMappingsType): * @param keysToFilter - The keys to filter out * @returns The filtered config mappings */ -export function filterConfigMappings( +export function omitConfigMappings( configMappings: ConfigMappingsType, keysToFilter: K[], ): ConfigMappingsType> { diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 97480297dbf..4ee446b70af 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -4,6 +4,7 @@ "type": "module", "exports": { ".": "./dest/index.js", + "./broker": "./dest/proving_broker/index.js", "./prover-agent": "./dest/prover-agent/index.js", "./orchestrator": "./dest/orchestrator/index.js", "./helpers": "./dest/orchestrator/block-building-helpers.js" @@ -78,7 +79,8 @@ "commander": "^12.1.0", "lodash.chunk": "^4.2.0", "source-map-support": "^0.5.21", - 
"tslib": "^2.4.0" + "tslib": "^2.4.0", + "zod": "^3.23.8" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts index 8572b176dbc..347301ebb26 100644 --- a/yarn-project/prover-client/src/config.ts +++ b/yarn-project/prover-client/src/config.ts @@ -1,25 +1,20 @@ -import { type BBConfig } from '@aztec/bb-prover'; -import { type ProverConfig, proverConfigMappings } from '@aztec/circuit-types'; +import { type ACVMConfig, type BBConfig } from '@aztec/bb-prover'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + type ProverConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, + proverConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; /** * The prover configuration. */ -export type ProverClientConfig = ProverConfig & - BBConfig & { - /** The URL to the Aztec prover node to take proving jobs from */ - proverJobSourceUrl?: string; - /** The working directory to use for simulation/proving */ - acvmWorkingDirectory: string; - /** The path to the ACVM binary */ - acvmBinaryPath: string; - }; +export type ProverClientConfig = ProverConfig & ProverAgentConfig & ProverBrokerConfig & BBConfig & ACVMConfig; -export const proverClientConfigMappings: ConfigMappingsType = { - proverJobSourceUrl: { - env: 'PROVER_JOB_SOURCE_URL', - description: 'The URL to the Aztec prover node to take proving jobs from', - }, +export const bbConfigMappings: ConfigMappingsType = { acvmWorkingDirectory: { env: 'ACVM_WORKING_DIRECTORY', description: 'The working directory to use for simulation/proving', @@ -41,7 +36,13 @@ export const proverClientConfigMappings: ConfigMappingsType description: 'Whether to skip cleanup of bb temporary files', ...booleanConfigHelper(false), }, +}; + +export const proverClientConfigMappings: ConfigMappingsType = { + ...bbConfigMappings, 
...proverConfigMappings, + ...proverAgentConfigMappings, + ...proverBrokerConfigMappings, }; /** diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index 36affdfba2a..56f3430e2c6 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -3,3 +3,4 @@ export { EpochProverManager } from '@aztec/circuit-types'; export * from './tx-prover/tx-prover.js'; export * from './config.js'; export * from './tx-prover/factory.js'; +export * from './proving_broker/prover_cache/memory.js'; diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index ebecd07801a..764a092e813 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -9,7 +9,7 @@ import { type TxValidator, } from '@aztec/circuit-types'; import { type Gas, type GlobalVariables, Header } from '@aztec/circuits.js'; -import { type Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { @@ -118,7 +118,7 @@ export class TestContext { } const queue = new MemoryProvingQueue(telemetry); - const orchestrator = new ProvingOrchestrator(proverDb, queue, telemetry); + const orchestrator = new ProvingOrchestrator(proverDb, queue, telemetry, Fr.ZERO); const agent = new ProverAgent(localProver, proverCount); queue.start(); diff --git a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts index fd6b3626ca1..450715d8c06 100644 --- a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts @@ -131,7 +131,7 @@ export class BlockProvingState { /** Returns the block number as an epoch number. 
Used for prioritizing proof requests. */ public get epochNumber(): number { - return this.globalVariables.blockNumber.toNumber(); + return this.parentEpoch.epochNumber; } /** diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts index dcdb839b595..aa971c116ce 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts @@ -11,17 +11,27 @@ import { AbortError } from '@aztec/foundation/error'; import { sleep } from '@aztec/foundation/sleep'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { MemoryProvingQueue } from './memory-proving-queue.js'; describe('MemoryProvingQueue', () => { let queue: MemoryProvingQueue; let jobTimeoutMs: number; let pollingIntervalMs: number; + let proofStore: ProofStore; beforeEach(() => { jobTimeoutMs = 100; pollingIntervalMs = 10; - queue = new MemoryProvingQueue(new NoopTelemetryClient(), jobTimeoutMs, pollingIntervalMs); + proofStore = new InlineProofStore(); + queue = new MemoryProvingQueue( + new NoopTelemetryClient(), + jobTimeoutMs, + pollingIntervalMs, + undefined, + undefined, + proofStore, + ); queue.start(); }); @@ -34,10 +44,10 @@ describe('MemoryProvingQueue', () => { void queue.getPrivateBaseRollupProof(makePrivateBaseRollupInputs()); const job1 = await queue.getProvingJob(); - expect(job1?.request.type).toEqual(ProvingRequestType.BASE_PARITY); + expect(job1?.type).toEqual(ProvingRequestType.BASE_PARITY); const job2 = await queue.getProvingJob(); - expect(job2?.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect(job2?.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); }); it('returns jobs ordered by priority', async () => { @@ -46,7 +56,7 @@ describe('MemoryProvingQueue', () => { 
void queue.getPublicBaseRollupProof(makePublicBaseRollupInputs(), undefined, 1); // The agent consumes one of them - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); // A new block comes along with its base rollups, and the orchestrator then pushes a root request for the first one void queue.getPublicBaseRollupProof(makePublicBaseRollupInputs(), undefined, 2); @@ -56,14 +66,14 @@ describe('MemoryProvingQueue', () => { void queue.getRootRollupProof(makeRootRollupInputs(), undefined, 1); // The next jobs for the agent should be the ones from block 1, skipping the ones for block 2 - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.ROOT_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.ROOT_ROLLUP); // And the base rollups for block 2 should go next - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); }); it('returns undefined when no jobs are 
available', async () => { @@ -75,7 +85,8 @@ describe('MemoryProvingQueue', () => { const promise = queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); - expect(job?.request.inputs).toEqual(inputs); + const jobInputs = await proofStore.getProofInput(job!.inputsUri); + expect(jobInputs.inputs).toEqual(inputs); const publicInputs = makeParityPublicInputs(); const proof = makeRecursiveProof(RECURSIVE_PROOF_LENGTH); @@ -93,7 +104,8 @@ describe('MemoryProvingQueue', () => { void queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); - expect(job?.request.inputs).toEqual(inputs); + const proofInput = await proofStore.getProofInput(job!.inputsUri); + expect(proofInput.inputs).toEqual(inputs); const error = new Error('test error'); diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts index f70b66efbc9..a6175f37e95 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts @@ -1,8 +1,8 @@ import { type ProofAndVerificationKey, type ProvingJob, + type ProvingJobInputsMap, type ProvingJobSource, - type ProvingRequest, type ProvingRequestResultFor, ProvingRequestType, type PublicInputsAndRecursiveProof, @@ -35,13 +35,13 @@ import { AbortError, TimeoutError } from '@aztec/foundation/error'; import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type TelemetryClient } from '@aztec/telemetry-client'; +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { ProvingQueueMetrics } from './queue_metrics.js'; -type ProvingJobWithResolvers = ProvingJob & - 
PromiseWithResolvers> & { +type ProvingJobWithResolvers = ProvingJob & + PromiseWithResolvers> & { signal?: AbortSignal; epochNumber?: number; attempts: number; @@ -62,9 +62,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource (a, b) => (a.epochNumber ?? 0) - (b.epochNumber ?? 0), ); private jobsInProgress = new Map(); - private runningPromise: RunningPromise; - private metrics: ProvingQueueMetrics; constructor( @@ -75,6 +73,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource pollingIntervalMs = 1000, private generateId = defaultIdGenerator, private timeSource = defaultTimeSource, + private proofStore: ProofStore = new InlineProofStore(), ) { this.metrics = new ProvingQueueMetrics(client, 'MemoryProvingQueue'); this.runningPromise = new RunningPromise(this.poll, pollingIntervalMs); @@ -100,7 +99,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource this.log.info('Proving queue stopped'); } - public async getProvingJob({ timeoutSec = 1 } = {}): Promise | undefined> { + public async getProvingJob({ timeoutSec = 1 } = {}): Promise { if (!this.runningPromise.isRunning()) { throw new Error('Proving queue is not running. 
Start the queue before getting jobs.'); } @@ -119,7 +118,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource this.jobsInProgress.set(job.id, job); return { id: job.id, - request: job.request, + type: job.type, + inputsUri: job.inputsUri, }; } catch (err) { if (err instanceof TimeoutError) { @@ -167,20 +167,18 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } // every job should be retried with the exception of the public VM since its in development and can fail - if (job.attempts < MAX_RETRIES && job.request.type !== ProvingRequestType.PUBLIC_VM) { + if (job.attempts < MAX_RETRIES && job.type !== ProvingRequestType.PUBLIC_VM) { job.attempts++; this.log.warn( - `Job id=${job.id} type=${ProvingRequestType[job.request.type]} failed with error: ${reason}. Retry ${ + `Job id=${job.id} type=${ProvingRequestType[job.type]} failed with error: ${reason}. Retry ${ job.attempts }/${MAX_RETRIES}`, ); this.queue.put(job); } else { const logFn = - job.request.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT - ? this.log.warn - : this.log.error; - logFn(`Job id=${job.id} type=${ProvingRequestType[job.request.type]} failed with error: ${reason}`); + job.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT ? 
this.log.warn : this.log.error; + logFn(`Job id=${job.id} type=${ProvingRequestType[job.type]} failed with error: ${reason}`); job.reject(new Error(reason)); } return Promise.resolve(); @@ -214,7 +212,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } if (job.heartbeat + this.jobTimeoutMs < now) { - this.log.warn(`Job ${job.id} type=${ProvingRequestType[job.request.type]} has timed out`); + this.log.warn(`Job ${job.id} type=${ProvingRequestType[job.type]} has timed out`); this.jobsInProgress.delete(job.id); job.heartbeat = 0; @@ -223,19 +221,23 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } }; - private enqueue( - request: T, + private async enqueue( + type: T, + inputs: ProvingJobInputsMap[T], signal?: AbortSignal, epochNumber?: number, - ): Promise['result']> { + ): Promise['result']> { if (!this.runningPromise.isRunning()) { return Promise.reject(new Error('Proving queue is not running.')); } - const { promise, resolve, reject } = promiseWithResolvers>(); + const { promise, resolve, reject } = promiseWithResolvers>(); + const id = this.generateId(); + const inputsUri = await this.proofStore.saveProofInput(id, type, inputs); const item: ProvingJobWithResolvers = { - id: this.generateId(), - request, + id, + type, + inputsUri, signal, promise, resolve, @@ -250,16 +252,13 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } this.log.debug( - `Adding id=${item.id} type=${ProvingRequestType[request.type]} proving job to queue depth=${this.queue.length()}`, + `Adding id=${item.id} type=${ProvingRequestType[type]} proving job to queue depth=${this.queue.length()}`, ); - // TODO (alexg) remove the `any` - if (!this.queue.put(item as any)) { + + if (!this.queue.put(item as ProvingJobWithResolvers)) { throw new Error(); } - const byteSize = serializeToBuffer(item.request.inputs).length; - this.metrics.recordNewJob(item.request.type, byteSize); - return 
promise.then(({ result }) => result); } @@ -268,7 +267,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs, signal, epochNumber); } getTubeProof( @@ -276,7 +275,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.TUBE_PROOF, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.TUBE_PROOF, inputs, signal, epochNumber); } /** @@ -288,7 +287,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BASE_PARITY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BASE_PARITY, inputs, signal, epochNumber); } /** @@ -300,7 +299,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.ROOT_PARITY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.ROOT_PARITY, inputs, signal, epochNumber); } getPrivateBaseRollupProof( @@ -308,7 +307,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs, signal, epochNumber); } getPublicBaseRollupProof( @@ -316,7 +315,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> 
{ - return this.enqueue({ type: ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs, signal, epochNumber); } /** @@ -324,11 +323,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getMergeRollupProof( - input: MergeRollupInputs, + inputs: MergeRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.MERGE_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.MERGE_ROLLUP, inputs, signal, epochNumber); } /** @@ -336,19 +335,19 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getBlockRootRollupProof( - input: BlockRootRollupInputs, + inputs: BlockRootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs, signal, epochNumber); } getEmptyBlockRootRollupProof( - input: EmptyBlockRootRollupInputs, + inputs: EmptyBlockRootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, inputs, signal, epochNumber); } /** @@ -356,11 +355,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. 
*/ getBlockMergeRollupProof( - input: BlockMergeRollupInputs, + inputs: BlockMergeRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs, signal, epochNumber); } /** @@ -368,11 +367,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getRootRollupProof( - input: RootRollupInputs, + inputs: RootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.ROOT_ROLLUP, inputs, signal, epochNumber); } /** @@ -383,7 +382,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PUBLIC_VM, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PUBLIC_VM, inputs, signal, epochNumber); } /** diff --git a/yarn-project/prover-client/src/prover-agent/prover-agent.ts b/yarn-project/prover-client/src/prover-agent/prover-agent.ts index 50c1733652c..2b86450afbf 100644 --- a/yarn-project/prover-client/src/prover-agent/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-agent/prover-agent.ts @@ -1,9 +1,9 @@ import { type ProverAgentApi, type ProvingJob, + type ProvingJobInputs, + type ProvingJobResultsMap, type ProvingJobSource, - type ProvingRequest, - type ProvingRequestResultFor, ProvingRequestType, type ServerCircuitProver, makeProvingRequestResult, @@ -12,6 +12,8 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { elapsed } from '@aztec/foundation/timer'; +import { InlineProofStore } from 
'../proving_broker/proof_store.js'; + const PRINT_THRESHOLD_NS = 6e10; // 60 seconds /** @@ -27,6 +29,7 @@ export class ProverAgent implements ProverAgentApi { } >(); private runningPromise?: RunningPromise; + private proofInputsDatabase = new InlineProofStore(); constructor( /** The prover implementation to defer jobs to */ @@ -101,12 +104,12 @@ export class ProverAgent implements ProverAgentApi { const promise = this.work(jobSource, job).finally(() => this.inFlightPromises.delete(job.id)); this.inFlightPromises.set(job.id, { id: job.id, - type: job.request.type, + type: job.type, promise, }); } catch (err) { this.log.warn( - `Error processing job! type=${ProvingRequestType[job.request.type]}: ${err}. ${(err as Error).stack}`, + `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. ${(err as Error).stack}`, ); } } catch (err) { @@ -130,28 +133,24 @@ export class ProverAgent implements ProverAgentApi { this.log.info('Agent stopped'); } - private async work( - jobSource: ProvingJobSource, - job: ProvingJob, - ): Promise { + private async work(jobSource: ProvingJobSource, job: ProvingJob): Promise { try { - this.log.debug(`Picked up proving job id=${job.id} type=${ProvingRequestType[job.request.type]}`); - const type: TRequest['type'] = job.request.type; - const [time, result] = await elapsed(this.getProof(job.request)); + this.log.debug(`Picked up proving job id=${job.id} type=${ProvingRequestType[job.type]}`); + const type = job.type; + const inputs = await this.proofInputsDatabase.getProofInput(job.inputsUri); + const [time, result] = await elapsed(this.getProof(inputs)); if (this.#isRunning()) { this.log.verbose(`Processed proving job id=${job.id} type=${ProvingRequestType[type]} duration=${time}ms`); await jobSource.resolveProvingJob(job.id, makeProvingRequestResult(type, result)); } else { this.log.verbose( - `Dropping proving job id=${job.id} type=${ - ProvingRequestType[job.request.type] - } duration=${time}ms: agent stopped`, + `Dropping 
proving job id=${job.id} type=${ProvingRequestType[job.type]} duration=${time}ms: agent stopped`, ); } } catch (err) { - const type = ProvingRequestType[job.request.type]; + const type = ProvingRequestType[job.type]; if (this.#isRunning()) { - if (job.request.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT) { + if (job.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT) { this.log.warn(`Expected error processing VM proving job id=${job.id} type=${type}: ${err}`); } else { this.log.error(`Error processing proving job id=${job.id} type=${type}: ${err}`, err); @@ -164,10 +163,7 @@ export class ProverAgent implements ProverAgentApi { } } - private getProof( - request: TRequest, - ): Promise['result']>; - private getProof(request: ProvingRequest): Promise['result']> { + private getProof(request: ProvingJobInputs): Promise { const { type, inputs } = request; switch (type) { case ProvingRequestType.PUBLIC_VM: { diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts new file mode 100644 index 00000000000..f4782e092ac --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts @@ -0,0 +1,104 @@ +import { type ProvingJobProducer, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; +import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js/testing'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; + +import { jest } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { CachingBrokerFacade } from './caching_broker_facade.js'; +import { InlineProofStore } from './proof_store.js'; +import { InMemoryProverCache } from './prover_cache/memory.js'; + 
+describe('CachingBrokerFacade', () => { + let facade: CachingBrokerFacade; + let cache: InMemoryProverCache; + let proofStore: InlineProofStore; + let broker: MockProxy; + + beforeAll(() => { + jest.useFakeTimers(); + }); + + beforeEach(() => { + broker = mock({ + enqueueProvingJob: jest.fn(), + getProvingJobStatus: jest.fn(), + removeAndCancelProvingJob: jest.fn(), + waitForJobToSettle: jest.fn(), + }); + cache = new InMemoryProverCache(); + proofStore = new InlineProofStore(); + facade = new CachingBrokerFacade(broker, cache, proofStore); + }); + + it('marks job as in progress', async () => { + const controller = new AbortController(); + void facade.getBaseParityProof(makeBaseParityInputs(), controller.signal); + + await jest.advanceTimersToNextTimerAsync(); + + expect(broker.enqueueProvingJob).toHaveBeenCalled(); + const job = broker.enqueueProvingJob.mock.calls[0][0]; + + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'in-queue' }); + controller.abort(); + }); + + it('removes the cached value if a job fails to enqueue', async () => { + const { promise, reject } = promiseWithResolvers(); + broker.enqueueProvingJob.mockResolvedValue(promise); + + void facade.getBaseParityProof(makeBaseParityInputs()).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + + const job = broker.enqueueProvingJob.mock.calls[0][0]; + + reject(new Error('Failed to enqueue job')); + + await jest.advanceTimersToNextTimerAsync(); + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'not-found' }); + }); + + it('awaits existing job if in progress', async () => { + const inputs = makeBaseParityInputs(); + void facade.getBaseParityProof(inputs).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); + + void facade.getBaseParityProof(inputs).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); 
+ }); + + it('reuses already cached results', async () => { + const { promise, resolve } = promiseWithResolvers(); + broker.enqueueProvingJob.mockResolvedValue(Promise.resolve()); + broker.waitForJobToSettle.mockResolvedValue(promise); + + const inputs = makeBaseParityInputs(); + void facade.getBaseParityProof(inputs); + await jest.advanceTimersToNextTimerAsync(); + + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); + const job = broker.enqueueProvingJob.mock.calls[0][0]; + + const result = makePublicInputsAndRecursiveProof( + makeParityPublicInputs(), + makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeFakeHonk(), + ); + + const outputUri = await proofStore.saveProofOutput(job.id, ProvingRequestType.BASE_PARITY, result); + resolve({ + status: 'fulfilled', + value: outputUri, + }); + + await jest.advanceTimersToNextTimerAsync(); + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'fulfilled', value: outputUri }); + + await expect(facade.getBaseParityProof(inputs)).resolves.toEqual(result); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); // job was only ever enqueued once + }); +}); diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts new file mode 100644 index 00000000000..2885350d958 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts @@ -0,0 +1,312 @@ +import { + type ProofAndVerificationKey, + type ProverCache, + type ProvingJobId, + type ProvingJobInputsMap, + type ProvingJobProducer, + type ProvingJobResultsMap, + ProvingRequestType, + type PublicInputsAndRecursiveProof, + type ServerCircuitProver, +} from '@aztec/circuit-types'; +import { + type AVM_PROOF_LENGTH_IN_FIELDS, + type AvmCircuitInputs, + type BaseOrMergeRollupPublicInputs, + type BaseParityInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type 
BlockRootRollupInputs, + type EmptyBlockRootRollupInputs, + type KernelCircuitPublicInputs, + type MergeRollupInputs, + type NESTED_RECURSIVE_PROOF_LENGTH, + type ParityPublicInputs, + type PrivateBaseRollupInputs, + type PrivateKernelEmptyInputData, + type PublicBaseRollupInputs, + type RECURSIVE_PROOF_LENGTH, + type RootParityInputs, + type RootRollupInputs, + type RootRollupPublicInputs, + type TUBE_PROOF_LENGTH, + type TubeInputs, +} from '@aztec/circuits.js'; +import { sha256 } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { retryUntil } from '@aztec/foundation/retry'; + +import { InlineProofStore, type ProofStore } from './proof_store.js'; +import { InMemoryProverCache } from './prover_cache/memory.js'; + +// 20 minutes, roughly the length of an Aztec epoch. If a proof isn't ready in this amount of time then we've failed to prove the whole epoch +const MAX_WAIT_MS = 1_200_000; + +/** + * A facade around a job broker that generates stable job ids and caches results + */ +export class CachingBrokerFacade implements ServerCircuitProver { + constructor( + private broker: ProvingJobProducer, + private cache: ProverCache = new InMemoryProverCache(), + private proofStore: ProofStore = new InlineProofStore(), + private waitTimeoutMs = MAX_WAIT_MS, + private pollIntervalMs = 1000, + private log = createDebugLogger('aztec:prover-client:caching-prover-broker'), + ) {} + + private async enqueueAndWaitForJob( + id: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + signal?: AbortSignal, + ): Promise { + // first try the cache + let jobEnqueued = false; + try { + const cachedResult = await this.cache.getProvingJobStatus(id); + if (cachedResult.status !== 'not-found') { + this.log.debug(`Found cached result for job=${id}: status=${cachedResult.status}`); + } + + if (cachedResult.status === 'fulfilled') { + const output = await this.proofStore.getProofOutput(cachedResult.value); + if (output.type === type) { + 
return output.result as ProvingJobResultsMap[T]; + } else { + this.log.warn(`Cached result type mismatch for job=${id}. Expected=${type} but got=${output.type}`); + } + } else if (cachedResult.status === 'rejected') { + // prefer returning a rejected promises so that we don't trigger the catch block below + return Promise.reject(new Error(cachedResult.reason)); + } else if (cachedResult.status === 'in-progress' || cachedResult.status === 'in-queue') { + jobEnqueued = true; + } else { + jobEnqueued = false; + } + } catch (err) { + this.log.warn(`Failed to get cached proving job id=${id}: ${err}. Re-running job`); + } + + if (!jobEnqueued) { + try { + const inputsUri = await this.proofStore.saveProofInput(id, type, inputs); + await this.broker.enqueueProvingJob({ + id, + type, + inputsUri, + }); + await this.cache.setProvingJobStatus(id, { status: 'in-queue' }); + } catch (err) { + this.log.error(`Failed to enqueue proving job id=${id}: ${err}`); + await this.cache.setProvingJobStatus(id, { status: 'not-found' }); + throw err; + } + } + + // notify broker of cancelled job + const abortFn = async () => { + signal?.removeEventListener('abort', abortFn); + await this.broker.removeAndCancelProvingJob(id); + }; + + signal?.addEventListener('abort', abortFn); + + try { + // loop here until the job settles + // NOTE: this could also terminate because the job was cancelled through event listener above + const result = await retryUntil( + async () => { + try { + return await this.broker.waitForJobToSettle(id); + } catch (err) { + // waitForJobToSettle can only fail for network errors + // keep retrying until we time out + } + }, + `Proving job=${id} type=${ProvingRequestType[type]}`, + this.waitTimeoutMs / 1000, + this.pollIntervalMs / 1000, + ); + + try { + await this.cache.setProvingJobStatus(id, result); + } catch (err) { + this.log.warn(`Failed to cache proving job id=${id} resultStatus=${result.status}: ${err}`); + } + + if (result.status === 'fulfilled') { + const 
output = await this.proofStore.getProofOutput(result.value); + if (output.type === type) { + return output.result as ProvingJobResultsMap[T]; + } else { + return Promise.reject(new Error(`Unexpected proof type: ${output.type}. Expected: ${type}`)); + } + } else { + return Promise.reject(new Error(result.reason)); + } + } finally { + signal?.removeEventListener('abort', abortFn); + } + } + + getAvmProof( + inputs: AvmCircuitInputs, + signal?: AbortSignal, + _blockNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PUBLIC_VM, inputs), + ProvingRequestType.PUBLIC_VM, + inputs, + signal, + ); + } + + getBaseParityProof( + inputs: BaseParityInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BASE_PARITY, inputs), + ProvingRequestType.BASE_PARITY, + inputs, + signal, + ); + } + + getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BLOCK_MERGE_ROLLUP, input), + ProvingRequestType.BLOCK_MERGE_ROLLUP, + input, + signal, + ); + } + + getBlockRootRollupProof( + input: BlockRootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BLOCK_ROOT_ROLLUP, input), + ProvingRequestType.BLOCK_ROOT_ROLLUP, + input, + signal, + ); + } + + getEmptyBlockRootRollupProof( + input: EmptyBlockRootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, input), + ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, + input, + signal, + ); + } + + getEmptyPrivateKernelProof( + inputs: PrivateKernelEmptyInputData, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return 
this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs), + ProvingRequestType.PRIVATE_KERNEL_EMPTY, + inputs, + signal, + ); + } + + getMergeRollupProof( + input: MergeRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.MERGE_ROLLUP, input), + ProvingRequestType.MERGE_ROLLUP, + input, + signal, + ); + } + getPrivateBaseRollupProof( + baseRollupInput: PrivateBaseRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PRIVATE_BASE_ROLLUP, baseRollupInput), + ProvingRequestType.PRIVATE_BASE_ROLLUP, + baseRollupInput, + signal, + ); + } + + getPublicBaseRollupProof( + inputs: PublicBaseRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs), + ProvingRequestType.PUBLIC_BASE_ROLLUP, + inputs, + signal, + ); + } + + getRootParityProof( + inputs: RootParityInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.ROOT_PARITY, inputs), + ProvingRequestType.ROOT_PARITY, + inputs, + signal, + ); + } + + getRootRollupProof( + input: RootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.ROOT_ROLLUP, input), + ProvingRequestType.ROOT_ROLLUP, + input, + signal, + ); + } + + getTubeProof( + tubeInput: TubeInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.TUBE_PROOF, tubeInput), + ProvingRequestType.TUBE_PROOF, + tubeInput, + signal, + ); + } + + private generateId(type: ProvingRequestType, inputs: { toBuffer(): Buffer }) { + const inputsHash = 
sha256(inputs.toBuffer()); + return `${ProvingRequestType[type]}:${inputsHash.toString('hex')}`; + } +} diff --git a/yarn-project/prover-client/src/proving_broker/factory.ts b/yarn-project/prover-client/src/proving_broker/factory.ts new file mode 100644 index 00000000000..02a5fcb314b --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/factory.ts @@ -0,0 +1,21 @@ +import { type ProverBrokerConfig } from '@aztec/circuit-types'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; + +import { ProvingBroker } from './proving_broker.js'; +import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; +import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; + +export async function createAndStartProvingBroker(config: ProverBrokerConfig): Promise { + const database = config.proverBrokerDataDirectory + ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory)) + : new InMemoryBrokerDatabase(); + + const broker = new ProvingBroker(database, { + jobTimeoutMs: config.proverBrokerJobTimeoutMs, + maxRetries: config.proverBrokerJobMaxRetries, + timeoutIntervalMs: config.proverBrokerPollIntervalMs, + }); + + await broker.start(); + return broker; +} diff --git a/yarn-project/prover-client/src/proving_broker/index.ts b/yarn-project/prover-client/src/proving_broker/index.ts new file mode 100644 index 00000000000..6770b1ea14e --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/index.ts @@ -0,0 +1,8 @@ +export * from './proving_agent.js'; +export * from './proving_broker.js'; +export * from './rpc.js'; +export * from './proving_broker_database.js'; +export * from './proving_broker_database/memory.js'; +export * from './proving_broker_database/persisted.js'; +export * from './proof_store.js'; +export * from './factory.js'; diff --git a/yarn-project/prover-client/src/proving_broker/proof_store.ts b/yarn-project/prover-client/src/proving_broker/proof_store.ts new file mode 100644 index 
00000000000..9f605170ed3 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proof_store.ts @@ -0,0 +1,106 @@ +import { + type ProofUri, + type ProvingJobId, + ProvingJobInputs, + type ProvingJobInputsMap, + ProvingJobResult, + type ProvingJobResultsMap, + type ProvingRequestType, +} from '@aztec/circuit-types'; +import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; +import { type ZodFor } from '@aztec/foundation/schemas'; + +/** + * A database for storing proof inputs and outputs. + */ +export interface ProofStore { + /** + * Save a proof input to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param inputs - The proof input to save. + * @returns The URI of the saved proof input. + */ + saveProofInput( + jobId: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + ): Promise; + + /** + * Save a proof output to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param result - The proof output to save. + * @returns The URI of the saved proof output. + */ + saveProofOutput( + id: ProvingJobId, + type: T, + result: ProvingJobResultsMap[T], + ): Promise; + + /** + * Retrieve a proof input from the database. + * @param uri - The URI of the proof input to retrieve. + * @returns The proof input. + */ + getProofInput(uri: ProofUri): Promise; + + /** + * Retrieve a proof output from the database. + * @param uri - The URI of the proof output to retrieve. + * @returns The proof output. 
+ */ + getProofOutput(uri: ProofUri): Promise; +} + +// use an ASCII encoded data uri https://datatracker.ietf.org/doc/html/rfc2397#section-2 +// we do this to avoid double encoding to base64 (since the inputs already serialize to a base64 string) +const PREFIX = 'data:application/json;charset=utf-8'; +const SEPARATOR = ','; + +/** + * An implementation of a proof input/output database that stores data inline in the URI. + */ +export class InlineProofStore implements ProofStore { + saveProofInput( + _id: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + ): Promise { + const jobInputs = { type, inputs } as ProvingJobInputs; + return Promise.resolve(this.encode(jobInputs)); + } + + saveProofOutput( + _id: ProvingJobId, + type: T, + result: ProvingJobResultsMap[T], + ): Promise { + const jobResult = { type, result } as ProvingJobResult; + return Promise.resolve(this.encode(jobResult)); + } + + getProofInput(uri: ProofUri): Promise { + return Promise.resolve(this.decode(uri, ProvingJobInputs)); + } + + getProofOutput(uri: ProofUri): Promise { + return Promise.resolve(this.decode(uri, ProvingJobResult)); + } + + private encode(obj: object): ProofUri { + const encoded = encodeURIComponent(jsonStringify(obj)); + return (PREFIX + SEPARATOR + encoded) as ProofUri; + } + + private decode(uri: ProofUri, schema: ZodFor): T { + const [prefix, data] = uri.split(SEPARATOR); + if (prefix !== PREFIX) { + throw new Error('Invalid proof input URI: ' + prefix); + } + + return jsonParseWithSchema(decodeURIComponent(data), schema); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts new file mode 100644 index 00000000000..b4da076cbcb --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts @@ -0,0 +1,20 @@ +import type { ProverCache, ProvingJobStatus } from '@aztec/circuit-types'; + +export class InMemoryProverCache implements ProverCache 
{ + private proofs: Record = {}; + + constructor() {} + + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise { + this.proofs[jobId] = status; + return Promise.resolve(); + } + + getProvingJobStatus(jobId: string): Promise { + return Promise.resolve(this.proofs[jobId] ?? { status: 'not-found' }); + } + + close(): Promise { + return Promise.resolve(); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts index 9a2c7db1da9..cc49057ab6d 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts @@ -1,9 +1,12 @@ import { + type ProofUri, ProvingError, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobId, + type ProvingJobInputs, ProvingRequestType, type PublicInputsAndRecursiveProof, - type V2ProvingJob, - type V2ProvingJobId, makePublicInputsAndRecursiveProof, } from '@aztec/circuit-types'; import { @@ -20,13 +23,14 @@ import { promiseWithResolvers } from '@aztec/foundation/promise'; import { jest } from '@jest/globals'; import { MockProver } from '../test/mock_prover.js'; +import { type ProofStore } from './proof_store.js'; import { ProvingAgent } from './proving_agent.js'; -import { type ProvingJobConsumer } from './proving_broker_interface.js'; describe('ProvingAgent', () => { let prover: MockProver; let jobSource: jest.Mocked; let agent: ProvingAgent; + let proofDB: jest.Mocked; const agentPollIntervalMs = 1000; beforeEach(() => { @@ -39,7 +43,14 @@ describe('ProvingAgent', () => { reportProvingJobError: jest.fn(), reportProvingJobSuccess: jest.fn(), }; - agent = new ProvingAgent(jobSource, prover, [ProvingRequestType.BASE_PARITY]); + proofDB = { + getProofInput: jest.fn(), + getProofOutput: jest.fn(), + saveProofInput: jest.fn(), + saveProofOutput: jest.fn(), + }; + + agent = new ProvingAgent(jobSource, proofDB, prover, 
[ProvingRequestType.BASE_PARITY]); }); afterEach(async () => { @@ -59,8 +70,9 @@ describe('ProvingAgent', () => { promiseWithResolvers>(); jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); - const jobResponse = makeBaseParityJob(); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + const { job, time, inputs } = makeBaseParityJob(); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); @@ -85,64 +97,72 @@ describe('ProvingAgent', () => { }); it('reports success to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const result = makeBaseParityResult(); - jest.spyOn(prover, 'getBaseParityProof').mockResolvedValueOnce(result.value); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jest.spyOn(prover, 'getBaseParityProof').mockResolvedValueOnce(result); + + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); + proofDB.saveProofOutput.mockResolvedValueOnce('output-uri' as ProofUri); + agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobSuccess).toHaveBeenCalledWith(jobResponse.job.id, result); + expect(proofDB.saveProofOutput).toHaveBeenCalledWith(job.id, job.type, result); + expect(jobSource.reportProvingJobSuccess).toHaveBeenCalledWith(job.id, 'output-uri'); }); it('reports errors to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); jest.spyOn(prover, 'getBaseParityProof').mockRejectedValueOnce(new Error('test error')); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); 
await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(jobResponse.job.id, new Error('test error'), false); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, 'test error', false); }); it('sets the retry flag on when reporting an error', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const err = new ProvingError('test error', undefined, true); jest.spyOn(prover, 'getBaseParityProof').mockRejectedValueOnce(err); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(jobResponse.job.id, err, true); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, err.message, true); }); it('reports jobs in progress to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const { promise, resolve } = promiseWithResolvers>(); jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(jobResponse.job.id, jobResponse.time, { + expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(job.id, time, { allowList: [ProvingRequestType.BASE_PARITY], }); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(jobResponse.job.id, jobResponse.time, { + 
expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(job.id, time, { allowList: [ProvingRequestType.BASE_PARITY], }); - resolve(makeBaseParityResult().value); + resolve(makeBaseParityResult()); }); it('abandons jobs if told so by the source', async () => { - const firstJobResponse = makeBaseParityJob(); + const firstJob = makeBaseParityJob(); let firstProofAborted = false; const firstProof = promiseWithResolvers>(); @@ -156,13 +176,14 @@ describe('ProvingAgent', () => { return firstProof.promise; }); - jobSource.getProvingJob.mockResolvedValueOnce(firstJobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job: firstJob.job, time: firstJob.time }); + proofDB.getProofInput.mockResolvedValueOnce(firstJob.inputs); agent.start(); // now the agent should be happily proving and reporting progress await jest.advanceTimersByTimeAsync(agentPollIntervalMs); expect(jobSource.reportProvingJobProgress).toHaveBeenCalledTimes(1); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(firstJobResponse.job.id, firstJobResponse.time, { + expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(firstJob.job.id, firstJob.time, { allowList: [ProvingRequestType.BASE_PARITY], }); @@ -172,7 +193,9 @@ describe('ProvingAgent', () => { // now let's simulate the job source cancelling the job and giving the agent something else to do // this should cause the agent to abort the current job and start the new one const secondJobResponse = makeBaseParityJob(); + jobSource.reportProvingJobProgress.mockResolvedValueOnce(secondJobResponse); + proofDB.getProofInput.mockResolvedValueOnce(secondJobResponse.inputs); const secondProof = promiseWithResolvers>(); @@ -180,13 +203,9 @@ describe('ProvingAgent', () => { await jest.advanceTimersByTimeAsync(agentPollIntervalMs); expect(jobSource.reportProvingJobProgress).toHaveBeenCalledTimes(3); - expect(jobSource.reportProvingJobProgress).toHaveBeenLastCalledWith( - firstJobResponse.job.id, - firstJobResponse.time, - { - 
allowList: [ProvingRequestType.BASE_PARITY], - }, - ); + expect(jobSource.reportProvingJobProgress).toHaveBeenLastCalledWith(firstJob.job.id, firstJob.time, { + allowList: [ProvingRequestType.BASE_PARITY], + }); expect(firstProofAborted).toBe(true); // agent should have switched now @@ -200,27 +219,38 @@ describe('ProvingAgent', () => { }, ); - secondProof.resolve(makeBaseParityResult().value); + secondProof.resolve(makeBaseParityResult()); + }); + + it('reports an error if inputs cannot be loaded', async () => { + const { job, time } = makeBaseParityJob(); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockRejectedValueOnce(new Error('Failed to load proof inputs')); + + agent.start(); + + await jest.advanceTimersByTimeAsync(agentPollIntervalMs); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, 'Failed to load proof inputs', true); }); - function makeBaseParityJob(): { job: V2ProvingJob; time: number } { + function makeBaseParityJob(): { job: ProvingJob; time: number; inputs: ProvingJobInputs } { const time = jest.now(); - const job: V2ProvingJob = { - id: randomBytes(8).toString('hex') as V2ProvingJobId, + const inputs: ProvingJobInputs = { type: ProvingRequestType.BASE_PARITY, inputs: makeBaseParityInputs() }; + const job: ProvingJob = { + id: randomBytes(8).toString('hex') as ProvingJobId, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputsUri: randomBytes(8).toString('hex') as ProofUri, }; - return { job, time }; + return { job, time, inputs }; } function makeBaseParityResult() { - const value = makePublicInputsAndRecursiveProof( + return makePublicInputsAndRecursiveProof( makeParityPublicInputs(), makeRecursiveProof(RECURSIVE_PROOF_LENGTH), VerificationKeyData.makeFakeHonk(), ); - return { type: ProvingRequestType.BASE_PARITY, value }; } }); diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts 
b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 5ee86900e0d..b7ee2eb69f8 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -1,14 +1,18 @@ import { ProvingError, - type ProvingRequestType, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobId, + type ProvingJobInputs, + type ProvingJobResultsMap, + ProvingRequestType, type ServerCircuitProver, - type V2ProvingJob, } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; -import { type ProvingJobConsumer } from './proving_broker_interface.js'; -import { ProvingJobController, ProvingJobStatus } from './proving_job_controller.js'; +import { type ProofStore } from './proof_store.js'; +import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; /** * A helper class that encapsulates a circuit prover and connects it to a job source. 
@@ -19,14 +23,16 @@ export class ProvingAgent { constructor( /** The source of proving jobs */ - private jobSource: ProvingJobConsumer, + private broker: ProvingJobConsumer, + /** Database holding proof inputs and outputs */ + private proofStore: ProofStore, /** The prover implementation to defer jobs to */ private circuitProver: ServerCircuitProver, /** Optional list of allowed proof types to build */ - private proofAllowList?: Array, + private proofAllowList: Array = [], /** How long to wait between jobs */ private pollIntervalMs = 1000, - private log = createDebugLogger('aztec:proving-broker:proving-agent'), + private log = createDebugLogger('aztec:prover-client:proving-agent'), ) { this.runningPromise = new RunningPromise(this.safeWork, this.pollIntervalMs); } @@ -54,37 +60,86 @@ export class ProvingAgent { // (1) either do a heartbeat, telling the broker that we're working // (2) get a new job // If during (1) the broker returns a new job that means we can cancel the current job and start the new one - let maybeJob: { job: V2ProvingJob; time: number } | undefined; - if (this.currentJobController?.getStatus() === ProvingJobStatus.PROVING) { - maybeJob = await this.jobSource.reportProvingJobProgress( + let maybeJob: { job: ProvingJob; time: number } | undefined; + if (this.currentJobController?.getStatus() === ProvingJobControllerStatus.PROVING) { + maybeJob = await this.broker.reportProvingJobProgress( this.currentJobController.getJobId(), this.currentJobController.getStartedAt(), { allowList: this.proofAllowList }, ); } else { - maybeJob = await this.jobSource.getProvingJob({ allowList: this.proofAllowList }); + maybeJob = await this.broker.getProvingJob({ allowList: this.proofAllowList }); } if (!maybeJob) { return; } - if (this.currentJobController?.getStatus() === ProvingJobStatus.PROVING) { + let abortedProofJobId: string | undefined; + let abortedProofName: string | undefined; + if (this.currentJobController?.getStatus() === 
ProvingJobControllerStatus.PROVING) { + abortedProofJobId = this.currentJobController.getJobId(); + abortedProofName = this.currentJobController.getProofTypeName(); this.currentJobController?.abort(); } const { job, time } = maybeJob; - this.currentJobController = new ProvingJobController(job, time, this.circuitProver, (err, result) => { - if (err) { - const retry = err.name === ProvingError.NAME ? (err as ProvingError).retry : false; - return this.jobSource.reportProvingJobError(job.id, err, retry); - } else if (result) { - return this.jobSource.reportProvingJobSuccess(job.id, result); - } - }); + let inputs: ProvingJobInputs; + try { + inputs = await this.proofStore.getProofInput(job.inputsUri); + } catch (err) { + await this.broker.reportProvingJobError(job.id, 'Failed to load proof inputs', true); + return; + } + + this.currentJobController = new ProvingJobController( + job.id, + inputs, + time, + this.circuitProver, + this.handleJobResult, + ); + + if (abortedProofJobId) { + this.log.info( + `Aborting job id=${abortedProofJobId} type=${abortedProofName} to start new job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( + job.inputsUri, + )}`, + ); + } else { + this.log.info( + `Starting job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( + job.inputsUri, + )}`, + ); + } + this.currentJobController.start(); } catch (err) { this.log.error(`Error in ProvingAgent: ${String(err)}`); } }; + + handleJobResult = async ( + jobId: ProvingJobId, + type: T, + err: Error | undefined, + result: ProvingJobResultsMap[T] | undefined, + ) => { + if (err) { + const retry = err.name === ProvingError.NAME ? 
(err as ProvingError).retry : false; + this.log.info(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`); + return this.broker.reportProvingJobError(jobId, err.message, retry); + } else if (result) { + const outputUri = await this.proofStore.saveProofOutput(jobId, type, result); + this.log.info( + `Job id=${jobId} type=${ProvingRequestType[type]} completed outputUri=${truncateString(outputUri)}`, + ); + return this.broker.reportProvingJobSuccess(jobId, outputUri); + } + }; +} + +function truncateString(str: string, length: number = 64): string { + return str.length > length ? str.slice(0, length) + '...' : str; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts index fef79bfb99f..543843a6e15 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts @@ -1,57 +1,43 @@ -import { - ProvingRequestType, - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, - makePublicInputsAndRecursiveProof, -} from '@aztec/circuit-types'; -import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; -import { - makeBaseOrMergeRollupPublicInputs, - makeBaseParityInputs, - makeParityPublicInputs, - makePrivateBaseRollupInputs, - makeRootParityInputs, -} from '@aztec/circuits.js/testing'; +import { type ProofUri, type ProvingJob, type ProvingJobId, ProvingRequestType } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { openTmpStore } from '@aztec/kv-store/utils'; import { jest } from '@jest/globals'; import { ProvingBroker } from './proving_broker.js'; -import { type ProvingJobDatabase } from './proving_job_database.js'; -import { InMemoryDatabase } from './proving_job_database/memory.js'; -import { PersistedProvingJobDatabase } from 
'./proving_job_database/persisted.js'; +import { type ProvingBrokerDatabase } from './proving_broker_database.js'; +import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; +import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; beforeAll(() => { jest.useFakeTimers(); }); describe.each([ - () => ({ database: new InMemoryDatabase(), cleanup: undefined }), + () => ({ database: new InMemoryBrokerDatabase(), cleanup: undefined }), () => { const store = openTmpStore(true); - const database = new PersistedProvingJobDatabase(store); + const database = new KVBrokerDatabase(store); const cleanup = () => store.close(); return { database, cleanup }; }, ])('ProvingBroker', createDb => { let broker: ProvingBroker; - let jobTimeoutSec: number; + let jobTimeoutMs: number; let maxRetries: number; - let database: ProvingJobDatabase; + let database: ProvingBrokerDatabase; let cleanup: undefined | (() => Promise | void); const now = () => Math.floor(Date.now() / 1000); beforeEach(() => { - jobTimeoutSec = 10; + jobTimeoutMs = 10_000; maxRetries = 2; ({ database, cleanup } = createDb()); broker = new ProvingBroker(database, { - jobTimeoutSec: jobTimeoutSec, - timeoutIntervalSec: jobTimeoutSec / 4, + jobTimeoutMs, + timeoutIntervalMs: jobTimeoutMs / 4, maxRetries, }); }); @@ -77,7 +63,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id)).toEqual({ status: 'in-queue' }); @@ -86,17 +72,17 @@ describe.each([ id: id2, blockNumber: 1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id2)).toEqual({ status: 'in-queue' }); }); it('ignores duplicate jobs', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, 
blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -110,14 +96,14 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(1), + inputsUri: makeInputsUri(), }); await expect( broker.enqueueProvingJob({ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(2), + inputsUri: makeInputsUri(), }), ).rejects.toThrow('Duplicate proving job ID'); }); @@ -133,7 +119,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); @@ -148,7 +134,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); await broker.getProvingJob(); @@ -158,39 +144,35 @@ describe.each([ }); it('returns job result if successful', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); - const value = makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ); - await broker.reportProvingJobSuccess(provingJob.id, { type: ProvingRequestType.BASE_PARITY, value }); + const value = makeOutputsUri(); + await broker.reportProvingJobSuccess(provingJob.id, value); const status = await broker.getProvingJobStatus(provingJob.id); - expect(status).toEqual({ status: 'resolved', value: { type: ProvingRequestType.BASE_PARITY, value } }); + expect(status).toEqual({ status: 'fulfilled', 
value }); }); it('returns job error if failed', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); - const error = new Error('test error'); + const error = 'test error'; await broker.reportProvingJobError(provingJob.id, error); const status = await broker.getProvingJobStatus(provingJob.id); - expect(status).toEqual({ status: 'rejected', error: String(error) }); + expect(status).toEqual({ status: 'rejected', reason: String(error) }); }); }); @@ -209,25 +191,25 @@ describe.each([ }); it('returns jobs in priority order', async () => { - const provingJob1: V2ProvingJob = { + const provingJob1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; - const provingJob2: V2ProvingJob = { + const provingJob2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; - const provingJob3: V2ProvingJob = { + const provingJob3: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 3, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob2); @@ -242,7 +224,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await expect( @@ -256,7 +238,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -264,7 +246,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: 
makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -272,7 +254,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -280,7 +262,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeRootParityInputs(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(baseParity1, ProvingRequestType.BASE_PARITY); @@ -292,7 +274,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -300,7 +282,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -308,7 +290,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -316,7 +298,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeRootParityInputs(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId( @@ -327,13 +309,49 @@ describe.each([ ); }); + it('returns any job if filter is empty', async () => { + const baseParity1 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: baseParity1, + type: ProvingRequestType.BASE_PARITY, + blockNumber: 1, + inputsUri: makeInputsUri(), + }); + + const baseRollup1 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: baseRollup1, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + blockNumber: 1, + inputsUri: makeInputsUri(), + }); + + const baseRollup2 = makeProvingJobId(); 
+ await broker.enqueueProvingJob({ + id: baseRollup2, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + blockNumber: 2, + inputsUri: makeInputsUri(), + }); + + const rootParity1 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: rootParity1, + type: ProvingRequestType.ROOT_PARITY, + blockNumber: 1, + inputsUri: makeInputsUri(), + }); + + await getAndAssertNextJobId(baseRollup1); + }); + it('returns a new job when reporting progress if current one is cancelled', async () => { const id = makeProvingJobId(); await broker.enqueueProvingJob({ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await broker.getProvingJob(); await assertJobStatus(id, 'in-progress'); @@ -345,7 +363,7 @@ describe.each([ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await expect( broker.reportProvingJobProgress(id, now(), { allowList: [ProvingRequestType.BASE_PARITY] }), @@ -354,18 +372,18 @@ describe.each([ it('returns a new job if job is already in progress elsewhere', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -378,7 +396,7 @@ describe.each([ expect(firstAgentJob).toEqual(job1); await assertJobStatus(job1.id, 'in-progress'); - await jest.advanceTimersByTimeAsync(jobTimeoutSec / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); await expect( broker.reportProvingJobProgress(job1.id, 
firstAgentStartedAt, { allowList: [ProvingRequestType.BASE_PARITY], @@ -422,18 +440,18 @@ describe.each([ it('avoids sending the same job to a new agent after a restart', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -477,18 +495,18 @@ describe.each([ it('avoids sending a completed job to a new agent after a restart', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -501,7 +519,7 @@ describe.each([ await broker.stop(); // fake some time passing while the broker restarts - await jest.advanceTimersByTimeAsync(100 * jobTimeoutSec * 1000); + await jest.advanceTimersByTimeAsync(100 * jobTimeoutMs); broker = new ProvingBroker(database); await broker.start(); @@ -510,22 +528,13 @@ describe.each([ // after the restart the new broker thinks job1 is available // inform the agent of the job completion - await expect( - broker.reportProvingJobSuccess(job1.id, { - type: ProvingRequestType.BASE_PARITY, - value: 
makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }), - ).resolves.toBeUndefined(); - await assertJobStatus(job1.id, 'resolved'); + await expect(broker.reportProvingJobSuccess(job1.id, makeOutputsUri())).resolves.toBeUndefined(); + await assertJobStatus(job1.id, 'fulfilled'); // make sure the the broker sends the next job to the agent await getAndAssertNextJobId(job2.id); - await assertJobStatus(job1.id, 'resolved'); + await assertJobStatus(job1.id, 'fulfilled'); await assertJobStatus(job2.id, 'in-progress'); }); @@ -536,30 +545,23 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(id1); await assertJobStatus(id1, 'in-progress'); - await broker.reportProvingJobSuccess(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); - await assertJobStatus(id1, 'resolved'); + await broker.reportProvingJobSuccess(id1, makeOutputsUri()); + await assertJobStatus(id1, 'fulfilled'); await getAndAssertNextJobId(id2); await assertJobStatus(id2, 'in-progress'); - await broker.reportProvingJobError(id2, new Error('test error')); + await broker.reportProvingJobError(id2, 'test error'); await assertJobStatus(id2, 'rejected'); }); @@ -570,47 +572,33 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputsUri: 
makeInputsUri(), }); - await broker.reportProvingJobSuccess(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); - await assertJobStatus(id1, 'resolved'); + await broker.reportProvingJobSuccess(id1, makeOutputsUri()); + await assertJobStatus(id1, 'fulfilled'); - await broker.reportProvingJobError(id2, new Error('test error')); + await broker.reportProvingJobError(id2, 'test error'); await assertJobStatus(id2, 'rejected'); }); it('ignores reported job error if unknown job', async () => { const id = makeProvingJobId(); await assertJobStatus(id, 'not-found'); - await broker.reportProvingJobError(id, new Error('test error')); + await broker.reportProvingJobError(id, 'test error'); await assertJobStatus(id, 'not-found'); }); it('ignores job result if unknown job', async () => { const id = makeProvingJobId(); await assertJobStatus(id, 'not-found'); - await broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id, makeOutputsUri()); await assertJobStatus(id, 'not-found'); }); }); @@ -630,7 +618,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -644,7 +632,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -652,7 +640,7 @@ describe.each([ await assertJobStatus(id, 'in-progress'); // advance time so job times out because of no heartbeats - await jest.advanceTimersByTimeAsync(jobTimeoutSec * 1000); + await 
jest.advanceTimersByTimeAsync(jobTimeoutMs); // should be back in the queue now await assertJobStatus(id, 'in-queue'); @@ -664,7 +652,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -673,7 +661,7 @@ describe.each([ await assertJobStatus(id, 'in-progress'); // advance the time slightly, not enough for the request to timeout - await jest.advanceTimersByTimeAsync((jobTimeoutSec * 1000) / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); await assertJobStatus(id, 'in-progress'); @@ -681,24 +669,24 @@ describe.each([ await broker.reportProvingJobProgress(id, time); // advance the time again - await jest.advanceTimersByTimeAsync((jobTimeoutSec * 1000) / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); // should still be our request to process await assertJobStatus(id, 'in-progress'); // advance the time again and lose the request - await jest.advanceTimersByTimeAsync(jobTimeoutSec * 1000); + await jest.advanceTimersByTimeAsync(jobTimeoutMs); await assertJobStatus(id, 'in-queue'); }); }); describe('Retries', () => { it('retries jobs', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -713,7 +701,7 @@ describe.each([ status: 'in-progress', }); - await broker.reportProvingJobError(provingJob.id, new Error('test error'), true); + await broker.reportProvingJobError(provingJob.id, 'test error', true); await expect(broker.getProvingJobStatus(provingJob.id)).resolves.toEqual({ status: 'in-queue', @@ -726,19 +714,19 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); for (let i = 0; i < maxRetries; i++) 
{ await assertJobStatus(id, 'in-queue'); await getAndAssertNextJobId(id); await assertJobStatus(id, 'in-progress'); - await broker.reportProvingJobError(id, new Error('test error'), true); + await broker.reportProvingJobError(id, 'test error', true); } await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'rejected', - error: String(new Error('test error')), + reason: 'test error', }); }); @@ -748,15 +736,15 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(id); await assertJobStatus(id, 'in-progress'); - await broker.reportProvingJobError(id, new Error('test error'), false); + await broker.reportProvingJobError(id, 'test error', false); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'rejected', - error: String(new Error('test error')), + reason: 'test error', }); }); }); @@ -773,7 +761,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -781,7 +769,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); await broker.start(); @@ -794,7 +782,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: expect.any(Object), + inputsUri: expect.any(String), }, time: expect.any(Number), }); @@ -804,7 +792,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: expect.any(Object), + inputsUri: expect.any(String), }, time: expect.any(Number), }); @@ -824,7 +812,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -832,37 +820,22 @@ describe.each([ id: id2, 
type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); - - await database.setProvingJobResult(id2, { - type: ProvingRequestType.PRIVATE_BASE_ROLLUP, - value: makePublicInputsAndRecursiveProof( - makeBaseOrMergeRollupPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await database.setProvingJobResult(id1, makeOutputsUri()); + await database.setProvingJobResult(id2, makeOutputsUri()); await broker.start(); await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', - value: expect.any(Object), + status: 'fulfilled', + value: expect.any(String), }); await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ - status: 'resolved', - value: expect.any(Object), + status: 'fulfilled', + value: expect.any(String), }); }); @@ -873,33 +846,22 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), - }); - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), + inputsUri: makeInputsUri(), }); + await database.setProvingJobResult(id1, makeOutputsUri()); const id2 = makeProvingJobId(); await database.addProvingJob({ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); await broker.start(); - await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', - value: 
expect.any(Object), - }); - - await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'in-queue' }); + await assertJobStatus(id1, 'fulfilled'); + await assertJobStatus(id2, 'in-queue'); await getAndAssertNextJobId(id2); }); @@ -910,33 +872,22 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), - }); - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), + inputsUri: makeInputsUri(), }); + await database.setProvingJobResult(id1, makeOutputsUri()); const id2 = makeProvingJobId(); await database.addProvingJob({ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputsUri: makeInputsUri(), }); await broker.start(); - await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', - value: expect.any(Object), - }); - - await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'in-queue' }); + await assertJobStatus(id1, 'fulfilled'); + await assertJobStatus(id2, 'in-queue'); jest.spyOn(database, 'deleteProvingJobAndResult'); @@ -948,15 +899,17 @@ describe.each([ await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ status: 'not-found' }); await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'not-found' }); + await assertJobStatus(id1, 'not-found'); + await assertJobStatus(id2, 'not-found'); }); it('saves job when enqueued', async () => { await broker.start(); - const job: V2ProvingJob = { + const job: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; jest.spyOn(database, 'addProvingJob'); @@ -975,7 +928,7 @@ describe.each([ id, type: 
ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }), ).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'not-found'); @@ -984,28 +937,19 @@ describe.each([ it('saves job result', async () => { await broker.start(); - const job: V2ProvingJob = { + const job: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }; jest.spyOn(database, 'setProvingJobResult'); await broker.enqueueProvingJob(job); - const result: V2ProofOutput = { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }; - await broker.reportProvingJobSuccess(job.id, result); - - await assertJobStatus(job.id, 'resolved'); - expect(database.setProvingJobResult).toHaveBeenCalledWith(job.id, result); + await broker.reportProvingJobSuccess(job.id, makeOutputsUri()); + await assertJobStatus(job.id, 'fulfilled'); + expect(database.setProvingJobResult).toHaveBeenCalledWith(job.id, expect.any(String)); }); it('does not retain job result if database fails to save', async () => { @@ -1016,18 +960,9 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); - await expect( - broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }), - ).rejects.toThrow(new Error('db error')); + await expect(broker.reportProvingJobSuccess(id, makeOutputsUri())).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); }); @@ -1041,10 +976,10 @@ describe.each([ id, type: 
ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); - const error = new Error('test error'); + const error = 'test error'; await broker.reportProvingJobError(id, error); await assertJobStatus(id, 'rejected'); expect(database.setProvingJobError).toHaveBeenCalledWith(id, error); @@ -1058,9 +993,9 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputsUri: makeInputsUri(), }); - await expect(broker.reportProvingJobError(id, new Error())).rejects.toThrow(new Error('db error')); + await expect(broker.reportProvingJobError(id, 'test error')).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); }); @@ -1071,14 +1006,7 @@ describe.each([ jest.spyOn(database, 'setProvingJobResult'); jest.spyOn(database, 'addProvingJob'); - await broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id, makeOutputsUri()); expect(database.setProvingJobResult).not.toHaveBeenCalled(); expect(database.addProvingJob).not.toHaveBeenCalled(); @@ -1091,24 +1019,32 @@ describe.each([ jest.spyOn(database, 'setProvingJobError'); jest.spyOn(database, 'addProvingJob'); - await broker.reportProvingJobError(id, new Error('test error')); + await broker.reportProvingJobError(id, 'test error'); expect(database.setProvingJobError).not.toHaveBeenCalled(); expect(database.addProvingJob).not.toHaveBeenCalled(); }); }); - async function assertJobStatus(id: V2ProvingJobId, status: string) { + async function assertJobStatus(id: ProvingJobId, status: string) { await expect(broker.getProvingJobStatus(id)).resolves.toEqual(expect.objectContaining({ status })); } - async function getAndAssertNextJobId(id: V2ProvingJobId, 
...allowList: ProvingRequestType[]) { - await expect(broker.getProvingJob(allowList.length > 0 ? { allowList } : undefined)).resolves.toEqual( + async function getAndAssertNextJobId(id: ProvingJobId, ...allowList: ProvingRequestType[]) { + await expect(broker.getProvingJob({ allowList })).resolves.toEqual( expect.objectContaining({ job: expect.objectContaining({ id }) }), ); } }); -function makeProvingJobId(): V2ProvingJobId { - return randomBytes(8).toString('hex') as V2ProvingJobId; +function makeProvingJobId(): ProvingJobId { + return randomBytes(8).toString('hex') as ProvingJobId; +} + +function makeInputsUri(): ProofUri { + return randomBytes(8).toString('hex') as ProofUri; +} + +function makeOutputsUri(): ProofUri { + return randomBytes(8).toString('hex') as ProofUri; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index 2fe40eac234..62667821ec7 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -1,29 +1,31 @@ import { + type ProofUri, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobFilter, + type ProvingJobId, + type ProvingJobProducer, + type ProvingJobSettledResult, + type ProvingJobStatus, ProvingRequestType, - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobResult, - type V2ProvingJobStatus, } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/promise'; +import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; import assert from 'assert'; -import type { ProvingJobConsumer, ProvingJobFilter, ProvingJobProducer } from './proving_broker_interface.js'; -import { type ProvingJobDatabase } from 
'./proving_job_database.js'; +import { type ProvingBrokerDatabase } from './proving_broker_database.js'; type InProgressMetadata = { - id: V2ProvingJobId; + id: ProvingJobId; startedAt: number; lastUpdatedAt: number; }; type ProofRequestBrokerConfig = { - timeoutIntervalSec?: number; - jobTimeoutSec?: number; + timeoutIntervalMs?: number; + jobTimeoutMs?: number; maxRetries?: number; }; @@ -33,50 +35,53 @@ type ProofRequestBrokerConfig = { */ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { private queues: ProvingQueues = { - [ProvingRequestType.PUBLIC_VM]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.TUBE_PROOF]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PUBLIC_VM]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.TUBE_PROOF]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PRIVATE_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PUBLIC_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BLOCK_MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: new 
PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BASE_PARITY]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.ROOT_PARITY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BASE_PARITY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.ROOT_PARITY]: new PriorityMemoryQueue(provingJobComparator), }; // holds a copy of the database in memory in order to quickly fulfill requests // this is fine because this broker is the only one that can modify the database - private jobsCache = new Map(); + private jobsCache = new Map(); // as above, but for results - private resultsCache = new Map(); + private resultsCache = new Map(); // keeps track of which jobs are currently being processed // in the event of a crash this information is lost, but that's ok // the next time the broker starts it will recreate jobsCache and still // accept results from the workers - private inProgress = new Map(); + private inProgress = new Map(); // keep track of which proving job has been retried - private retries = new Map(); + private retries = new Map(); + + // a map of promises that will be resolved when a job is settled + private promises = new Map>(); private timeoutPromise: RunningPromise; private timeSource = () => Math.floor(Date.now() / 1000); - private jobTimeoutSec: number; + private jobTimeoutMs: number; private maxRetries: number; public constructor( - private database: ProvingJobDatabase, - { jobTimeoutSec = 30, timeoutIntervalSec = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, - private logger = createDebugLogger('aztec:prover-client:proof-request-broker'), + private database: ProvingBrokerDatabase, + { jobTimeoutMs = 30, 
timeoutIntervalMs = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, + private logger = createDebugLogger('aztec:prover-client:proving-broker'), ) { - this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalSec * 1000); - this.jobTimeoutSec = jobTimeoutSec; + this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalMs); + this.jobTimeoutMs = jobTimeoutMs; this.maxRetries = maxRetries; } @@ -86,7 +91,10 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.info(`Restoring proving job id=${item.id} settled=${!!result}`); this.jobsCache.set(item.id, item); + this.promises.set(item.id, promiseWithResolvers()); + if (result) { + this.promises.get(item.id)!.resolve(result); this.resultsCache.set(item.id, result); } else { this.logger.debug(`Re-enqueuing proving job id=${item.id}`); @@ -101,7 +109,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { return this.timeoutPromise.stop(); } - public async enqueueProvingJob(job: V2ProvingJob): Promise { + public async enqueueProvingJob(job: ProvingJob): Promise { if (this.jobsCache.has(job.id)) { const existing = this.jobsCache.get(job.id); assert.deepStrictEqual(job, existing, 'Duplicate proving job ID'); @@ -113,20 +121,35 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.enqueueJobInternal(job); } - public async removeAndCancelProvingJob(id: V2ProvingJobId): Promise { + public waitForJobToSettle(id: ProvingJobId): Promise { + const promiseWithResolvers = this.promises.get(id); + if (!promiseWithResolvers) { + return Promise.resolve({ status: 'rejected', reason: `Job ${id} not found` }); + } + return promiseWithResolvers.promise; + } + + public async removeAndCancelProvingJob(id: ProvingJobId): Promise { this.logger.info(`Cancelling job id=${id}`); await this.database.deleteProvingJobAndResult(id); + // notify listeners of the cancellation + if (!this.resultsCache.has(id)) { + 
this.promises.get(id)?.resolve({ status: 'rejected', reason: 'Aborted' }); + } + this.jobsCache.delete(id); + this.promises.delete(id); this.resultsCache.delete(id); this.inProgress.delete(id); this.retries.delete(id); } - // eslint-disable-next-line require-await - public async getProvingJobStatus(id: V2ProvingJobId): Promise { + public getProvingJobStatus(id: ProvingJobId): Promise { const result = this.resultsCache.get(id); - if (!result) { + if (result) { + return Promise.resolve(result); + } else { // no result yet, check if we know the item const item = this.jobsCache.get(id); @@ -136,29 +159,26 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } return Promise.resolve({ status: this.inProgress.has(id) ? 'in-progress' : 'in-queue' }); - } else if ('value' in result) { - return Promise.resolve({ status: 'resolved', value: result.value }); - } else { - return Promise.resolve({ status: 'rejected', error: result.error }); } } // eslint-disable-next-line require-await - async getProvingJob( - filter: ProvingJobFilter = {}, - ): Promise<{ job: V2ProvingJob; time: number } | undefined> { - const allowedProofs: ProvingRequestType[] = filter.allowList - ? [...filter.allowList] - : Object.values(ProvingRequestType).filter((x): x is ProvingRequestType => typeof x === 'number'); + async getProvingJob( + filter: ProvingJobFilter = { allowList: [] }, + ): Promise<{ job: ProvingJob; time: number } | undefined> { + const allowedProofs: ProvingRequestType[] = + Array.isArray(filter.allowList) && filter.allowList.length > 0 + ? 
[...filter.allowList] + : Object.values(ProvingRequestType).filter((x): x is ProvingRequestType => typeof x === 'number'); allowedProofs.sort(proofTypeComparator); for (const proofType of allowedProofs) { const queue = this.queues[proofType]; - let job: V2ProvingJob | undefined; + let job: ProvingJob | undefined; // exhaust the queue and make sure we're not sending a job that's already in progress // or has already been completed // this can happen if the broker crashes and restarts - // it's possible agents will report progress or results for jobs that are no longer in the queue + // it's possible agents will report progress or results for jobs that are in the queue (after the restart) while ((job = queue.getImmediate())) { if (!this.inProgress.has(job.id) && !this.resultsCache.has(job.id)) { const time = this.timeSource(); @@ -176,7 +196,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { return undefined; } - async reportProvingJobError(id: V2ProvingJobId, err: Error, retry = false): Promise { + async reportProvingJobError(id: ProvingJobId, err: string, retry = false): Promise { const info = this.inProgress.get(id); const item = this.jobsCache.get(id); const retries = this.retries.get(id) ?? 
0; @@ -202,15 +222,19 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.debug( `Marking proving job id=${id} type=${ProvingRequestType[item.type]} totalAttempts=${retries + 1} as failed`, ); + await this.database.setProvingJobError(id, err); - this.resultsCache.set(id, { error: String(err) }); + + const result: ProvingJobSettledResult = { status: 'rejected', reason: String(err) }; + this.resultsCache.set(id, result); + this.promises.get(id)!.resolve(result); } - reportProvingJobProgress( - id: V2ProvingJobId, + reportProvingJobProgress( + id: ProvingJobId, startedAt: number, - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined> { + filter?: ProvingJobFilter, + ): Promise<{ job: ProvingJob; time: number } | undefined> { const job = this.jobsCache.get(id); if (!job) { this.logger.warn(`Proving job id=${id} does not exist`); @@ -255,7 +279,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } } - async reportProvingJobSuccess(id: V2ProvingJobId, value: V2ProofOutput): Promise { + async reportProvingJobSuccess(id: ProvingJobId, value: ProofUri): Promise { const info = this.inProgress.get(id); const item = this.jobsCache.get(id); const retries = this.retries.get(id) ?? 
0; @@ -273,8 +297,12 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.debug( `Proving job complete id=${id} type=${ProvingRequestType[item.type]} totalAttempts=${retries + 1}`, ); + await this.database.setProvingJobResult(id, value); - this.resultsCache.set(id, { value }); + + const result: ProvingJobSettledResult = { status: 'fulfilled', value }; + this.resultsCache.set(id, result); + this.promises.get(id)!.resolve(result); } private timeoutCheck = () => { @@ -287,8 +315,8 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { continue; } - const secondsSinceLastUpdate = this.timeSource() - metadata.lastUpdatedAt; - if (secondsSinceLastUpdate >= this.jobTimeoutSec) { + const msSinceLastUpdate = (this.timeSource() - metadata.lastUpdatedAt) * 1000; + if (msSinceLastUpdate >= this.jobTimeoutMs) { this.logger.warn(`Proving job id=${id} timed out. Adding it back to the queue.`); this.inProgress.delete(id); this.enqueueJobInternal(item); @@ -296,14 +324,17 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } }; - private enqueueJobInternal(job: V2ProvingJob): void { + private enqueueJobInternal(job: ProvingJob): void { + if (!this.promises.has(job.id)) { + this.promises.set(job.id, promiseWithResolvers()); + } this.queues[job.type].put(job); this.logger.debug(`Enqueued new proving job id=${job.id}`); } } type ProvingQueues = { - [K in ProvingRequestType]: PriorityMemoryQueue; + [K in ProvingRequestType]: PriorityMemoryQueue; }; /** @@ -312,10 +343,12 @@ type ProvingQueues = { * @param b - Another proving job * @returns A number indicating the relative priority of the two proving jobs */ -function provingJobComparator(a: V2ProvingJob, b: V2ProvingJob): -1 | 0 | 1 { - if (a.blockNumber < b.blockNumber) { +function provingJobComparator(a: ProvingJob, b: ProvingJob): -1 | 0 | 1 { + const aBlockNumber = a.blockNumber ?? 0; + const bBlockNumber = b.blockNumber ?? 
0; + if (aBlockNumber < bBlockNumber) { return -1; - } else if (a.blockNumber > b.blockNumber) { + } else if (aBlockNumber > bBlockNumber) { return 1; } else { return 0; diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database.ts similarity index 63% rename from yarn-project/prover-client/src/proving_broker/proving_job_database.ts rename to yarn-project/prover-client/src/proving_broker/proving_broker_database.ts index 99cae7147ac..b5adf91cb89 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database.ts @@ -1,30 +1,25 @@ -import { - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobResult, -} from '@aztec/circuit-types'; +import { type ProofUri, type ProvingJob, type ProvingJobId, type ProvingJobSettledResult } from '@aztec/circuit-types'; /** * A database for storing proof requests and their results */ -export interface ProvingJobDatabase { +export interface ProvingBrokerDatabase { /** * Saves a proof request so it can be retrieved later * @param request - The proof request to save */ - addProvingJob(request: V2ProvingJob): Promise; + addProvingJob(request: ProvingJob): Promise; /** * Removes a proof request from the backend * @param id - The ID of the proof request to remove */ - deleteProvingJobAndResult(id: V2ProvingJobId): Promise; + deleteProvingJobAndResult(id: ProvingJobId): Promise; /** * Returns an iterator over all saved proving jobs */ - allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]>; + allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]>; /** * Saves the result of a proof request @@ -32,7 +27,7 @@ export interface ProvingJobDatabase { * @param ProvingRequestType - The type of proof that was requested * @param value - The result of the proof request */ - 
setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise; + setProvingJobResult(id: ProvingJobId, value: ProofUri): Promise; /** * Saves an error that occurred while processing a proof request @@ -40,5 +35,5 @@ export interface ProvingJobDatabase { * @param ProvingRequestType - The type of proof that was requested * @param err - The error that occurred while processing the proof request */ - setProvingJobError(id: V2ProvingJobId, err: Error): Promise; + setProvingJobError(id: ProvingJobId, err: string): Promise; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts new file mode 100644 index 00000000000..0a737aadd43 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts @@ -0,0 +1,43 @@ +import type { ProofUri, ProvingJob, ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; + +import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; + +export class InMemoryBrokerDatabase implements ProvingBrokerDatabase { + private jobs = new Map(); + private results = new Map(); + + getProvingJob(id: ProvingJobId): ProvingJob | undefined { + return this.jobs.get(id); + } + + getProvingJobResult(id: ProvingJobId): ProvingJobSettledResult | undefined { + return this.results.get(id); + } + + addProvingJob(request: ProvingJob): Promise { + this.jobs.set(request.id, request); + return Promise.resolve(); + } + + setProvingJobResult(id: ProvingJobId, value: ProofUri): Promise { + this.results.set(id, { status: 'fulfilled', value }); + return Promise.resolve(); + } + + setProvingJobError(id: ProvingJobId, reason: string): Promise { + this.results.set(id, { status: 'rejected', reason }); + return Promise.resolve(); + } + + deleteProvingJobAndResult(id: ProvingJobId): Promise { + this.jobs.delete(id); + this.results.delete(id); + return Promise.resolve(); + } + + 
*allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]> { + for (const item of this.jobs.values()) { + yield [item, this.results.get(item.id)] as const; + } + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts new file mode 100644 index 00000000000..909b2d6e4e1 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts @@ -0,0 +1,45 @@ +import { type ProofUri, ProvingJob, type ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; +import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; + +import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; + +export class KVBrokerDatabase implements ProvingBrokerDatabase { + private jobs: AztecMap; + private jobResults: AztecMap; + + constructor(private store: AztecKVStore) { + this.jobs = store.openMap('proving_jobs'); + this.jobResults = store.openMap('proving_job_results'); + } + + async addProvingJob(job: ProvingJob): Promise { + await this.jobs.set(job.id, jsonStringify(job)); + } + + *allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]> { + for (const jobStr of this.jobs.values()) { + const job = jsonParseWithSchema(jobStr, ProvingJob); + const resultStr = this.jobResults.get(job.id); + const result = resultStr ? 
jsonParseWithSchema(resultStr, ProvingJobSettledResult) : undefined; + yield [job, result]; + } + } + + deleteProvingJobAndResult(id: ProvingJobId): Promise { + return this.store.transaction(() => { + void this.jobs.delete(id); + void this.jobResults.delete(id); + }); + } + + async setProvingJobError(id: ProvingJobId, reason: string): Promise { + const result: ProvingJobSettledResult = { status: 'rejected', reason }; + await this.jobResults.set(id, jsonStringify(result)); + } + + async setProvingJobResult(id: ProvingJobId, value: ProofUri): Promise { + const result: ProvingJobSettledResult = { status: 'fulfilled', value }; + await this.jobResults.set(id, jsonStringify(result)); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts deleted file mode 100644 index 493cab538a5..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { - type ProvingRequestType, - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobStatus, -} from '@aztec/circuit-types'; - -/** - * An interface for the proving orchestrator. 
The producer uses this to enqueue jobs for agents - */ -export interface ProvingJobProducer { - /** - * Enqueues a proving job - * @param job - The job to enqueue - */ - enqueueProvingJob(job: V2ProvingJob): Promise; - - /** - * Cancels a proving job and clears all of its - * @param id - The ID of the job to cancel - */ - removeAndCancelProvingJob(id: V2ProvingJobId): Promise; - - /** - * Returns the current status fof the proving job - * @param id - The ID of the job to get the status of - */ - getProvingJobStatus(id: V2ProvingJobId): Promise; -} - -export interface ProvingJobFilter { - allowList?: T; -} - -/** - * An interface for proving agents to request jobs and report results - */ -export interface ProvingJobConsumer { - /** - * Gets a proving job to work on - * @param filter - Optional filter for the type of job to get - */ - getProvingJob( - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined>; - - /** - * Marks a proving job as successful - * @param id - The ID of the job to report success for - * @param result - The result of the job - */ - reportProvingJobSuccess(id: V2ProvingJobId, result: V2ProofOutput): Promise; - - /** - * Marks a proving job as errored - * @param id - The ID of the job to report an error for - * @param err - The error that occurred while processing the job - * @param retry - Whether to retry the job - */ - reportProvingJobError(id: V2ProvingJobId, err: Error, retry?: boolean): Promise; - - /** - * Sends a heartbeat to the broker to indicate that the agent is still working on the given proving job - * @param id - The ID of the job to report progress for - * @param startedAt - The unix epoch when the job was started - * @param filter - Optional filter for the type of job to get - */ - reportProvingJobProgress( - id: V2ProvingJobId, - startedAt: number, - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined>; -} diff --git 
a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts index 724d1d4606f..364703b23cf 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts @@ -1,12 +1,13 @@ -import { ProvingRequestType, type V2ProvingJobId, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { type ProvingJobId, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js/testing'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; import { jest } from '@jest/globals'; import { MockProver } from '../test/mock_prover.js'; -import { ProvingJobController, ProvingJobStatus } from './proving_job_controller.js'; +import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; describe('ProvingJobController', () => { let prover: MockProver; @@ -17,10 +18,9 @@ describe('ProvingJobController', () => { prover = new MockProver(); onComplete = jest.fn(); controller = new ProvingJobController( + '1' as ProvingJobId, { type: ProvingRequestType.BASE_PARITY, - blockNumber: 1, - id: '1' as V2ProvingJobId, inputs: makeBaseParityInputs(), }, 0, @@ -30,18 +30,25 @@ describe('ProvingJobController', () => { }); it('reports IDLE status initially', () => { - expect(controller.getStatus()).toBe(ProvingJobStatus.IDLE); + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.IDLE); }); it('reports PROVING status while busy', () => { controller.start(); - expect(controller.getStatus()).toBe(ProvingJobStatus.PROVING); + 
expect(controller.getStatus()).toBe(ProvingJobControllerStatus.PROVING); }); it('reports DONE status after job is done', async () => { controller.start(); await sleep(1); // give promises a chance to complete - expect(controller.getStatus()).toBe(ProvingJobStatus.DONE); + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.DONE); + }); + + it('reports ABORTED status after job is aborted', async () => { + controller.start(); + controller.abort(); + await sleep(1); // give promises a chance to complete + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.ABORTED); }); it('calls onComplete with the proof', async () => { @@ -54,10 +61,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); // give promises a chance to complete - expect(onComplete).toHaveBeenCalledWith(undefined, { - type: ProvingRequestType.BASE_PARITY, - value: resp, - }); + expect(onComplete).toHaveBeenCalledWith('1', ProvingRequestType.BASE_PARITY, undefined, resp); }); it('calls onComplete with the error', async () => { @@ -66,7 +70,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); - expect(onComplete).toHaveBeenCalledWith(err, undefined); + expect(onComplete).toHaveBeenCalledWith('1', ProvingRequestType.BASE_PARITY, err, undefined); }); it('does not crash if onComplete throws', async () => { @@ -88,4 +92,30 @@ describe('ProvingJobController', () => { await sleep(1); expect(onComplete).toHaveBeenCalled(); }); + + it('does not call onComplete if abort is called', async () => { + const { promise, resolve } = promiseWithResolvers(); + jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); + + controller.start(); + + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + + controller.abort(); + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + + // simulate a prover that does not respect signals, still completes the proof after aborting + resolve( + makePublicInputsAndRecursiveProof( + 
makeParityPublicInputs(), + makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeFakeHonk(), + ), + ); + + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + }); }); diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts index 53d18b476a0..2ce47cbe6f7 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts @@ -1,53 +1,68 @@ import { + type ProvingJobId, + type ProvingJobInputs, + type ProvingJobResultsMap, ProvingRequestType, type ServerCircuitProver, - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, } from '@aztec/circuit-types'; -export enum ProvingJobStatus { +export enum ProvingJobControllerStatus { IDLE = 'idle', PROVING = 'proving', DONE = 'done', + ABORTED = 'aborted', } -type ProvingJobCompletionCallback = ( - error: Error | undefined, - result: V2ProofOutput | undefined, -) => void | Promise; +interface ProvingJobCompletionCallback { + ( + jobId: ProvingJobId, + type: T, + error: Error | undefined, + result: ProvingJobResultsMap[T] | undefined, + ): void | Promise; +} export class ProvingJobController { - private status: ProvingJobStatus = ProvingJobStatus.IDLE; + private status: ProvingJobControllerStatus = ProvingJobControllerStatus.IDLE; private promise?: Promise; private abortController = new AbortController(); constructor( - private job: V2ProvingJob, + private jobId: ProvingJobId, + private inputs: ProvingJobInputs, private startedAt: number, private circuitProver: ServerCircuitProver, private onComplete: ProvingJobCompletionCallback, ) {} public start(): void { - if (this.status !== ProvingJobStatus.IDLE) { + if (this.status !== ProvingJobControllerStatus.IDLE) { return; } - this.status = ProvingJobStatus.PROVING; + this.status = ProvingJobControllerStatus.PROVING; this.promise = 
this.generateProof() .then( result => { - this.status = ProvingJobStatus.DONE; - return this.onComplete(undefined, result); + if (this.status === ProvingJobControllerStatus.ABORTED) { + return; + } + + this.status = ProvingJobControllerStatus.DONE; + return this.onComplete(this.jobId, this.inputs.type, undefined, result); }, error => { - this.status = ProvingJobStatus.DONE; + if (this.status === ProvingJobControllerStatus.ABORTED) { + return; + } + if (error.name === 'AbortError') { // Ignore abort errors return; } - return this.onComplete(error, undefined); + + this.status = ProvingJobControllerStatus.DONE; + return this.onComplete(this.jobId, this.inputs.type, error, undefined); }, ) .catch(_ => { @@ -55,88 +70,81 @@ export class ProvingJobController { }); } - public getStatus(): ProvingJobStatus { + public getStatus(): ProvingJobControllerStatus { return this.status; } public abort(): void { - if (this.status !== ProvingJobStatus.PROVING) { + if (this.status !== ProvingJobControllerStatus.PROVING) { return; } + this.status = ProvingJobControllerStatus.ABORTED; this.abortController.abort(); } - public getJobId(): V2ProvingJobId { - return this.job.id; + public getJobId(): ProvingJobId { + return this.jobId; } public getStartedAt(): number { return this.startedAt; } - private async generateProof(): Promise { - const { type, inputs } = this.job; + public getProofTypeName(): string { + return ProvingRequestType[this.inputs.type]; + } + + private async generateProof(): Promise { + const { type, inputs } = this.inputs; const signal = this.abortController.signal; switch (type) { case ProvingRequestType.PUBLIC_VM: { - const value = await this.circuitProver.getAvmProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getAvmProof(inputs, signal); } case ProvingRequestType.PRIVATE_BASE_ROLLUP: { - const value = await this.circuitProver.getPrivateBaseRollupProof(inputs, signal); - return { type, value }; + return await 
this.circuitProver.getPrivateBaseRollupProof(inputs, signal); } case ProvingRequestType.PUBLIC_BASE_ROLLUP: { - const value = await this.circuitProver.getPublicBaseRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getPublicBaseRollupProof(inputs, signal); } case ProvingRequestType.MERGE_ROLLUP: { - const value = await this.circuitProver.getMergeRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getMergeRollupProof(inputs, signal); } case ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP: { - const value = await this.circuitProver.getEmptyBlockRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getEmptyBlockRootRollupProof(inputs, signal); } case ProvingRequestType.BLOCK_ROOT_ROLLUP: { - const value = await this.circuitProver.getBlockRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBlockRootRollupProof(inputs, signal); } case ProvingRequestType.BLOCK_MERGE_ROLLUP: { - const value = await this.circuitProver.getBlockMergeRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBlockMergeRollupProof(inputs, signal); } case ProvingRequestType.ROOT_ROLLUP: { - const value = await this.circuitProver.getRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getRootRollupProof(inputs, signal); } case ProvingRequestType.BASE_PARITY: { - const value = await this.circuitProver.getBaseParityProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBaseParityProof(inputs, signal); } case ProvingRequestType.ROOT_PARITY: { - const value = await this.circuitProver.getRootParityProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getRootParityProof(inputs, signal); } case ProvingRequestType.PRIVATE_KERNEL_EMPTY: { - const value = await this.circuitProver.getEmptyPrivateKernelProof(inputs, signal); - 
return { type, value }; + return await this.circuitProver.getEmptyPrivateKernelProof(inputs, signal); } case ProvingRequestType.TUBE_PROOF: { - const value = await this.circuitProver.getTubeProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getTubeProof(inputs, signal); } default: { diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts deleted file mode 100644 index 19acfaf88e7..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts +++ /dev/null @@ -1,43 +0,0 @@ -import type { V2ProofOutput, V2ProvingJob, V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; - -import { type ProvingJobDatabase } from '../proving_job_database.js'; - -export class InMemoryDatabase implements ProvingJobDatabase { - private jobs = new Map(); - private results = new Map(); - - getProvingJob(id: V2ProvingJobId): V2ProvingJob | undefined { - return this.jobs.get(id); - } - - getProvingJobResult(id: V2ProvingJobId): V2ProvingJobResult | undefined { - return this.results.get(id); - } - - addProvingJob(request: V2ProvingJob): Promise { - this.jobs.set(request.id, request); - return Promise.resolve(); - } - - setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise { - this.results.set(id, { value }); - return Promise.resolve(); - } - - setProvingJobError(id: V2ProvingJobId, error: Error): Promise { - this.results.set(id, { error: String(error) }); - return Promise.resolve(); - } - - deleteProvingJobAndResult(id: V2ProvingJobId): Promise { - this.jobs.delete(id); - this.results.delete(id); - return Promise.resolve(); - } - - *allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]> { - for (const item of this.jobs.values()) { - yield [item, this.results.get(item.id)] as const; - } - } -} diff --git 
a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts deleted file mode 100644 index c03684b1bf3..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { type V2ProofOutput, V2ProvingJob, type V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; -import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; - -import { type ProvingJobDatabase } from '../proving_job_database.js'; - -export class PersistedProvingJobDatabase implements ProvingJobDatabase { - private jobs: AztecMap; - private jobResults: AztecMap; - - constructor(private store: AztecKVStore) { - this.jobs = store.openMap('proving_jobs'); - this.jobResults = store.openMap('proving_job_results'); - } - - async addProvingJob(job: V2ProvingJob): Promise { - await this.jobs.set(job.id, jsonStringify(job)); - } - - *allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]> { - for (const jobStr of this.jobs.values()) { - const job = jsonParseWithSchema(jobStr, V2ProvingJob); - const resultStr = this.jobResults.get(job.id); - const result = resultStr ? 
jsonParseWithSchema(resultStr, V2ProvingJobResult) : undefined; - yield [job, result]; - } - } - - deleteProvingJobAndResult(id: V2ProvingJobId): Promise { - return this.store.transaction(() => { - void this.jobs.delete(id); - void this.jobResults.delete(id); - }); - } - - async setProvingJobError(id: V2ProvingJobId, err: Error): Promise { - const res: V2ProvingJobResult = { error: err.message }; - await this.jobResults.set(id, jsonStringify(res)); - } - - async setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise { - const res: V2ProvingJobResult = { value }; - await this.jobResults.set(id, jsonStringify(res)); - } -} diff --git a/yarn-project/prover-client/src/proving_broker/rpc.ts b/yarn-project/prover-client/src/proving_broker/rpc.ts new file mode 100644 index 00000000000..9895e7937dc --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/rpc.ts @@ -0,0 +1,64 @@ +import { + type GetProvingJobResponse, + ProofUri, + ProvingJob, + type ProvingJobBroker, + type ProvingJobConsumer, + ProvingJobId, + type ProvingJobProducer, + ProvingJobSettledResult, + ProvingJobStatus, + ProvingRequestType, +} from '@aztec/circuit-types'; +import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; +import { type SafeJsonRpcServer, createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server'; +import { type ApiSchemaFor, optional } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +const ProvingJobFilterSchema = z.object({ + allowList: z.array(z.nativeEnum(ProvingRequestType)), +}); + +const GetProvingJobResponse = z.object({ + job: ProvingJob, + time: z.number(), +}); + +export const ProvingJobProducerSchema: ApiSchemaFor = { + enqueueProvingJob: z.function().args(ProvingJob).returns(z.void()), + getProvingJobStatus: z.function().args(ProvingJobId).returns(ProvingJobStatus), + removeAndCancelProvingJob: z.function().args(ProvingJobId).returns(z.void()), + waitForJobToSettle: 
z.function().args(ProvingJobId).returns(ProvingJobSettledResult), +}; + +export const ProvingJobConsumerSchema: ApiSchemaFor = { + getProvingJob: z.function().args(optional(ProvingJobFilterSchema)).returns(GetProvingJobResponse.optional()), + reportProvingJobError: z.function().args(ProvingJobId, z.string(), optional(z.boolean())).returns(z.void()), + reportProvingJobProgress: z + .function() + .args(ProvingJobId, z.number(), optional(ProvingJobFilterSchema)) + .returns(GetProvingJobResponse.optional()), + reportProvingJobSuccess: z.function().args(ProvingJobId, ProofUri).returns(z.void()), +}; + +export const ProvingJobBrokerSchema: ApiSchemaFor = { + ...ProvingJobConsumerSchema, + ...ProvingJobProducerSchema, +}; + +export function createProvingBrokerServer(broker: ProvingJobBroker): SafeJsonRpcServer { + return createSafeJsonRpcServer(broker, ProvingJobBrokerSchema); +} + +export function createProvingJobBrokerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobBroker { + return createSafeJsonRpcClient(url, ProvingJobBrokerSchema, false, 'proverBroker', fetch); +} + +export function createProvingJobProducerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobProducer { + return createSafeJsonRpcClient(url, ProvingJobProducerSchema, false, 'provingJobProducer', fetch); +} + +export function createProvingJobConsumerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobConsumer { + return createSafeJsonRpcClient(url, ProvingJobConsumerSchema, false, 'provingJobConsumer', fetch); +} diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index 118ff214e14..c0ea23c2643 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -1,5 +1,10 @@ import { type ProofAndVerificationKey, + type ProvingJob, + type ProvingJobId, + type ProvingJobProducer, + type ProvingJobSettledResult, + type 
ProvingJobStatus, type PublicInputsAndRecursiveProof, type ServerCircuitProver, makeProofAndVerificationKey, @@ -37,6 +42,52 @@ import { makeParityPublicInputs, makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; +import { times } from '@aztec/foundation/collection'; + +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; +import { ProvingAgent } from '../proving_broker/proving_agent.js'; +import { ProvingBroker } from '../proving_broker/proving_broker.js'; +import { InMemoryBrokerDatabase } from '../proving_broker/proving_broker_database/memory.js'; + +export class TestBroker implements ProvingJobProducer { + private broker = new ProvingBroker(new InMemoryBrokerDatabase()); + private agents: ProvingAgent[]; + + constructor( + agentCount: number, + prover: ServerCircuitProver, + private proofStore: ProofStore = new InlineProofStore(), + ) { + this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover)); + } + + public async start() { + await this.broker.start(); + this.agents.forEach(agent => agent.start()); + } + + public async stop() { + await Promise.all(this.agents.map(agent => agent.stop())); + await this.broker.stop(); + } + + public getProofStore(): ProofStore { + return this.proofStore; + } + + enqueueProvingJob(job: ProvingJob): Promise { + return this.broker.enqueueProvingJob(job); + } + getProvingJobStatus(id: ProvingJobId): Promise { + return this.broker.getProvingJobStatus(id); + } + removeAndCancelProvingJob(id: ProvingJobId): Promise { + return this.broker.removeAndCancelProvingJob(id); + } + waitForJobToSettle(id: ProvingJobId): Promise { + return this.broker.waitForJobToSettle(id); + } +} export class MockProver implements ServerCircuitProver { constructor() {} diff --git a/yarn-project/prover-client/src/tx-prover/factory.ts b/yarn-project/prover-client/src/tx-prover/factory.ts index d81ff2e15e7..07a65a8c57c 100644 --- a/yarn-project/prover-client/src/tx-prover/factory.ts +++ 
b/yarn-project/prover-client/src/tx-prover/factory.ts @@ -1,9 +1,14 @@ +import { type ProvingJobBroker } from '@aztec/circuit-types'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ProverClientConfig } from '../config.js'; import { TxProver } from './tx-prover.js'; -export function createProverClient(config: ProverClientConfig, telemetry: TelemetryClient = new NoopTelemetryClient()) { - return TxProver.new(config, telemetry); +export function createProverClient( + config: ProverClientConfig, + broker: ProvingJobBroker, + telemetry: TelemetryClient = new NoopTelemetryClient(), +) { + return TxProver.new(config, broker, telemetry); } diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 7fc059d902a..9bd34df56ca 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -1,40 +1,59 @@ -import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; +import { type ACVMConfig, type BBConfig, BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; import { + type ActualProverConfig, type EpochProver, type EpochProverManager, type MerkleTreeWriteOperations, - type ProvingJobSource, + type ProverCache, + type ProvingJobBroker, + type ProvingJobConsumer, + type ProvingJobProducer, type ServerCircuitProver, } from '@aztec/circuit-types/interfaces'; import { Fr } from '@aztec/circuits.js'; +import { times } from '@aztec/foundation/collection'; +import { createDebugLogger } from '@aztec/foundation/log'; import { NativeACVMSimulator } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; +import { join } from 'path'; + import { type ProverClientConfig } from '../config.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { MemoryProvingQueue } from 
'../prover-agent/memory-proving-queue.js'; -import { ProverAgent } from '../prover-agent/prover-agent.js'; +import { CachingBrokerFacade } from '../proving_broker/caching_broker_facade.js'; +import { InlineProofStore } from '../proving_broker/proof_store.js'; +import { InMemoryProverCache } from '../proving_broker/prover_cache/memory.js'; +import { ProvingAgent } from '../proving_broker/proving_agent.js'; /** * A prover factory. * TODO(palla/prover-node): Rename this class */ export class TxProver implements EpochProverManager { - private queue: MemoryProvingQueue; private running = false; + private agents: ProvingAgent[] = []; + + private cacheDir?: string; private constructor( private config: ProverClientConfig, private telemetry: TelemetryClient, - private agent?: ProverAgent, + private orchestratorClient: ProvingJobProducer, + private agentClient?: ProvingJobConsumer, + private log = createDebugLogger('aztec:prover-client:tx-prover'), ) { // TODO(palla/prover-node): Cache the paddingTx here, and not in each proving orchestrator, // so it can be reused across multiple ones and not recomputed every time. - this.queue = new MemoryProvingQueue(telemetry, config.proverJobTimeoutMs, config.proverJobPollIntervalMs); + this.cacheDir = this.config.cacheDir ? 
join(this.config.cacheDir, `tx_prover_${this.config.proverId}`) : undefined; } - public createEpochProver(db: MerkleTreeWriteOperations): EpochProver { - return new ProvingOrchestrator(db, this.queue, this.telemetry, this.config.proverId); + public createEpochProver(db: MerkleTreeWriteOperations, cache: ProverCache = new InMemoryProverCache()): EpochProver { + return new ProvingOrchestrator( + db, + new CachingBrokerFacade(this.orchestratorClient, cache), + this.telemetry, + this.config.proverId, + ); } public getProverId(): Fr { @@ -44,13 +63,12 @@ export class TxProver implements EpochProverManager { async updateProverConfig(config: Partial): Promise { const newConfig = { ...this.config, ...config }; - if (newConfig.realProofs !== this.config.realProofs && this.agent) { - const circuitProver = await TxProver.buildCircuitProver(newConfig, this.telemetry); - this.agent.setCircuitProver(circuitProver); - } - - if (this.config.proverAgentConcurrency !== newConfig.proverAgentConcurrency) { - await this.agent?.setMaxConcurrency(newConfig.proverAgentConcurrency); + if ( + newConfig.realProofs !== this.config.realProofs || + newConfig.proverAgentCount !== this.config.proverAgentCount + ) { + await this.stopAgents(); + await this.createAndStartAgents(); } if (!this.config.realProofs && newConfig.realProofs) { @@ -63,15 +81,13 @@ export class TxProver implements EpochProverManager { /** * Starts the prover instance */ - public start() { + public async start(): Promise { if (this.running) { return Promise.resolve(); } this.running = true; - this.queue.start(); - this.agent?.start(this.queue); - return Promise.resolve(); + await this.createAndStartAgents(); } /** @@ -82,10 +98,7 @@ export class TxProver implements EpochProverManager { return; } this.running = false; - - // TODO(palla/prover-node): Keep a reference to all proving orchestrators that are alive and stop them? 
- await this.agent?.stop(); - await this.queue.stop(); + await this.stopAgents(); } /** @@ -95,36 +108,55 @@ export class TxProver implements EpochProverManager { * @param worldStateSynchronizer - An instance of the world state * @returns An instance of the prover, constructed and started. */ - public static async new(config: ProverClientConfig, telemetry: TelemetryClient) { - const agent = config.proverAgentEnabled - ? new ProverAgent( - await TxProver.buildCircuitProver(config, telemetry), - config.proverAgentConcurrency, - config.proverAgentPollInterval, - ) - : undefined; - - const prover = new TxProver(config, telemetry, agent); + public static async new(config: ProverClientConfig, broker: ProvingJobBroker, telemetry: TelemetryClient) { + const prover = new TxProver(config, telemetry, broker, broker); await prover.start(); return prover; } - private static async buildCircuitProver( - config: ProverClientConfig, - telemetry: TelemetryClient, - ): Promise { - if (config.realProofs) { - return await BBNativeRollupProver.new(config, telemetry); + public getProvingJobSource(): ProvingJobConsumer { + if (!this.agentClient) { + throw new Error('Agent client not provided'); + } + + return this.agentClient; + } + + private async createAndStartAgents(): Promise { + if (this.agents.length > 0) { + throw new Error('Agents already started'); + } + + if (!this.agentClient) { + throw new Error('Agent client not provided'); } - const simulationProvider = config.acvmBinaryPath - ? 
new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath) - : undefined; + const proofStore = new InlineProofStore(); + const prover = await buildServerCircuitProver(this.config, this.telemetry); + this.agents = times( + this.config.proverAgentCount, + () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs), + ); + + await Promise.all(this.agents.map(agent => agent.start())); + } - return new TestCircuitProver(telemetry, simulationProvider, config); + private async stopAgents() { + await Promise.all(this.agents.map(agent => agent.stop())); } +} - public getProvingJobSource(): ProvingJobSource { - return this.queue; +export function buildServerCircuitProver( + config: ActualProverConfig & ACVMConfig & BBConfig, + telemetry: TelemetryClient, +): Promise { + if (config.realProofs) { + return BBNativeRollupProver.new(config, telemetry); } + + const simulationProvider = config.acvmBinaryPath + ? new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath) + : undefined; + + return Promise.resolve(new TestCircuitProver(telemetry, simulationProvider, config)); } diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index 12894b5cd0d..34a59b0a338 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -1,4 +1,11 @@ import { type ArchiverConfig, archiverConfigMappings, getArchiverConfigFromEnv } from '@aztec/archiver'; +import { type ACVMConfig, type BBConfig } from '@aztec/bb-prover'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMappingsType, bigintConfigHelper, @@ -7,7 +14,12 @@ import { } from '@aztec/foundation/config'; import { type DataStoreConfig, dataConfigMappings, getDataConfigFromEnv } from '@aztec/kv-store/config'; import { type P2PConfig, getP2PConfigFromEnv, 
p2pConfigMappings } from '@aztec/p2p'; -import { type ProverClientConfig, getProverEnvVars, proverClientConfigMappings } from '@aztec/prover-client'; +import { + type ProverClientConfig, + bbConfigMappings, + getProverEnvVars, + proverClientConfigMappings, +} from '@aztec/prover-client'; import { type PublisherConfig, type TxSenderConfig, @@ -107,3 +119,16 @@ export function getProverNodeConfigFromEnv(): ProverNodeConfig { ...getConfigFromMappings(proverBondManagerConfigMappings), }; } + +export function getProverNodeBrokerConfigFromEnv(): ProverBrokerConfig { + return { + ...getConfigFromMappings(proverBrokerConfigMappings), + }; +} + +export function getProverNodeAgentConfigFromEnv(): ProverAgentConfig & BBConfig & ACVMConfig { + return { + ...getConfigFromMappings(proverAgentConfigMappings), + ...getConfigFromMappings(bbConfigMappings), + }; +} diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 12ac2e0de92..2f54b4b7f7d 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,22 +1,25 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; -import { type ProverCoordination } from '@aztec/circuit-types'; +import { type ProverCoordination, type ProvingJobBroker } from '@aztec/circuit-types'; import { createEthereumChain } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type DataStoreConfig } from '@aztec/kv-store/config'; import { RollupAbi } from '@aztec/l1-artifacts'; import { createProverClient } from '@aztec/prover-client'; +import { createAndStartProvingBroker } from '@aztec/prover-client/broker'; import { L1Publisher } from '@aztec/sequencer-client'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createWorldStateSynchronizer } from 
'@aztec/world-state'; +import { join } from 'path'; import { createPublicClient, getAddress, getContract, http } from 'viem'; import { createBondManager } from './bond/factory.js'; import { type ProverNodeConfig, type QuoteProviderConfig } from './config.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { ProverCacheManager } from './prover-cache/cache_manager.js'; import { createProverCoordination } from './prover-coordination/factory.js'; import { ProverNode } from './prover-node.js'; import { HttpQuoteProvider } from './quote-provider/http.js'; @@ -32,6 +35,7 @@ export async function createProverNode( aztecNodeTxProvider?: ProverCoordination; archiver?: Archiver; publisher?: L1Publisher; + broker?: ProvingJobBroker; } = {}, ) { const telemetry = deps.telemetry ?? new NoopTelemetryClient(); @@ -43,7 +47,8 @@ export async function createProverNode( const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, archiver, telemetry); await worldStateSynchronizer.start(); - const prover = await createProverClient(config, telemetry); + const broker = deps.broker ?? (await createAndStartProvingBroker(config)); + const prover = await createProverClient(config, broker, telemetry); // REFACTOR: Move publisher out of sequencer package and into an L1-related package const publisher = deps.publisher ?? new L1Publisher(config, telemetry); @@ -72,8 +77,11 @@ export async function createProverNode( const walletClient = publisher.getClient(); const bondManager = await createBondManager(rollupContract, walletClient, config); + const cacheDir = config.cacheDir ? 
join(config.cacheDir, `prover_${config.proverId}`) : undefined; + const cacheManager = new ProverCacheManager(cacheDir); + return new ProverNode( - prover!, + prover, publisher, archiver, archiver, @@ -86,6 +94,7 @@ epochMonitor, bondManager, telemetry, + cacheManager, proverNodeConfig, ); } diff --git a/yarn-project/prover-node/src/prover-cache/cache_manager.ts b/yarn-project/prover-node/src/prover-cache/cache_manager.ts new file mode 100644 index 00000000000..b15693ecffe --- /dev/null +++ b/yarn-project/prover-node/src/prover-cache/cache_manager.ts @@ -0,0 +1,69 @@ +import { type ProverCache } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { InMemoryProverCache } from '@aztec/prover-client'; + +import { type Dirent } from 'fs'; +import { mkdir, readFile, readdir, rm, writeFile } from 'fs/promises'; +import { join } from 'path'; + +import { KVProverCache } from './kv_cache.js'; + +const EPOCH_DIR_PREFIX = 'epoch'; +const EPOCH_DIR_SEPARATOR = '_'; +const EPOCH_HASH_FILENAME = 'epoch_hash.txt'; + +export class ProverCacheManager { + constructor(private cacheDir?: string, private log = createDebugLogger('aztec:prover-node:cache-manager')) {} + + public async openCache(epochNumber: bigint, epochHash: Buffer): Promise { + if (!this.cacheDir) { + return new InMemoryProverCache(); + } + + const epochDir = EPOCH_DIR_PREFIX + EPOCH_DIR_SEPARATOR + epochNumber; + const dataDir = join(this.cacheDir, epochDir); + + const storedEpochHash = await readFile(join(dataDir, EPOCH_HASH_FILENAME), 'utf8').catch(() => Buffer.alloc(0)); + if (storedEpochHash.toString() !== epochHash.toString('hex')) { + await rm(dataDir, { recursive: true, force: true }); + } + + await mkdir(dataDir, { recursive: true }); + await writeFile(join(dataDir, EPOCH_HASH_FILENAME), epochHash.toString('hex')); + + const store = AztecLmdbStore.open(dataDir); + 
this.log.debug(`Created new database for epoch ${epochNumber} at ${dataDir}`); + const cleanup = () => store.close(); + return new KVProverCache(store, cleanup); + } + + /** + * Removes all caches for epochs up to and including the given epoch + * @param upToAndIncludingEpoch - The epoch number up to which to remove caches + */ + public async removeStaleCaches(upToAndIncludingEpoch: bigint): Promise { + if (!this.cacheDir) { + return; + } + + const entries: Dirent[] = await readdir(this.cacheDir, { withFileTypes: true }).catch(() => []); + + for (const item of entries) { + if (!item.isDirectory()) { + continue; + } + + const [prefix, epochNumber] = item.name.split(EPOCH_DIR_SEPARATOR); + if (prefix !== EPOCH_DIR_PREFIX) { + continue; + } + + const epochNumberInt = BigInt(epochNumber); + if (epochNumberInt <= upToAndIncludingEpoch) { + this.log.info(`Removing old epoch database for epoch ${epochNumberInt} at ${join(this.cacheDir, item.name)}`); + await rm(join(this.cacheDir, item.name), { recursive: true }); + } + } + } +} diff --git a/yarn-project/prover-node/src/prover-cache/kv_cache.ts b/yarn-project/prover-node/src/prover-cache/kv_cache.ts new file mode 100644 index 00000000000..82b216e384a --- /dev/null +++ b/yarn-project/prover-node/src/prover-cache/kv_cache.ts @@ -0,0 +1,27 @@ +import type { ProverCache, ProvingJobStatus } from '@aztec/circuit-types'; +import type { AztecKVStore, AztecMap } from '@aztec/kv-store'; + +export class KVProverCache implements ProverCache { + private proofs: AztecMap; + + constructor(store: AztecKVStore, private cleanup?: () => Promise) { + this.proofs = store.openMap('prover_node_proof_status'); + } + + getProvingJobStatus(jobId: string): Promise { + const item = this.proofs.get(jobId); + if (!item) { + return Promise.resolve({ status: 'not-found' }); + } + + return Promise.resolve(JSON.parse(item)); + } + + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise { + return this.proofs.set(jobId, 
JSON.stringify(status)); + } + + async close(): Promise { + await this.cleanup?.(); + } +} diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index ce7251dc344..008b2443cc4 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -7,11 +7,12 @@ import { type L2Block, type L2BlockSource, type MerkleTreeWriteOperations, + type ProverCache, type ProverCoordination, WorldStateRunningState, type WorldStateSynchronizer, } from '@aztec/circuit-types'; -import { type ContractDataSource, EthAddress } from '@aztec/circuits.js'; +import { type ContractDataSource, EthAddress, Fr } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; @@ -35,6 +36,7 @@ import { type BondManager } from './bond/bond-manager.js'; import { type EpochProvingJob } from './job/epoch-proving-job.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { ProverCacheManager } from './prover-cache/cache_manager.js'; import { ProverNode, type ProverNodeOptions } from './prover-node.js'; import { type QuoteProvider } from './quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -102,6 +104,7 @@ describe('prover-node', () => { epochMonitor, bondManager, telemetryClient, + new ProverCacheManager(), config, ); @@ -139,7 +142,7 @@ describe('prover-node', () => { quoteSigner.sign.mockImplementation(payload => Promise.resolve(new EpochProofQuote(payload, Signature.empty()))); // Archiver returns a bunch of fake blocks - blocks = times(3, i => mock({ number: i + 20 })); + blocks = times(3, i => mock({ number: i + 20, hash: () => new Fr(i) })); l2BlockSource.getBlocksForEpoch.mockResolvedValue(blocks); // A sample claim @@ -377,6 +380,7 @@ 
describe('prover-node', () => { _blocks: L2Block[], publicDb: MerkleTreeWriteOperations, _proverDb: MerkleTreeWriteOperations, + _cache: ProverCache, _publicProcessorFactory: PublicProcessorFactory, cleanUp: (job: EpochProvingJob) => Promise, ): EpochProvingJob { diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index cff56201098..0c63bc79b40 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -7,6 +7,7 @@ import { type L2Block, type L2BlockSource, type MerkleTreeWriteOperations, + type ProverCache, type ProverCoordination, type ProverNodeApi, type Service, @@ -15,6 +16,7 @@ import { } from '@aztec/circuit-types'; import { type ContractDataSource } from '@aztec/circuits.js'; import { compact } from '@aztec/foundation/collection'; +import { sha256 } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { type Maybe } from '@aztec/foundation/types'; import { type L1Publisher } from '@aztec/sequencer-client'; @@ -26,6 +28,7 @@ import { EpochProvingJob, type EpochProvingJobState } from './job/epoch-proving- import { ProverNodeMetrics } from './metrics.js'; import { type ClaimsMonitor, type ClaimsMonitorHandler } from './monitors/claims-monitor.js'; import { type EpochMonitor, type EpochMonitorHandler } from './monitors/epoch-monitor.js'; +import { type ProverCacheManager } from './prover-cache/cache_manager.js'; import { type QuoteProvider } from './quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -62,6 +65,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr private readonly epochsMonitor: EpochMonitor, private readonly bondManager: BondManager, private readonly telemetryClient: TelemetryClient, + private readonly proverCacheManager: ProverCacheManager, options: Partial = {}, ) { this.options = { @@ -250,13 +254,26 @@ export class ProverNode 
implements ClaimsMonitorHandler, EpochMonitorHandler, Pr // Create a processor using the forked world state const publicProcessorFactory = new PublicProcessorFactory(this.contractDataSource, this.telemetryClient); + const epochHash = sha256(Buffer.concat(blocks.map(block => block.hash().toBuffer()))); + const proverCache = await this.proverCacheManager.openCache(epochNumber, epochHash); + const cleanUp = async () => { await publicDb.close(); await proverDb.close(); + await proverCache.close(); + await this.proverCacheManager.removeStaleCaches(epochNumber); this.jobs.delete(job.getId()); }; - const job = this.doCreateEpochProvingJob(epochNumber, blocks, publicDb, proverDb, publicProcessorFactory, cleanUp); + const job = this.doCreateEpochProvingJob( + epochNumber, + blocks, + publicDb, + proverDb, + proverCache, + publicProcessorFactory, + cleanUp, + ); this.jobs.set(job.getId(), job); return job; } @@ -267,6 +284,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr blocks: L2Block[], publicDb: MerkleTreeWriteOperations, proverDb: MerkleTreeWriteOperations, + proverCache: ProverCache, publicProcessorFactory: PublicProcessorFactory, cleanUp: () => Promise, ) { @@ -274,7 +292,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr publicDb, epochNumber, blocks, - this.prover.createEpochProver(proverDb), + this.prover.createEpochProver(proverDb, proverCache), publicProcessorFactory, this.publisher, this.l2BlockSource, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index fa81c902d7e..9a41966a582 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -23,6 +23,7 @@ import { type SiblingPath, SimulationError, type Tx, + type TxEffect, type TxExecutionRequest, type TxHash, TxProvingResult, @@ -621,7 +622,7 @@ export class PXEService implements PXE { return 
this.node.getTxReceipt(txHash); } - public getTxEffect(txHash: TxHash) { + public getTxEffect(txHash: TxHash): Promise | undefined> { return this.node.getTxEffect(txHash); } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 0e29df8b664..f12b77c3c63 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1000,6 +1000,7 @@ __metadata: ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + zod: ^3.23.8 languageName: unknown linkType: soft