fix: secp256k1, small improvements (#135)
* fix: secp256k1 add assign

* fixes

* fix

* add changes for secp256k1, simplify sha3-keccak

* clean

* add

* add

* clippy

* end block

* concurrent native host

* add

* fmt

* fix
ratankaliani authored Oct 3, 2024
1 parent d9ba10b commit ac8e6ca
Showing 14 changed files with 406 additions and 323 deletions.
570 changes: 304 additions & 266 deletions Cargo.lock

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions Cargo.toml
@@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
license = "MIT"
edition = "2021"
authors = ["zachobront", "ratankaliani"]
authors = ["ratankaliani", "zachobront"]
homepage = "https://succinctlabs.github.io/op-succinct/"
repository = "https://github.com/succinctlabs/op-succinct"

@@ -59,7 +59,8 @@ op-succinct-proposer = { path = "proposer/succinct" }

# ethereum / op
alloy = { version = "0.3", default-features = false, features = ["full"] }
alloy-primitives = { version = "0.8", default-features = false }
# sha3-keccak is enabled in 0.8.4
alloy-primitives = { version = "0.8.4", default-features = false, features = ["sha3-keccak"] }
alloy-rlp = { version = "0.3.8", default-features = false }
alloy-eips = { version = "0.3", default-features = false }
revm = { version = "14.0", default-features = false, features = ["kzg-rs"] }
@@ -85,8 +86,7 @@ lto = "fat"
[patch.crates-io]
tiny-keccak = { git = "https://github.com/sp1-patches/tiny-keccak", branch = "patch-v2.0.2" }
sha2 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", branch = "patch-sha2-v0.10.8" }
ecdsa = { git = "https://github.com/sp1-patches/signatures", branch = "patch-ecdsa-v0.16.8" }
# TODO: Change this back to the original patch branch once the changes to sp1-lib to fix secp256k1 addition have been merged and a stable version tag is released.
ecdsa = { git = "https://github.com/sp1-patches/signatures", branch = "ratan/secp256k1-add-fixes-v0.16.8" }
bn = { git = "https://github.com/0xWOLAND/bn.git", package = "substrate-bn" }
sha3 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha3", branch = "patch-sha3-v0.10.8" }
# This patch uses sha3 instead of tiny-keccak. Reduces cycle count for Keccak by 50%.
alloy-primitives = { git = "https://github.com/sp1-patches/alloy-core", branch = "patch-v0.8.3" }
sha3 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha3", branch = "patch-sha3-v0.10.8" }
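Note on the dependency change above: alloy-primitives 0.8.4 exposes a `sha3-keccak` feature, so keccak256 is backed by the `sha3` crate (which [patch.crates-io] continues to point at the sp1-patches fork), and the separate alloy-core override appears to be dropped. A minimal sketch, not part of this commit, assuming only the standard `alloy_primitives::keccak256` helper and `b256!` macro:

use alloy_primitives::{b256, keccak256};

fn main() {
    // Standard Keccak-256 test vector for "hello".
    let digest = keccak256(b"hello");
    assert_eq!(
        digest,
        b256!("1c8aff950685c2ed4bc3174f3472287b56d9517b9c948127319a09a7a36deac8")
    );
    println!("keccak256(\"hello\") = {digest}");
}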
14 changes: 7 additions & 7 deletions contracts/opsuccinctl2ooconfig.json
@@ -1,16 +1,16 @@
{
"chainId": 11155420,
"chainId": 10,
"challenger": "0x0000000000000000000000000000000000000000",
"finalizationPeriod": 0,
"l2BlockTime": 2,
"owner": "0xDEd0000E32f8F40414d3ab3a830f735a3553E18e",
"proposer": "0xDEd0000E32f8F40414d3ab3a830f735a3553E18e",
"rollupConfigHash": "0x8db2d5ac8409f1f427755d76905c72938229e8d013ebcfe894e0db67f98ea45b",
"startingBlockNumber": 17716036,
"startingOutputRoot": "0x94bd4dad79b967eca2c6a5dc3d06f41fa2d592e47245cd38caee5b0af2f5a763",
"startingTimestamp": 1727234612,
"submissionInterval": 20,
"rollupConfigHash": "0xaaa6ae5735fc2cd9d94d361a8208946371cc689e4c03e45be9dd7a3ea866ab2f",
"startingBlockNumber": 126147850,
"startingOutputRoot": "0xc6722f65202d9971ff736f449973d72ade2268e29945a5753c5a3be8d6b15a97",
"startingTimestamp": 1727894477,
"submissionInterval": 1000,
"verifierGateway": "0x3B6041173B80E77f038f3F2C0f9744f04837185e",
"aggregationVkey": "0x004c04e3eab8ea57cd1f9076cf1b3b87e1e7155776a64ae101e0a39d9098c676",
"rangeVkeyCommitment": "0x75580008047522f72c420286476498af1ec8266960fb4f6549c3bff50109442e"
"rangeVkeyCommitment": "0x50b251451a821c18594f0ae7267b5ac2072e308a03c540552c1d754f2103b460"
}
Binary file modified elf/fault-proof-elf
Binary file not shown.
Binary file modified elf/range-elf
Binary file not shown.
8 changes: 4 additions & 4 deletions rollup-configs/10.json
@@ -10,7 +10,7 @@
},
"l2_time": 1686068903,
"system_config": {
"batcherAddr": "0x6887246668a3b87f54deb3b94ba47a6f63f32985",
"batcherAddr": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985",
"overhead": "0xbc",
"scalar": "0xa6fe0",
"gasLimit": 30000000,
@@ -39,8 +39,8 @@
"ecotone_time": 1710374401,
"fjord_time": 1720627201,
"granite_time": 1726070401,
"batch_inbox_address": "0xff00000000000000000000000000000000000010",
"deposit_contract_address": "0xbeb5fc579115071764c7423a4f12edde41f106ed",
"l1_system_config_address": "0x229047fed2591dbec1ef1118d64f7af3db9eb290",
"batch_inbox_address": "0xFF00000000000000000000000000000000000010",
"deposit_contract_address": "0xbEb5Fc579115071764c7423A4f12eDde41f106Ed",
"l1_system_config_address": "0x229047fed2591dbec1eF1118d64F7aF3dB9EB290",
"protocol_versions_address": "0x0000000000000000000000000000000000000000"
}
8 changes: 4 additions & 4 deletions rollup-configs/11155420.json
@@ -10,7 +10,7 @@
},
"l2_time": 1691802540,
"system_config": {
"batcherAddr": "0x8f23bb38f531600e5d8fddaaec41f13fab46e98c",
"batcherAddr": "0x8F23BB38F531600e5d8FDDaAEC41F13FaB46E98c",
"overhead": "0xbc",
"scalar": "0xa6fe0",
"gasLimit": 30000000,
@@ -40,7 +40,7 @@
"fjord_time": 1716998400,
"granite_time": 1723478400,
"batch_inbox_address": "0xff00000000000000000000000000000011155420",
"deposit_contract_address": "0x16fc5058f25648194471939df75cf27a2fdc48bc",
"l1_system_config_address": "0x034edd2a225f7f429a63e0f1d2084b9e0a93b538",
"protocol_versions_address": "0x79add5713b383daa0a138d3c4780c7a1804a8090"
"deposit_contract_address": "0x16Fc5058F25648194471939df75CF27A2fdC48BC",
"l1_system_config_address": "0x034edD2A225f7f429A63E0f1D2084B9E0A93b538",
"protocol_versions_address": "0x79ADD5713B383DAa0a138d3C4780C7A1804a8090"
}
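The address edits in the two rollup-config files above change only the serialization: the hex digits are unchanged and the new values are EIP-55 checksummed. A minimal sketch (illustrative, not part of the diff) of that round trip with alloy-primitives:

use alloy_primitives::Address;

fn main() {
    // Parsing is case-insensitive; to_checksum re-serializes in EIP-55 mixed-case form.
    let addr: Address = "0x6887246668a3b87f54deb3b94ba47a6f63f32985".parse().unwrap();
    assert_eq!(
        addr.to_checksum(None),
        "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985"
    );
}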
14 changes: 10 additions & 4 deletions scripts/prove/bin/multi.rs
@@ -64,8 +64,11 @@ async fn main() -> Result<()> {
witnessgen_executor.spawn_witnessgen(&host_cli).await?;
witnessgen_executor.flush().await?;
}
let execution_duration = start_time.elapsed();
println!("Execution Duration: {:?}", execution_duration);
let witness_generation_time_sec = start_time.elapsed();
println!(
"Witness Generation Duration: {:?}",
witness_generation_time_sec.as_secs()
);

// Get the stdin for the block.
let sp1_stdin = get_proof_stdin(&host_cli)?;
@@ -115,9 +118,12 @@ async fn main() -> Result<()> {
stats
.add_block_data(&data_fetcher, args.start, args.end)
.await;
stats.add_report_data(&report, execution_duration);
stats.add_report_data(&report);
stats.add_aggregate_data();

stats.add_timing_data(
execution_duration.as_secs(),
witness_generation_time_sec.as_secs(),
);
println!("Execution Stats: \n{:?}", stats);

// Write to CSV.
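The functional change in multi.rs is that witness generation and zkVM execution are now timed as separate phases and recorded via `add_timing_data`, rather than a single `execution_duration` passed to `add_report_data`. A minimal sketch of the pattern, with placeholder functions standing in for the elided host and prover calls:

use std::time::Instant;

fn run_witness_generation() { /* placeholder for WitnessGenExecutor::spawn_witnessgen + flush */ }
fn run_execution() { /* placeholder for prover.execute(MULTI_BLOCK_ELF, sp1_stdin).run() */ }

fn main() {
    let start = Instant::now();
    run_witness_generation();
    let witness_generation_time = start.elapsed();

    let start = Instant::now();
    run_execution();
    let execution_duration = start.elapsed();

    // Both phases are reported in whole seconds, mirroring
    // stats.add_timing_data(execution_duration.as_secs(), witness_generation_time.as_secs()).
    println!(
        "witness generation: {}s, execution: {}s",
        witness_generation_time.as_secs(),
        execution_duration.as_secs()
    );
}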
6 changes: 4 additions & 2 deletions scripts/prove/bin/single.rs
@@ -54,13 +54,14 @@ async fn main() -> Result<()> {
.await?;

// By default, re-run the native execution unless the user passes `--use-cache`.
let start_time = Instant::now();
if !args.use_cache {
// Start the server and native client.
let mut witnessgen_executor = WitnessGenExecutor::default();
witnessgen_executor.spawn_witnessgen(&host_cli).await?;
witnessgen_executor.flush().await?;
}

let witness_generation_time_sec = start_time.elapsed().as_secs();
// Get the stdin for the block.
let sp1_stdin = get_proof_stdin(&host_cli)?;

@@ -105,8 +106,9 @@
stats
.add_block_data(&data_fetcher, args.l2_block, args.l2_block)
.await;
stats.add_report_data(&report, execution_duration);
stats.add_report_data(&report);
stats.add_aggregate_data();
stats.add_timing_data(execution_duration.as_secs(), witness_generation_time_sec);
println!("Execution Stats: \n{:?}", stats);

// Write to CSV.
51 changes: 37 additions & 14 deletions scripts/utils/bin/cost_estimator.rs
@@ -18,10 +18,10 @@ use std::{
fs::{self},
future::Future,
path::PathBuf,
sync::Arc,
sync::{Arc, Mutex},
time::Instant,
};
use tokio::{sync::Mutex, task::block_in_place};
use tokio::task::block_in_place;

pub const MULTI_BLOCK_ELF: &[u8] = include_bytes!("../../../elf/range-elf");

@@ -62,7 +62,7 @@ fn get_max_span_batch_range_size(l2_chain_id: u64) -> u64 {
const DEFAULT_SIZE: u64 = 1000;
match l2_chain_id {
8453 => 5, // Base
11155420 => 40, // OP Sepolia
11155420 => 30, // OP Sepolia
10 => 10, // OP Mainnet
_ => DEFAULT_SIZE,
}
@@ -91,7 +91,7 @@ async fn run_native_data_generation(
data_fetcher: &OPSuccinctDataFetcher,
split_ranges: &[SpanBatchRange],
) -> Vec<BatchHostCli> {
const CONCURRENT_NATIVE_HOST_RUNNERS: usize = 5;
const CONCURRENT_NATIVE_HOST_RUNNERS: usize = 20;

// Split the entire range into chunks of size CONCURRENT_NATIVE_HOST_RUNNERS and process chunks
// serially. Generate witnesses within each chunk in parallel. This prevents the RPC from
@@ -164,7 +164,7 @@ async fn execute_blocks_parallel(
let data_fetcher = OPSuccinctDataFetcher::new().await;
let mut exec_stats = ExecutionStats::default();
exec_stats.add_block_data(&data_fetcher, start, end).await;
let mut execution_stats_map = execution_stats_map.lock().await;
let mut execution_stats_map = execution_stats_map.lock().unwrap();
execution_stats_map.insert((start, end), exec_stats);
});
handles.push(handle);
@@ -175,22 +175,29 @@
host_clis.par_iter().for_each(|r| {
let sp1_stdin = get_proof_stdin(&r.host_cli).unwrap();

let start_time = Instant::now();
let (_, report) = prover.execute(MULTI_BLOCK_ELF, sp1_stdin).run().unwrap();
let execution_duration = start_time.elapsed();
// TODO: Implement retries with a smaller block range if this fails.
let (_, report) = prover
.execute(MULTI_BLOCK_ELF, sp1_stdin)
.run()
.unwrap_or_else(|e| {
panic!(
"Failed to execute blocks {:?} - {:?}: {:?}",
r.start, r.end, e
)
});

// Get the existing execution stats and modify it in place.
let mut execution_stats_map = block_on(execution_stats_map.lock());
let mut execution_stats_map = execution_stats_map.lock().unwrap();
let exec_stats = execution_stats_map.get_mut(&(r.start, r.end)).unwrap();
exec_stats.add_report_data(&report, execution_duration);
exec_stats.add_report_data(&report);
exec_stats.add_aggregate_data();
});

info!("Execution is complete.");

let execution_stats = execution_stats_map
.lock()
.await
.unwrap()
.clone()
.into_values()
.collect();
@@ -225,7 +232,11 @@ fn write_execution_stats_to_csv(
}

/// Aggregate the execution statistics for an array of execution stats objects.
fn aggregate_execution_stats(execution_stats: &[ExecutionStats]) -> ExecutionStats {
fn aggregate_execution_stats(
execution_stats: &[ExecutionStats],
total_execution_time_sec: u64,
witness_generation_time_sec: u64,
) -> ExecutionStats {
let mut aggregate_stats = ExecutionStats::default();
let mut batch_start = u64::MAX;
let mut batch_end = u64::MIN;
@@ -234,7 +245,6 @@ fn aggregate_execution_stats(execution_stats: &[ExecutionStats]) -> ExecutionSta
batch_end = max(batch_end, stats.batch_end);

// Accumulate most statistics across all blocks.
aggregate_stats.execution_duration_sec += stats.execution_duration_sec;
aggregate_stats.total_instruction_count += stats.total_instruction_count;
aggregate_stats.oracle_verify_instruction_count += stats.oracle_verify_instruction_count;
aggregate_stats.derivation_instruction_count += stats.derivation_instruction_count;
@@ -269,6 +279,10 @@ fn aggregate_execution_stats(execution_stats: &[ExecutionStats]) -> ExecutionSta
aggregate_stats.batch_start = batch_start;
aggregate_stats.batch_end = batch_end;

// Set the total execution time to the total execution time of the entire range.
aggregate_stats.total_execution_time_sec = total_execution_time_sec;
aggregate_stats.witness_generation_time_sec = witness_generation_time_sec;

aggregate_stats
}

Expand All @@ -290,16 +304,25 @@ async fn main() -> Result<()> {
);

let prover = ProverClient::new();

let start_time = Instant::now();
let host_clis = run_native_data_generation(&data_fetcher, &split_ranges).await;
let witness_generation_time_sec = start_time.elapsed().as_secs();

let start_time = Instant::now();
let execution_stats = execute_blocks_parallel(host_clis, &prover).await;
let total_execution_time_sec = start_time.elapsed().as_secs();

// Sort the execution stats by batch start block.
let mut sorted_execution_stats = execution_stats.clone();
sorted_execution_stats.sort_by_key(|stats| stats.batch_start);
write_execution_stats_to_csv(&sorted_execution_stats, l2_chain_id, &args)?;

let aggregate_execution_stats = aggregate_execution_stats(&sorted_execution_stats);
let aggregate_execution_stats = aggregate_execution_stats(
&sorted_execution_stats,
total_execution_time_sec,
witness_generation_time_sec,
);
println!(
"Aggregate Execution Stats: \n {}",
aggregate_execution_stats
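Two related changes in cost_estimator.rs: the shared stats map switches from a tokio mutex to std::sync::Mutex because it is now locked from inside rayon's `par_iter` (a blocking context, so `block_on(lock())` is no longer needed), and a failed execution now panics with the offending block range. A minimal sketch of the locking pattern under those assumptions:

use rayon::prelude::*;
use std::{
    collections::HashMap,
    sync::{Arc, Mutex},
};

fn main() {
    // Shared map of (start, end) -> cycle count, filled in parallel.
    let stats: Arc<Mutex<HashMap<(u64, u64), u64>>> = Arc::new(Mutex::new(HashMap::new()));
    let ranges: Vec<(u64, u64)> = vec![(0, 9), (10, 19), (20, 29)];

    ranges.par_iter().for_each(|&(start, end)| {
        let cycles = (end - start + 1) * 1_000; // stand-in for the execution report
        // Plain lock().unwrap(): no async runtime is available on rayon worker threads.
        stats.lock().unwrap().insert((start, end), cycles);
    });

    println!("{:?}", stats.lock().unwrap());
}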
6 changes: 3 additions & 3 deletions scripts/witnessgen/bin/native_host_runner.rs
@@ -9,17 +9,17 @@ async fn main() -> Result<()> {
init_tracing_subscriber(cfg.v)?;

if cfg.server {
let res = start_server(cfg).await;
let res = start_server(cfg.clone()).await;
if res.is_err() {
std::process::exit(1);
}
} else {
let res = start_server_and_native_client(cfg).await;
let res = start_server_and_native_client(cfg.clone()).await;
if res.is_err() {
std::process::exit(1);
}
}

println!("Exiting host program.");
println!("Ran host program with end block: {:?}", cfg.l2_block_number);
std::process::exit(0);
}
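The `cfg.clone()` calls above are needed because the server entry points take the config by value (otherwise the clone would be unnecessary), and the new final log line still reads `cfg.l2_block_number` afterwards. A minimal ownership sketch with a hypothetical Config type standing in for the host CLI config:

#[derive(Clone, Debug)]
struct Config {
    l2_block_number: Option<u64>,
}

async fn start_server(cfg: Config) -> Result<(), ()> {
    let _ = cfg; // takes ownership of the config
    Ok(())
}

#[tokio::main]
async fn main() {
    let cfg = Config { l2_block_number: Some(126_147_850) };
    // Without the clone, `cfg` would be moved here and unusable in the log line below.
    let _ = start_server(cfg.clone()).await;
    println!("Ran host program with end block: {:?}", cfg.l2_block_number);
}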
11 changes: 7 additions & 4 deletions utils/client/src/driver.rs
@@ -13,11 +13,12 @@ use kona_client::{
BootInfo, HintType,
};
use kona_derive::{
attributes::StatefulAttributesBuilder,
pipeline::{DerivationPipeline, Pipeline, PipelineBuilder, StepResult},
sources::EthereumDataSource,
stages::{
AttributesQueue, BatchQueue, ChannelBank, ChannelReader, FrameQueue, L1Retrieval,
L1Traversal, StatefulAttributesBuilder,
AttributesQueue, BatchQueue, BatchStream, ChannelBank, ChannelReader, FrameQueue,
L1Retrieval, L1Traversal,
},
traits::{ChainProvider, L2ChainProvider},
};
@@ -46,8 +47,10 @@ pub type OracleAttributesBuilder<O> =
/// An oracle-backed attributes queue for the derivation pipeline.
pub type OracleAttributesQueue<DAP, O> = AttributesQueue<
BatchQueue<
ChannelReader<
ChannelBank<FrameQueue<L1Retrieval<DAP, L1Traversal<OracleL1ChainProvider<O>>>>>,
BatchStream<
ChannelReader<
ChannelBank<FrameQueue<L1Retrieval<DAP, L1Traversal<OracleL1ChainProvider<O>>>>>,
>,
>,
MultiblockOracleL2ChainProvider<O>,
>,
7 changes: 3 additions & 4 deletions utils/client/src/precompiles/mod.rs
@@ -42,8 +42,8 @@ pub(crate) const ANNOTATED_KZG_EVAL: PrecompileWithAddress = create_annotated_pr
revm::precompile::kzg_point_evaluation::POINT_EVALUATION,
"kzg-eval"
);
// pub(crate) const ANNOTATED_EC_RECOVER: PrecompileWithAddress =
// create_annotated_precompile!(secp256k1::ECRECOVER, "ec-recover");
pub(crate) const ANNOTATED_EC_RECOVER: PrecompileWithAddress =
create_annotated_precompile!(revm::precompile::secp256k1::ECRECOVER, "ec-recover");

// Source: https://github.com/anton-rs/kona/blob/main/bin/client/src/fault/handler/mod.rs#L20-L42
pub fn zkvm_handle_register<F, H>(handler: &mut EvmHandler<'_, (), &mut State<&mut TrieDB<F, H>>>)
@@ -68,8 +68,7 @@ where
ANNOTATED_BN_MUL,
ANNOTATED_BN_PAIR,
ANNOTATED_KZG_EVAL,
// Note: Removed annotations for the ec-recover precompile as it's not a large contributor to cycle count.
// ANNOTATED_EC_RECOVER,
ANNOTATED_EC_RECOVER,
];
ctx_precompiles.extend(override_precompiles);
