feat: add rust-toolchain, rustfmt.toml, and clippy (#80)
* feat: add rust-toolchain and rustfmt.toml

* fix actions

* action

* remove cargo check

* pr

---------

Co-authored-by: Ratan Kaliani <[email protected]>
jtguibas and ratankaliani authored Aug 29, 2024
1 parent 4267e3e commit edcbc28
Showing 30 changed files with 296 additions and 499 deletions.
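Note: the rust-toolchain and rustfmt.toml files named in the commit title are among the 30 changed files but do not appear in this excerpt. A minimal sketch of plausible contents, inferred from the 1.79.0 toolchain pinned in the setup action below and from the reformatting visible in the Rust diffs (comments rewrapped at roughly 100 columns, short call chains and struct literals collapsed onto one line), is given here; the exact settings are an assumption, not the committed files.

# rust-toolchain.toml (sketch; the component list is assumed)
[toolchain]
channel = "1.79.0"
components = ["rustfmt", "clippy"]

# rustfmt.toml (sketch; only options consistent with the diffs below)
max_width = 100
use_small_heuristics = "Max"
# The comment re-wrapping in the diffs would additionally need the nightly-gated
# wrap_comments / comment_width options.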
47 changes: 47 additions & 0 deletions .github/actions/setup/action.yml
@@ -0,0 +1,47 @@
name: Test setup
inputs:
  pull_token:
    description: "Token to use for private repo access"
    required: true
runs:
  using: "composite"
  steps:
    - name: Set up git private repo access
      shell: bash
      run: |
        git config --global url."https://${{ inputs.pull_token }}@github.com/".insteadOf ssh://git@github.com
        git config --global url."https://${{ inputs.pull_token }}@github.com".insteadOf https://github.com
    - name: Install Go 1.22
      uses: actions/setup-go@v5
      with:
        go-version: "1.22"
        cache-dependency-path: "**/go.sum"

    - name: Print go version
      shell: bash
      run: go version

    - name: Setup GCC
      uses: Dup4/actions-setup-gcc@v1
      with:
        version: latest

    - name: rust-cache
      uses: actions/cache@v3
      with:
        path: |
          ~/.cargo/bin/
          ~/.cargo/registry/index/
          ~/.cargo/registry/cache/
          ~/.cargo/git/db/
          target/
          ~/.rustup/
        key: rust-1.79.0-${{ hashFiles('**/Cargo.toml') }}
        restore-keys: rust-1.79.0-

    - name: Setup toolchain
      id: rustc-toolchain
      shell: bash
      run: |
        curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain 1.79.0 -y
49 changes: 49 additions & 0 deletions .github/workflows/pr.yaml
@@ -0,0 +1,49 @@
name: PR

on:
  push:
    branches: [main]
  pull_request:
    branches:
      - "**"
    paths:
      - "crates/**"
      - "client-programs/**"
      - "native-host/**"
      - "zkvm-host/**"
      - "op-succinct-proposer/**"
      - "Cargo.toml"
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  lint:
    name: Formatting & Clippy
    runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
    env:
      CARGO_NET_GIT_FETCH_WITH_CLI: "true"
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4

      - name: Setup CI
        uses: ./.github/actions/setup

      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check
        env:
          CARGO_INCREMENTAL: 1

      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
          command: clippy
          args: --all-features --all-targets -- -D warnings -A incomplete-features
        env:
          CARGO_INCREMENTAL: 1
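To reproduce these checks locally, the two steps above amount to cargo fmt --all -- --check and cargo clippy --all-features --all-targets -- -D warnings -A incomplete-features.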
25 changes: 9 additions & 16 deletions client-programs/aggregation/src/main.rs
@@ -15,18 +15,14 @@ use std::collections::HashMap;
 /// TODO: The aggregation program should take in an arbitrary vkey digest, and the smart contract
 /// should verify the proof matches the arbitrary vkey digest stored in the contract. This means
 /// that the aggregate program would no longer need to update this value.
-const MULTI_BLOCK_PROGRAM_VKEY_DIGEST: [u32; 8] = [
-    227309663, 1637133225, 136526498, 1878261023, 2013043842, 450616441, 575447582, 1643259779,
-];
+const MULTI_BLOCK_PROGRAM_VKEY_DIGEST: [u32; 8] =
+    [227309663, 1637133225, 136526498, 1878261023, 2013043842, 450616441, 575447582, 1643259779];
 
 /// Verify that the L1 heads in the boot infos are in the header chain.
 fn verify_l1_heads(agg_inputs: &AggregationInputs, headers: &[Header]) {
     // Create a map of each l1_head in the BootInfo's to booleans
-    let mut l1_heads_map: HashMap<B256, bool> = agg_inputs
-        .boot_infos
-        .iter()
-        .map(|boot_info| (boot_info.l1_head, false))
-        .collect();
+    let mut l1_heads_map: HashMap<B256, bool> =
+        agg_inputs.boot_infos.iter().map(|boot_info| (boot_info.l1_head, false)).collect();
 
     // Iterate through all headers in the chain.
     let mut current_hash = agg_inputs.latest_l1_checkpoint_head;
@@ -45,19 +41,15 @@ fn verify_l1_heads(agg_inputs: &AggregationInputs, headers: &[Header]) {
 
     // Check if all l1_heads were found in the chain.
     for (l1_head, found) in l1_heads_map.iter() {
-        assert!(
-            *found,
-            "L1 head {:?} not found in the provided header chain",
-            l1_head
-        );
+        assert!(*found, "L1 head {:?} not found in the provided header chain", l1_head);
     }
 }
 
 pub fn main() {
     // Read in the public values corresponding to each multi-block proof.
     let agg_inputs = sp1_zkvm::io::read::<AggregationInputs>();
-    // Note: The headers are in order from start to end. We use serde_cbor as bincode serialization causes
-    // issues with the zkVM.
+    // Note: The headers are in order from start to end. We use serde_cbor as bincode serialization
+    // causes issues with the zkVM.
     let headers_bytes = sp1_zkvm::io::read_vec();
     let headers: Vec<Header> = serde_cbor::from_slice(&headers_bytes).unwrap();
     assert!(!agg_inputs.boot_infos.is_empty());
@@ -66,7 +58,8 @@ pub fn main() {
     agg_inputs.boot_infos.windows(2).for_each(|pair| {
         let (prev_boot_info, boot_info) = (&pair[0], &pair[1]);
 
-        // The claimed block of the previous boot info must be the L2 output root of the current boot.
+        // The claimed block of the previous boot info must be the L2 output root of the current
+        // boot.
         assert_eq!(prev_boot_info.l2_claim, boot_info.l2_output_root);
 
         // The chain ID must be the same for all the boot infos, to ensure they're
12 changes: 4 additions & 8 deletions client-programs/range/src/main.rs
@@ -115,14 +115,9 @@ fn main() {
         let mut new_block_header;
         'step: loop {
             // Execute the payload to generate a new block header.
-            info!(
-                "Executing Payload for L2 Block: {}",
-                payload.parent.block_info.number + 1
-            );
+            info!("Executing Payload for L2 Block: {}", payload.parent.block_info.number + 1);
             println!("cycle-tracker-report-start: block-execution");
-            new_block_header = executor
-                .execute_payload(payload.attributes.clone())
-                .unwrap();
+            new_block_header = executor.execute_payload(payload.attributes.clone()).unwrap();
             println!("cycle-tracker-report-end: block-execution");
             let new_block_number = new_block_header.number;
             assert_eq!(new_block_number, payload.parent.block_info.number + 1);
@@ -173,7 +168,8 @@ fn main() {
        //                          EPILOGUE                          //
        ////////////////////////////////////////////////////////////////
 
-        // Note: We don't need the last_block_num == claim_block check, because it's the only way to exit the above loop
+        // Note: We don't need the last_block_num == claim_block check, because it's the only way to
+        // exit the above loop
         assert_eq!(output_root, boot.l2_claim);
     });
 }
2 changes: 1 addition & 1 deletion contracts/lib/sp1-contracts
Submodule sp1-contracts updated from 9494bb to dd93a9
18 changes: 3 additions & 15 deletions crates/client-utils/src/boot.rs
@@ -31,23 +31,11 @@ pub struct RawBootInfo {
 impl From<RawBootInfo> for BootInfo {
     /// Convert the BootInfoWithoutRollupConfig into BootInfo by deriving the RollupConfig.
     fn from(boot_info_without_rollup_config: RawBootInfo) -> Self {
-        let RawBootInfo {
-            l1_head,
-            l2_output_root,
-            l2_claim,
-            l2_claim_block,
-            chain_id,
-        } = boot_info_without_rollup_config;
+        let RawBootInfo { l1_head, l2_output_root, l2_claim, l2_claim_block, chain_id } =
+            boot_info_without_rollup_config;
         let rollup_config = RollupConfig::from_l2_chain_id(chain_id).unwrap();
 
-        Self {
-            l1_head,
-            l2_output_root,
-            l2_claim,
-            l2_claim_block,
-            chain_id,
-            rollup_config,
-        }
+        Self { l1_head, l2_output_root, l2_claim, l2_claim_block, chain_id, rollup_config }
     }
 }
 
39 changes: 13 additions & 26 deletions crates/client-utils/src/driver.rs
@@ -1,5 +1,5 @@
-//! Contains the [MultiBlockDerivationDriver] struct, which handles the [L2PayloadAttributes] derivation
-//! process.
+//! Contains the [MultiBlockDerivationDriver] struct, which handles the [L2PayloadAttributes]
+//! derivation process.
 //!
 //! [L2PayloadAttributes]: kona_derive::types::L2PayloadAttributes
Expand Down Expand Up @@ -52,8 +52,8 @@ pub type OracleAttributesQueue<DAP, O> = AttributesQueue<
OracleAttributesBuilder<O>,
>;

/// The [MultiBlockDerivationDriver] struct is responsible for handling the [L2PayloadAttributes] derivation
/// process.
/// The [MultiBlockDerivationDriver] struct is responsible for handling the [L2PayloadAttributes]
/// derivation process.
///
/// It contains an inner [OraclePipeline] that is used to derive the attributes, backed by
/// oracle-based data sources.
@@ -77,8 +77,8 @@ impl<O: CommsClient + Send + Sync + Debug> MultiBlockDerivationDriver<O> {
         self.l2_safe_head_header.clone()
     }
 
-    /// Creates a new [MultiBlockDerivationDriver] with the given configuration, blob provider, and chain
-    /// providers.
+    /// Creates a new [MultiBlockDerivationDriver] with the given configuration, blob provider, and
+    /// chain providers.
     ///
     /// ## Takes
     /// - `cfg`: The rollup configuration.
@@ -123,12 +123,7 @@ impl<O: CommsClient + Send + Sync + Debug> MultiBlockDerivationDriver<O> {
             .build();
 
         let l2_claim_block = boot_info.l2_claim_block;
-        Ok(Self {
-            l2_safe_head,
-            l2_safe_head_header,
-            pipeline,
-            l2_claim_block,
-        })
+        Ok(Self { l2_safe_head, l2_safe_head_header, pipeline, l2_claim_block })
     }
 
     pub fn update_safe_head(
@@ -197,22 +192,14 @@
             )
             .await?;
 
-        let safe_hash: alloy_primitives::FixedBytes<32> = output_preimage[96..128]
-            .try_into()
-            .map_err(|_| anyhow!("Invalid L2 output root"))?;
+        let safe_hash: alloy_primitives::FixedBytes<32> =
+            output_preimage[96..128].try_into().map_err(|_| anyhow!("Invalid L2 output root"))?;
         let safe_header = l2_chain_provider.header_by_hash(safe_hash)?;
-        let safe_head_info = l2_chain_provider
-            .l2_block_info_by_number(safe_header.number)
-            .await?;
+        let safe_head_info = l2_chain_provider.l2_block_info_by_number(safe_header.number).await?;
 
-        let l1_origin = chain_provider
-            .block_info_by_number(safe_head_info.l1_origin.number)
-            .await?;
+        let l1_origin =
+            chain_provider.block_info_by_number(safe_head_info.l1_origin.number).await?;
 
-        Ok((
-            l1_origin,
-            safe_head_info,
-            Sealed::new_unchecked(safe_header, safe_hash),
-        ))
+        Ok((l1_origin, safe_head_info, Sealed::new_unchecked(safe_header, safe_hash)))
     }
 }