feat: run sealevel E2E in parallel with EVM
- separate EVM e2e tests from Sealevel e2e tests
- refactor relative-path code to use absolute paths
- make working-directory handling more reliable
- update GitHub CI to run these two test suites in parallel
kamiyaa committed Jan 24, 2025
1 parent 09e1d5b commit 8c14c77
Showing 14 changed files with 844 additions and 239 deletions.
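At a high level, the previously combined non-CosmWasm run is split into separate EVM and Sealevel suites, each selected by a cargo feature and test filter rather than the old `SEALEVEL_ENABLED` runtime flag. A condensed sketch of the resulting selection, based only on the `Cargo.toml`, `main.rs`, and workflow changes shown below:

```rust
// run-locally (sketch): VM-specific harnesses are compiled only when their
// cargo feature is enabled, and each CI matrix leg picks one of them:
//
//   EVM:      cargo run  --release --bin run-locally --features test-utils
//   CosmWasm: cargo test --release --package run-locally --bin run-locally \
//                 --features cosmos   -- cosmos::test   --nocapture
//   Sealevel: cargo test --release --package run-locally --bin run-locally \
//                 --features sealevel -- sealevel::test --nocapture

#[cfg(feature = "cosmos")]
mod cosmos;

#[cfg(feature = "sealevel")]
mod sealevel;
```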
23 changes: 16 additions & 7 deletions .github/workflows/test.yml
@@ -211,7 +211,7 @@ jobs:
strategy:
fail-fast: false
matrix:
e2e-type: [cosmwasm, non-cosmwasm]
e2e-type: [cosmwasm, evm, sealevel]
steps:
- uses: actions/setup-node@v4
with:
@@ -238,7 +238,7 @@ jobs:
save-if: ${{ !startsWith(github.ref, 'refs/heads/gh-readonly-queue') }}
workspaces: |
./rust/main
${{ matrix.e2e-type == 'non-cosmwasm' && './rust/sealevel' || '' }}
${{ matrix.e2e-type != 'cosmwasm' && './rust/sealevel' || '' }}
- name: Free disk space
run: |
@@ -268,7 +268,7 @@ jobs:
uses: ./.github/actions/checkout-registry

- name: agent tests (CosmWasm)
run: cargo test --release --package run-locally --bin run-locally --features cosmos test-utils -- cosmos::test --nocapture
run: cargo test --release --package run-locally --bin run-locally --features cosmos -- cosmos::test --nocapture
if: matrix.e2e-type == 'cosmwasm'
working-directory: ./rust/main
env:
@@ -284,16 +284,25 @@ jobs:
echo "rust_changes=false" >> $GITHUB_OUTPUT
fi
- name: agent tests (EVM and Sealevel)
- name: agent tests (EVM)
run: cargo run --release --bin run-locally --features test-utils
if: matrix.e2e-type == 'non-cosmwasm'
if: matrix.e2e-type == 'evm'
working-directory: ./rust/main
env:
E2E_CI_MODE: 'true'
E2E_CI_TIMEOUT_SEC: '600'
E2E_KATHY_MESSAGES: '20'
RUST_BACKTRACE: 'full'

- name: agent tests (Sealevel)
run: cargo test --release --package run-locally --bin run-locally --features sealevel -- sealevel::test --nocapture
if: matrix.e2e-type == 'evm' && ${{ steps.check-rust-changes.outputs.rust_changes }}
working-directory: ./rust/main
env:
E2E_CI_MODE: 'true'
E2E_CI_TIMEOUT_SEC: '600'
E2E_KATHY_MESSAGES: '20'
RUST_BACKTRACE: 'full'
SEALEVEL_ENABLED: ${{ steps.check-rust-changes.outputs.rust_changes }}

env-test:
runs-on: ubuntu-latest
@@ -367,4 +376,4 @@ jobs:
- name: Upload coverage reports to Codecov with GitHub Action
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
token: ${{ secrets.CODECOV_TOKEN }}
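The `-- cosmos::test` and `-- sealevel::test` filters select `#[test]` functions inside the feature-gated modules. The new `sealevel/mod.rs` diff is collapsed further down, so the following is only a hypothetical shape for such an entry point; the real function name and body are not visible in this commit view:

```rust
// rust/main/utils/run-locally/src/sealevel/mod.rs (hypothetical shape only):
// a test at path `sealevel::test::...` is what `-- sealevel::test` matches.
mod test {
    #[test]
    fn sealevel_e2e() {
        // would build the Solana programs, start the local validator,
        // spawn the agents, and check the termination invariants
    }
}
```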
8 changes: 8 additions & 0 deletions rust/README.md
@@ -112,10 +112,18 @@ validator. By default, this test will run indefinitely, but can be stopped with

To run the tests for a specific VM, use the `--features` flag.

##### Cosmos E2E Test

```bash
cargo test --release --package run-locally --bin run-locally --features cosmos -- cosmos::test --nocapture
```

##### Sealevel E2E Test

```bash
cargo test --release --package run-locally --bin run-locally --features sealevel -- sealevel::test --nocapture
```

### Building Agent Docker Images

There exists a docker build for the agent binaries. These docker images are used for deploying the agents in a
1 change: 1 addition & 0 deletions rust/main/utils/run-locally/Cargo.toml
@@ -43,3 +43,4 @@ vergen = { version = "8.3.2", features = ["build", "git", "gitcl"] }

[features]
cosmos = []
sealevel = []
4 changes: 0 additions & 4 deletions rust/main/utils/run-locally/src/config.rs
@@ -7,7 +7,6 @@ pub struct Config {
pub ci_mode: bool,
pub ci_mode_timeout: u64,
pub kathy_messages: u64,
pub sealevel_enabled: bool,
// TODO: Include count of sealevel messages in a field separate from `kathy_messages`?
}

@@ -28,9 +27,6 @@ impl Config {
.map(|r| r.parse::<u64>().unwrap());
r.unwrap_or(16)
},
sealevel_enabled: env::var("SEALEVEL_ENABLED")
.map(|k| k.parse::<bool>().unwrap())
.unwrap_or(true),
})
}
}
14 changes: 10 additions & 4 deletions rust/main/utils/run-locally/src/cosmos/mod.rs
@@ -30,7 +30,9 @@ use crate::cosmos::link::link_networks;
use crate::logging::log;
use crate::metrics::agent_balance_sum;
use crate::program::Program;
use crate::utils::{as_task, concat_path, stop_child, AgentHandles, TaskHandle};
use crate::utils::{
as_task, concat_path, get_workspace_path, stop_child, AgentHandles, TaskHandle,
};
use crate::{fetch_metric, AGENT_BIN_PATH};
use cli::{OsmosisCLI, OsmosisEndpoint};

@@ -345,10 +347,12 @@ fn run_locally() {
const TIMEOUT_SECS: u64 = 60 * 10;
let debug = false;

let workspace_path = get_workspace_path();

log!("Building rust...");
Program::new("cargo")
.cmd("build")
.working_dir("../../")
.working_dir(&workspace_path)
.arg("features", "test-utils")
.arg("bin", "relayer")
.arg("bin", "validator")
@@ -529,7 +533,8 @@ fn run_locally() {
// give things a chance to fully start.
sleep(Duration::from_secs(10));

let starting_relayer_balance: f64 = agent_balance_sum(hpl_rly_metrics_port).unwrap();
let starting_relayer_balance: f64 =
agent_balance_sum(hpl_rly_metrics_port).expect("Failed to get relayer agent balance");

// dispatch the second batch of messages (after agents start)
dispatched_messages += dispatch(&osmosisd, linker, &nodes);
@@ -664,7 +669,8 @@ fn termination_invariants_met(
return Ok(false);
}

let ending_relayer_balance: f64 = agent_balance_sum(relayer_metrics_port).unwrap();
let ending_relayer_balance: f64 =
agent_balance_sum(relayer_metrics_port).expect("Failed to get relayer agent balance");

// Make sure the balance was correctly updated in the metrics.
// Ideally, make sure that the difference is >= gas_per_tx * gas_cost, set here:
11 changes: 7 additions & 4 deletions rust/main/utils/run-locally/src/ethereum/mod.rs
@@ -10,15 +10,18 @@ use crate::config::Config;
use crate::ethereum::multicall::{DEPLOYER_ADDRESS, SIGNED_DEPLOY_MULTICALL_TX};
use crate::logging::log;
use crate::program::Program;
use crate::utils::{as_task, AgentHandles, TaskHandle};
use crate::{INFRA_PATH, MONOREPO_ROOT_PATH};
use crate::utils::{as_task, get_ts_infra_path, get_workspace_path, AgentHandles, TaskHandle};

mod multicall;

#[apply(as_task)]
pub fn start_anvil(config: Arc<Config>) -> AgentHandles {
log!("Installing typescript dependencies...");
let yarn_monorepo = Program::new("yarn").working_dir(MONOREPO_ROOT_PATH);

let workspace_path = get_workspace_path();
let ts_infra_path = get_ts_infra_path(&workspace_path);

let yarn_monorepo = Program::new("yarn").working_dir(workspace_path);
if !config.is_ci_env {
// test.yaml workflow installs dependencies
yarn_monorepo.clone().cmd("install").run().join();
@@ -42,7 +45,7 @@ pub fn start_anvil(config: Arc<Config>) -> AgentHandles {

sleep(Duration::from_secs(10));

let yarn_infra = Program::new("yarn").working_dir(INFRA_PATH);
let yarn_infra = Program::new("yarn").working_dir(&ts_infra_path);

log!("Deploying hyperlane ism contracts...");
yarn_infra.clone().cmd("deploy-ism").run().join();
@@ -1,4 +1,4 @@
pub use common::SOL_MESSAGES_EXPECTED;
pub use common::*;
pub use post_startup_invariants::post_startup_invariants;
pub use termination_invariants::termination_invariants_met;

@@ -1,15 +1,12 @@
use std::fs::File;
use std::path::Path;

use crate::config::Config;
use crate::metrics::agent_balance_sum;
use crate::utils::get_matching_lines;
use maplit::hashmap;
use relayer::GAS_EXPENDITURE_LOG_MESSAGE;

use crate::invariants::common::{SOL_MESSAGES_EXPECTED, SOL_MESSAGES_WITH_NON_MATCHING_IGP};
use crate::logging::log;
use crate::solana::solana_termination_invariants_met;
use crate::{
fetch_metric, AGENT_LOGGING_DIR, RELAYER_METRICS_PORT, SCRAPER_METRICS_PORT,
ZERO_MERKLE_INSERTION_KATHY_MESSAGES,
@@ -21,34 +18,20 @@ use crate::{
pub fn termination_invariants_met(
config: &Config,
starting_relayer_balance: f64,
solana_cli_tools_path: Option<&Path>,
solana_config_path: Option<&Path>,
) -> eyre::Result<bool> {
let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
let sol_messages_expected = if config.sealevel_enabled {
SOL_MESSAGES_EXPECTED
} else {
0
};
let sol_messages_with_non_matching_igp = if config.sealevel_enabled {
SOL_MESSAGES_WITH_NON_MATCHING_IGP
} else {
0
};

// this is total messages expected to be delivered
let total_messages_expected = eth_messages_expected + sol_messages_expected;
let total_messages_dispatched = total_messages_expected + sol_messages_with_non_matching_igp;
let total_messages_expected = eth_messages_expected;
let total_messages_dispatched = total_messages_expected;

let lengths = fetch_metric(
RELAYER_METRICS_PORT,
"hyperlane_submitter_queue_length",
&hashmap! {},
)?;
assert!(!lengths.is_empty(), "Could not find queue length metric");
if lengths.iter().sum::<u32>()
!= ZERO_MERKLE_INSERTION_KATHY_MESSAGES + sol_messages_with_non_matching_igp
{
if lengths.iter().sum::<u32>() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
log!(
"Relayer queues contain more messages than the zero-merkle-insertion ones. Lengths: {:?}",
lengths
@@ -162,20 +145,9 @@ pub fn termination_invariants_met(
merkle_tree_max_sequence.iter().filter(|&x| *x > 0).count() as u32;
assert_eq!(
merkle_tree_max_sequence.iter().sum::<u32>() + non_zero_sequence_count,
total_messages_expected
+ sol_messages_with_non_matching_igp
+ (config.kathy_messages as u32 / 4) * 2
total_messages_expected + (config.kathy_messages as u32 / 4) * 2
);

if let Some((solana_cli_tools_path, solana_config_path)) =
solana_cli_tools_path.zip(solana_config_path)
{
if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
log!("Solana termination invariants not met");
return Ok(false);
}
}

let dispatched_messages_scraped = fetch_metric(
SCRAPER_METRICS_PORT,
"hyperlane_contract_sync_stored_events",
@@ -221,12 +193,13 @@ pub fn termination_invariants_met(
log!(
"Scraper has scraped {} delivered messages, expected {}",
delivered_messages_scraped,
total_messages_expected + sol_messages_with_non_matching_igp
total_messages_expected
);
return Ok(false);
}

let ending_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
let ending_relayer_balance: f64 =
agent_balance_sum(9092).expect("Failed to get relayer agent balance");
// Make sure the balance was correctly updated in the metrics.
if starting_relayer_balance <= ending_relayer_balance {
log!(
169 changes: 33 additions & 136 deletions rust/main/utils/run-locally/src/main.rs
@@ -13,7 +13,6 @@
//! the end conditions are met, the test is a failure. Defaults to 10 min.
//! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled.
//! else false.
//! - `SEALEVEL_ENABLED`: true/false, enables sealevel testing. Defaults to true.
use std::{
collections::HashMap,
@@ -34,26 +33,30 @@ pub use metrics::fetch_metric;
use once_cell::sync::Lazy;
use program::Program;
use tempfile::tempdir;
use utils::{get_ts_infra_path, get_workspace_path};

use crate::{
config::Config,
ethereum::start_anvil,
invariants::{post_startup_invariants, termination_invariants_met, SOL_MESSAGES_EXPECTED},
invariants::{post_startup_invariants, termination_invariants_met},
metrics::agent_balance_sum,
solana::*,
utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle},
};

mod config;
mod cosmos;
mod ethereum;
mod invariants;
mod logging;
mod metrics;
mod program;
mod solana;
mod utils;

#[cfg(feature = "cosmos")]
mod cosmos;

#[cfg(feature = "sealevel")]
mod sealevel;

pub static AGENT_LOGGING_DIR: Lazy<&Path> = Lazy::new(|| {
let dir = Path::new("/tmp/test_logs");
fs::create_dir_all(dir).unwrap();
@@ -68,10 +71,6 @@ const RELAYER_KEYS: &[&str] = &[
"0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97",
// test3
"0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356",
// sealeveltest1
"0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f",
// sealeveltest2
"0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f",
];
/// These private keys are from hardhat/anvil's testing accounts.
/// These must be consistent with the ISM config for the test.
@@ -82,15 +81,7 @@ const ETH_VALIDATOR_KEYS: &[&str] = &[
"0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e",
];

const SEALEVEL_VALIDATOR_KEYS: &[&str] = &[
// sealevel
"0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
];

const AGENT_BIN_PATH: &str = "target/debug";
const SOLANA_AGNET_BIN_PATH: &str = "../sealevel/target/debug/";
const INFRA_PATH: &str = "../../typescript/infra";
const MONOREPO_ROOT_PATH: &str = "../../";

const ZERO_MERKLE_INSERTION_KATHY_MESSAGES: u32 = 10;

@@ -134,13 +125,16 @@ impl Drop for State {
for w in self.watchers.drain(..) {
w.join_box();
}

log!("Dropping data...");
// drop any held data
self.data.reverse();
for data in self.data.drain(..) {
drop(data)
}
fs::remove_dir_all(SOLANA_CHECKPOINT_LOCATION).unwrap_or_default();
fs::remove_dir_all::<&Path>(AGENT_LOGGING_DIR.as_ref()).unwrap_or_default();

log!("Done...");
}
}

@@ -155,21 +149,15 @@ fn main() -> ExitCode {
let config = Config::load();
log!("Running with config: {:?}", config);

let mut validator_origin_chains = ["test1", "test2", "test3"].to_vec();
let mut validator_keys = ETH_VALIDATOR_KEYS.to_vec();
let mut validator_count: usize = validator_keys.len();
let mut checkpoints_dirs: Vec<DynPath> = (0..validator_count)
let workspace_path = get_workspace_path();
let ts_infra_path = get_ts_infra_path(&workspace_path);

let validator_origin_chains = ["test1", "test2", "test3"].to_vec();
let validator_keys = ETH_VALIDATOR_KEYS.to_vec();
let validator_count: usize = validator_keys.len();
let checkpoints_dirs: Vec<DynPath> = (0..validator_count)
.map(|_| Box::new(tempdir().unwrap()) as DynPath)
.collect();
if config.sealevel_enabled {
validator_origin_chains.push("sealeveltest1");
let mut sealevel_keys = SEALEVEL_VALIDATOR_KEYS.to_vec();
validator_keys.append(&mut sealevel_keys);
let solana_checkpoint_path = Path::new(SOLANA_CHECKPOINT_LOCATION);
fs::remove_dir_all(solana_checkpoint_path).unwrap_or_default();
checkpoints_dirs.push(Box::new(solana_checkpoint_path) as DynPath);
validator_count += 1;
}
assert_eq!(validator_origin_chains.len(), validator_keys.len());

let rocks_db_dir = tempdir().unwrap();
@@ -190,7 +178,10 @@ fn main() -> ExitCode {

let relayer_env = common_agent_env
.clone()
.bin(concat_path(AGENT_BIN_PATH, "relayer"))
.bin(concat_path(
&workspace_path,
format!("{}/relayer", AGENT_BIN_PATH),
))
.hyp_env("CHAINS_TEST1_RPCCONSENSUSTYPE", "fallback")
.hyp_env(
"CHAINS_TEST2_CONNECTION_URLS",
@@ -219,8 +210,6 @@ fn main() -> ExitCode {
.hyp_env("DB", relayer_db.to_str().unwrap())
.hyp_env("CHAINS_TEST1_SIGNER_KEY", RELAYER_KEYS[0])
.hyp_env("CHAINS_TEST2_SIGNER_KEY", RELAYER_KEYS[1])
.hyp_env("CHAINS_SEALEVELTEST1_SIGNER_KEY", RELAYER_KEYS[3])
.hyp_env("CHAINS_SEALEVELTEST2_SIGNER_KEY", RELAYER_KEYS[4])
.hyp_env("RELAYCHAINS", "invalidchain,otherinvalid")
.hyp_env("ALLOWLOCALCHECKPOINTSYNCERS", "true")
.hyp_env(
@@ -235,15 +224,8 @@ fn main() -> ExitCode {
"http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545",
)
// default is used for TEST3
.arg("defaultSigner.key", RELAYER_KEYS[2]);
let relayer_env = if config.sealevel_enabled {
relayer_env.arg(
"relayChains",
"test1,test2,test3,sealeveltest1,sealeveltest2",
)
} else {
relayer_env.arg("relayChains", "test1,test2,test3")
};
.arg("defaultSigner.key", RELAYER_KEYS[2])
.arg("relayChains", "test1,test2,test3");

let base_validator_env = common_agent_env
.clone()
@@ -292,15 +274,8 @@ fn main() -> ExitCode {
.hyp_env(
"DB",
"postgresql://postgres:47221c18c610@localhost:5432/postgres",
);
let scraper_env = if config.sealevel_enabled {
scraper_env.hyp_env(
"CHAINSTOSCRAPE",
"test1,test2,test3,sealeveltest1,sealeveltest2",
)
} else {
scraper_env.hyp_env("CHAINSTOSCRAPE", "test1,test2,test3")
};
.hyp_env("CHAINSTOSCRAPE", "test1,test2,test3");

let mut state = State::default();

@@ -321,19 +296,6 @@ fn main() -> ExitCode {
// Ready to run...
//

let solana_paths = if config.sealevel_enabled {
let (solana_path, solana_path_tempdir) = install_solana_cli_tools(
SOLANA_CONTRACTS_CLI_RELEASE_URL.to_owned(),
SOLANA_CONTRACTS_CLI_VERSION.to_owned(),
)
.join();
state.data.push(Box::new(solana_path_tempdir));
let solana_program_builder = build_solana_programs(solana_path.clone());
Some((solana_program_builder.join(), solana_path))
} else {
None
};

// this task takes a long time in the CI so run it in parallel
log!("Building rust...");
let build_main = Program::new("cargo")
@@ -360,37 +322,6 @@ fn main() -> ExitCode {
state.push_agent(postgres);

build_main.join();
if config.sealevel_enabled {
Program::new("cargo")
.working_dir("../sealevel")
.cmd("build")
.arg("bin", "hyperlane-sealevel-client")
.filter_logs(|l| !l.contains("workspace-inheritance"))
.run()
.join();
}

let solana_ledger_dir = tempdir().unwrap();
let solana_config_path = if let Some((solana_program_path, _)) = solana_paths.clone() {
// use the agave 2.x validator version to ensure mainnet compatibility
let (solana_path, solana_path_tempdir) = install_solana_cli_tools(
SOLANA_NETWORK_CLI_RELEASE_URL.to_owned(),
SOLANA_NETWORK_CLI_VERSION.to_owned(),
)
.join();
state.data.push(Box::new(solana_path_tempdir));
let start_solana_validator = start_solana_test_validator(
solana_path.clone(),
solana_program_path,
solana_ledger_dir.as_ref().to_path_buf(),
);

let (solana_config_path, solana_validator) = start_solana_validator.join();
state.push_agent(solana_validator);
Some(solana_config_path)
} else {
None
};

state.push_agent(start_anvil.join());

@@ -407,14 +338,14 @@ fn main() -> ExitCode {

// Send half the kathy messages before starting the rest of the agents
let kathy_env_single_insertion = Program::new("yarn")
.working_dir(INFRA_PATH)
.working_dir(&ts_infra_path)
.cmd("kathy")
.arg("messages", (config.kathy_messages / 4).to_string())
.arg("timeout", "1000");
kathy_env_single_insertion.clone().run().join();

let kathy_env_zero_insertion = Program::new("yarn")
.working_dir(INFRA_PATH)
.working_dir(&ts_infra_path)
.cmd("kathy")
.arg(
"messages",
@@ -427,7 +358,7 @@ fn main() -> ExitCode {
kathy_env_zero_insertion.clone().run().join();

let kathy_env_double_insertion = Program::new("yarn")
.working_dir(INFRA_PATH)
.working_dir(&ts_infra_path)
.cmd("kathy")
.arg("messages", (config.kathy_messages / 4).to_string())
.arg("timeout", "1000")
@@ -436,16 +367,6 @@ fn main() -> ExitCode {
.arg("required-hook", "merkleTreeHook");
kathy_env_double_insertion.clone().run().join();

if let Some((solana_config_path, (_, solana_path))) =
solana_config_path.clone().zip(solana_paths.clone())
{
// Send some sealevel messages before spinning up the agents, to test the backward indexing cursor
for _i in 0..(SOL_MESSAGES_EXPECTED / 2) {
initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone())
.join();
}
}

// spawn the rest of the validators
for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) {
let validator = validator_env.spawn(
@@ -457,21 +378,6 @@ fn main() -> ExitCode {

state.push_agent(relayer_env.spawn("RLY", Some(&AGENT_LOGGING_DIR)));

if let Some((solana_config_path, (_, solana_path))) =
solana_config_path.clone().zip(solana_paths.clone())
{
// Send some sealevel messages before spinning up the agents, to test the backward indexing cursor
for _i in 0..(SOL_MESSAGES_EXPECTED / 2) {
initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone())
.join();
}
initiate_solana_non_matching_igp_paying_transfer(
solana_path.clone(),
solana_config_path.clone(),
)
.join();
}

log!("Setup complete! Agents running in background...");
log!("Ctrl+C to end execution...");

@@ -489,28 +395,19 @@ fn main() -> ExitCode {
sleep(Duration::from_secs(10));

if !post_startup_invariants(&checkpoints_dirs) {
log!("Failure: Post startup invariants are not met");
log!("Error: Post startup invariants are not met");
return report_test_result(true);
} else {
log!("Success: Post startup invariants are met");
}

let mut failure_occurred = false;
let starting_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
let starting_relayer_balance: f64 =
agent_balance_sum(9092).expect("Failed to get relayer agent balance");
while !SHUTDOWN.load(Ordering::Relaxed) {
if config.ci_mode {
// for CI we have to look for the end condition.
if termination_invariants_met(
&config,
starting_relayer_balance,
solana_paths
.clone()
.map(|(_, solana_path)| solana_path)
.as_deref(),
solana_config_path.as_deref(),
)
.unwrap_or(false)
{
if termination_invariants_met(&config, starting_relayer_balance).unwrap_or(false) {
// end condition reached successfully
break;
} else if (Instant::now() - loop_start).as_secs() > config.ci_mode_timeout {
3 changes: 3 additions & 0 deletions rust/main/utils/run-locally/src/program.rs
@@ -183,6 +183,9 @@ impl Program {
.unwrap(),
);
if let Some(wd) = &self.working_dir {
if !wd.exists() {
panic!("Working directory does not exist: {:?}", wd.as_path());
}
cmd.current_dir(wd.as_path());
}
for (k, v) in self.env.iter() {
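This early existence check pairs with the absolute-path refactor: call sites now build their working directories from `get_workspace_path()` and fail fast if the location is wrong, instead of failing later inside the spawned command. A condensed usage sketch, mirroring the `Program` builder calls in `cosmos/mod.rs` above (the argument values are illustrative):

```rust
// Sketch: building agents from the workspace root with an absolute
// working directory; a missing directory now panics before spawning.
let workspace_path = get_workspace_path();
Program::new("cargo")
    .cmd("build")
    .working_dir(&workspace_path)
    .arg("features", "test-utils")
    .arg("bin", "relayer")
    .run()
    .join();
```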
402 changes: 402 additions & 0 deletions rust/main/utils/run-locally/src/sealevel/mod.rs

Large diffs are not rendered by default.

@@ -10,11 +10,14 @@ use tempfile::{tempdir, NamedTempFile};

use crate::logging::log;
use crate::program::Program;
use crate::utils::{as_task, concat_path, AgentHandles, ArbitraryData, TaskHandle};
use crate::SOLANA_AGNET_BIN_PATH;
use crate::utils::{
as_task, concat_path, get_sealevel_path, get_workspace_path, AgentHandles, TaskHandle,
};

pub const SOLANA_AGENT_BIN_PATH: &str = "target/debug";

/// Solana CLI version for compiling programs
pub const SOLANA_CONTRACTS_CLI_VERSION: &str = "1.14.20";
pub const SOLANA_CONTRACTS_CLI_VERSION: &str = "1.14.29";
pub const SOLANA_CONTRACTS_CLI_RELEASE_URL: &str = "github.com/solana-labs/solana";

/// Solana version used by mainnet validators
@@ -54,13 +57,12 @@ const SOLANA_HYPERLANE_PROGRAMS: &[&str] = &[
"hyperlane-sealevel-igp",
];

const SOLANA_KEYPAIR: &str = "../main/config/test-sealevel-keys/test_deployer-keypair.json";
const SOLANA_DEPLOYER_ACCOUNT: &str =
"../main/config/test-sealevel-keys/test_deployer-account.json";
const SOLANA_KEYPAIR: &str = "config/test-sealevel-keys/test_deployer-keypair.json";
const SOLANA_DEPLOYER_ACCOUNT: &str = "config/test-sealevel-keys/test_deployer-account.json";
const SOLANA_WARPROUTE_TOKEN_CONFIG_FILE: &str =
"../sealevel/environments/local-e2e/warp-routes/testwarproute/token-config.json";
const SOLANA_CHAIN_CONFIG_FILE: &str = "../sealevel/environments/local-e2e/chain-config.json";
const SOLANA_ENVS_DIR: &str = "../sealevel/environments";
"environments/local-e2e/warp-routes/testwarproute/token-config.json";
const SOLANA_CHAIN_CONFIG_FILE: &str = "environments/local-e2e/chain-config.json";
const SOLANA_ENVS_DIR: &str = "environments";

const SOLANA_ENV_NAME: &str = "local-e2e";

@@ -76,17 +78,16 @@ const SEALEVELTEST2_IGP_PROGRAM_ID: &str = "FArd4tEikwz2fk3MB7S9kC82NGhkgT6f9aXi
pub const SOLANA_CHECKPOINT_LOCATION: &str =
"/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8";

const SOLANA_GAS_ORACLE_CONFIG_FILE: &str =
"../sealevel/environments/local-e2e/gas-oracle-configs.json";
const SOLANA_GAS_ORACLE_CONFIG_FILE: &str = "environments/local-e2e/gas-oracle-configs.json";

// Install the CLI tools and return the path to the bin dir.
#[apply(as_task)]
pub fn install_solana_cli_tools(
release_url: String,
release_version: String,
) -> (PathBuf, impl ArbitraryData) {
tools_dir: PathBuf,
) -> PathBuf {
let solana_download_dir = tempdir().unwrap();
let solana_tools_dir = tempdir().unwrap();
log!(
"Downloading solana cli release v{} from {}",
release_version,
@@ -132,19 +133,20 @@ pub fn install_solana_cli_tools(

fs::rename(
concat_path(&solana_download_dir, "solana-release"),
&solana_tools_dir,
&tools_dir,
)
.expect("Failed to move solana-release dir");
(concat_path(&solana_tools_dir, "bin"), solana_tools_dir)
concat_path(&tools_dir, "bin")
}

#[apply(as_task)]
pub fn build_solana_programs(solana_cli_tools_path: PathBuf) -> PathBuf {
let out_path = Path::new(SBF_OUT_PATH);
let workspace_path = get_workspace_path();
let out_path = concat_path(&workspace_path, SBF_OUT_PATH);
if out_path.exists() {
fs::remove_dir_all(out_path).expect("Failed to remove solana program deploy dir");
fs::remove_dir_all(&out_path).expect("Failed to remove solana program deploy dir");
}
fs::create_dir_all(out_path).expect("Failed to create solana program deploy dir");
fs::create_dir_all(&out_path).expect("Failed to create solana program deploy dir");
let out_path = out_path.canonicalize().unwrap();

Program::new("curl")
@@ -167,19 +169,18 @@ pub fn build_solana_programs(solana_cli_tools_path: PathBuf) -> PathBuf {
fs::remove_file(concat_path(&out_path, "spl.tar.gz"))
.expect("Failed to remove solana program archive");

let build_sbf = Program::new(
concat_path(&solana_cli_tools_path, "cargo-build-sbf")
.to_str()
.unwrap(),
)
.env("PATH", updated_path(&solana_cli_tools_path))
.env("SBF_OUT_PATH", out_path.to_str().unwrap());
let bin_path = concat_path(&solana_cli_tools_path, "cargo-build-sbf");
let build_sbf = Program::new(bin_path).env("SBF_OUT_PATH", out_path.to_str().unwrap());

let workspace_path = get_workspace_path();
let sealevel_path = get_sealevel_path(&workspace_path);
let sealevel_programs = concat_path(sealevel_path, "programs");

// build our programs
for &path in SOLANA_HYPERLANE_PROGRAMS {
build_sbf
.clone()
.working_dir(concat_path("../sealevel/programs", path))
.working_dir(concat_path(&sealevel_programs, path))
.run()
.join();
}
@@ -193,11 +194,34 @@ pub fn start_solana_test_validator(
solana_programs_path: PathBuf,
ledger_dir: PathBuf,
) -> (PathBuf, AgentHandles) {
let workspace_path = get_workspace_path();
let sealevel_path = get_sealevel_path(&workspace_path);

let solana_deployer_account = concat_path(&workspace_path, SOLANA_DEPLOYER_ACCOUNT);
let solana_deployer_account_str = solana_deployer_account.to_string_lossy();

let solana_env_dir = concat_path(&sealevel_path, SOLANA_ENVS_DIR);
let solana_env_dir_str = solana_env_dir.to_string_lossy();

let solana_chain_config_file = concat_path(&sealevel_path, SOLANA_CHAIN_CONFIG_FILE);
let solana_chain_config_file_str = solana_chain_config_file.to_string_lossy();

let solana_warproute_token_config_file =
concat_path(&sealevel_path, SOLANA_WARPROUTE_TOKEN_CONFIG_FILE);
let solana_warproute_token_config_file_str =
solana_warproute_token_config_file.to_string_lossy();

let solana_gas_oracle_config_file = concat_path(&sealevel_path, SOLANA_GAS_ORACLE_CONFIG_FILE);
let solana_gas_oracle_config_file_str = solana_gas_oracle_config_file.to_string_lossy();

let build_so_dir = concat_path(&workspace_path, SBF_OUT_PATH);
let build_so_dir_str = build_so_dir.to_string_lossy();
// init solana config
let solana_config = NamedTempFile::new().unwrap().into_temp_path();
let solana_config_path = solana_config.to_path_buf();

Program::new(concat_path(&solana_cli_tools_path, "solana"))
.arg("config", solana_config.to_str().unwrap())
.arg("config", solana_config_path.to_string_lossy())
.cmd("config")
.cmd("set")
.arg("url", "localhost")
@@ -212,7 +236,7 @@ pub fn start_solana_test_validator(
.arg3(
"account",
"E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty",
SOLANA_DEPLOYER_ACCOUNT,
solana_deployer_account_str.clone(),
)
.remember(solana_config);
for &(address, lib) in SOLANA_PROGRAMS {
@@ -226,16 +250,16 @@ pub fn start_solana_test_validator(
sleep(Duration::from_secs(5));

log!("Deploying the hyperlane programs to solana");
let sealevel_client = sealevel_client(&solana_cli_tools_path, &solana_config_path);

let sealevel_client = sealevel_client(&solana_cli_tools_path, &solana_config_path);
let sealevel_client_deploy_core = sealevel_client
.clone()
.arg("compute-budget", "200000")
.cmd("core")
.cmd("deploy")
.arg("environment", SOLANA_ENV_NAME)
.arg("environments-dir", SOLANA_ENVS_DIR)
.arg("built-so-dir", SBF_OUT_PATH);
.arg("environments-dir", solana_env_dir_str.clone())
.arg("built-so-dir", build_so_dir_str.clone());

// Deploy sealeveltest1 core
sealevel_client_deploy_core
@@ -256,8 +280,11 @@ pub fn start_solana_test_validator(
.clone()
.cmd("igp")
.cmd("configure")
.arg("gas-oracle-config-file", SOLANA_GAS_ORACLE_CONFIG_FILE)
.arg("chain-config-file", SOLANA_CHAIN_CONFIG_FILE);
.arg(
"gas-oracle-config-file",
solana_gas_oracle_config_file_str.clone(),
)
.arg("chain-config-file", solana_chain_config_file_str.clone());

// Configure sealeveltest1 IGP
igp_configure_command
@@ -280,10 +307,13 @@ pub fn start_solana_test_validator(
.cmd("warp-route")
.cmd("deploy")
.arg("environment", SOLANA_ENV_NAME)
.arg("environments-dir", SOLANA_ENVS_DIR)
.arg("built-so-dir", SBF_OUT_PATH)
.arg("environments-dir", solana_env_dir_str.clone())
.arg("built-so-dir", build_so_dir_str.clone())
.arg("warp-route-name", "testwarproute")
.arg("token-config-file", SOLANA_WARPROUTE_TOKEN_CONFIG_FILE)
.arg(
"token-config-file",
solana_warproute_token_config_file_str.clone(),
)
.arg("chain-config-file", SOLANA_CHAIN_CONFIG_FILE)
.arg("ata-payer-funding-amount", "1000000000")
.run()
@@ -325,7 +355,7 @@ pub fn start_solana_test_validator(
.cmd("init-igp-account")
.arg("program-id", SEALEVELTEST1_IGP_PROGRAM_ID)
.arg("environment", SOLANA_ENV_NAME)
.arg("environments-dir", SOLANA_ENVS_DIR)
.arg("environments-dir", solana_env_dir_str)
.arg("chain", "sealeveltest1")
.arg("account-salt", ALTERNATIVE_SALT)
.run()
@@ -348,7 +378,7 @@ pub fn start_solana_test_validator(
.cmd("igp")
.cmd("configure")
.arg("program-id", SEALEVELTEST1_IGP_PROGRAM_ID)
.arg("gas-oracle-config-file", SOLANA_GAS_ORACLE_CONFIG_FILE)
.arg("gas-oracle-config-file", solana_gas_oracle_config_file_str)
.arg("chain-config-file", SOLANA_CHAIN_CONFIG_FILE)
.arg("chain", "sealeveltest1")
.arg("account-salt", ALTERNATIVE_SALT)
@@ -366,9 +396,13 @@ pub fn initiate_solana_hyperlane_transfer(
solana_cli_tools_path: PathBuf,
solana_config_path: PathBuf,
) -> String {
let workspace_path = get_workspace_path();
let solana_keypair = concat_path(workspace_path, SOLANA_KEYPAIR);
let solana_keypair_str = solana_keypair.to_string_lossy();

let sender = Program::new(concat_path(&solana_cli_tools_path, "solana"))
.arg("config", solana_config_path.to_str().unwrap())
.arg("keypair", SOLANA_KEYPAIR)
.arg("keypair", solana_keypair_str.clone())
.cmd("address")
.run_with_output()
.join()
@@ -380,7 +414,7 @@ pub fn initiate_solana_hyperlane_transfer(
let output = sealevel_client(&solana_cli_tools_path, &solana_config_path)
.cmd("token")
.cmd("transfer-remote")
.cmd(SOLANA_KEYPAIR)
.cmd(solana_keypair_str.clone())
.cmd("10000000000")
.cmd(SOLANA_REMOTE_CHAIN_ID)
.cmd(sender) // send to self
@@ -411,9 +445,13 @@ pub fn initiate_solana_non_matching_igp_paying_transfer(
solana_cli_tools_path: PathBuf,
solana_config_path: PathBuf,
) -> String {
let workspace_path = get_workspace_path();
let solana_keypair = concat_path(workspace_path, SOLANA_KEYPAIR);
let solana_keypair_str = solana_keypair.to_string_lossy();

let sender = Program::new(concat_path(&solana_cli_tools_path, "solana"))
.arg("config", solana_config_path.to_str().unwrap())
.arg("keypair", SOLANA_KEYPAIR)
.arg("keypair", solana_keypair_str.clone())
.cmd("address")
.run_with_output()
.join()
@@ -425,7 +463,7 @@ pub fn initiate_solana_non_matching_igp_paying_transfer(
let output = sealevel_client(&solana_cli_tools_path, &solana_config_path)
.cmd("token")
.cmd("transfer-remote")
.cmd(SOLANA_KEYPAIR)
.cmd(solana_keypair_str)
.cmd("10000000000")
.cmd(SOLANA_REMOTE_CHAIN_ID)
.cmd(sender) // send to self
@@ -491,17 +529,21 @@ pub fn solana_termination_invariants_met(
.contains("Message delivered")
}
fn sealevel_client(solana_cli_tools_path: &Path, solana_config_path: &Path) -> Program {
let workspace_path = get_workspace_path();
let sealevel_path = get_sealevel_path(&workspace_path);

let solana_keypair = concat_path(workspace_path, SOLANA_KEYPAIR);
let solana_keypair_str = solana_keypair.to_string_lossy();

Program::new(concat_path(
SOLANA_AGNET_BIN_PATH,
"hyperlane-sealevel-client",
&sealevel_path,
format!("{}/hyperlane-sealevel-client", SOLANA_AGENT_BIN_PATH),
))
.working_dir(sealevel_path.clone())
.env("PATH", updated_path(solana_cli_tools_path))
.env("RUST_BACKTRACE", "1")
.arg("config", solana_config_path.to_str().unwrap())
.arg(
"keypair",
"config/test-sealevel-keys/test_deployer-keypair.json",
)
.arg("keypair", solana_keypair_str)
}

fn updated_path(solana_cli_tools_path: &Path) -> String {
223 changes: 223 additions & 0 deletions rust/main/utils/run-locally/src/sealevel/termination_invariant.rs
@@ -0,0 +1,223 @@
use std::{fs::File, path::Path};

use maplit::hashmap;
use relayer::GAS_EXPENDITURE_LOG_MESSAGE;

use crate::{
config::Config,
fetch_metric,
invariants::{SOL_MESSAGES_EXPECTED, SOL_MESSAGES_WITH_NON_MATCHING_IGP},
logging::log,
metrics::agent_balance_sum,
sealevel::solana::*,
utils::get_matching_lines,
AGENT_LOGGING_DIR, RELAYER_METRICS_PORT, SCRAPER_METRICS_PORT,
};

/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
#[allow(clippy::unnecessary_get_then_check)] // TODO: `rustc` 1.80.1 clippy issue
pub fn termination_invariants_met(
config: &Config,
starting_relayer_balance: f64,
solana_cli_tools_path: &Path,
solana_config_path: &Path,
) -> eyre::Result<bool> {
let sol_messages_expected = SOL_MESSAGES_EXPECTED;
let sol_messages_with_non_matching_igp = SOL_MESSAGES_WITH_NON_MATCHING_IGP;

// this is total messages expected to be delivered
let total_messages_expected = sol_messages_expected;
let total_messages_dispatched = total_messages_expected + sol_messages_with_non_matching_igp;

let lengths = fetch_metric(
RELAYER_METRICS_PORT,
"hyperlane_submitter_queue_length",
&hashmap! {},
)?;
assert!(!lengths.is_empty(), "Could not find queue length metric");
if lengths.iter().sum::<u32>() != sol_messages_with_non_matching_igp {
log!(
"Relayer queues contain more messages than the zero-merkle-insertion ones. Lengths: {:?}",
lengths
);
return Ok(false);
};

// Also ensure the counter is as expected (total number of messages), summed
// across all mailboxes.
let msg_processed_count = fetch_metric(
RELAYER_METRICS_PORT,
"hyperlane_messages_processed_count",
&hashmap! {},
)?
.iter()
.sum::<u32>();
if msg_processed_count != total_messages_expected {
log!(
"Relayer has {} processed messages, expected {}",
msg_processed_count,
total_messages_expected
);
return Ok(false);
}

let gas_payment_events_count = fetch_metric(
RELAYER_METRICS_PORT,
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payments"},
)?
.iter()
.sum::<u32>();

let log_file_path = AGENT_LOGGING_DIR.join("RLY-output.log");
const STORING_NEW_MESSAGE_LOG_MESSAGE: &str = "Storing new message in db";
const LOOKING_FOR_EVENTS_LOG_MESSAGE: &str = "Looking for events in index range";
const HYPER_INCOMING_BODY_LOG_MESSAGE: &str = "incoming body completed";

const TX_ID_INDEXING_LOG_MESSAGE: &str = "Found log(s) for tx id";

let relayer_logfile = File::open(log_file_path)?;
let invariant_logs = &[
STORING_NEW_MESSAGE_LOG_MESSAGE,
LOOKING_FOR_EVENTS_LOG_MESSAGE,
GAS_EXPENDITURE_LOG_MESSAGE,
HYPER_INCOMING_BODY_LOG_MESSAGE,
TX_ID_INDEXING_LOG_MESSAGE,
];
let log_counts = get_matching_lines(&relayer_logfile, invariant_logs);
// Zero insertion messages don't reach `submit` stage where gas is spent, so we only expect these logs for the other messages.
// TODO: Sometimes we find more logs than expected. This may either mean that gas is deducted twice for the same message due to a bug,
// or that submitting the message transaction fails for some messages. Figure out which is the case and convert this check to
// strict equality.
// EDIT: Having had a quick look, it seems like there are some legitimate reverts happening in the confirm step
// (`Transaction attempting to process message either reverted or was reorged`)
// in which case more gas expenditure logs than messages are expected.
let gas_expenditure_log_count = log_counts.get(GAS_EXPENDITURE_LOG_MESSAGE).unwrap();
assert!(
gas_expenditure_log_count >= &total_messages_expected,
"Didn't record gas payment for all delivered messages. Got {} gas payment logs, expected at least {}",
gas_expenditure_log_count,
total_messages_expected
);
// These tests check that we fixed https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/3915, where some logs would not show up
assert!(
log_counts.get(STORING_NEW_MESSAGE_LOG_MESSAGE).unwrap() > &0,
"Didn't find any logs about storing messages in db"
);
assert!(
log_counts.get(LOOKING_FOR_EVENTS_LOG_MESSAGE).unwrap() > &0,
"Didn't find any logs about looking for events in index range"
);
let total_tx_id_log_count = log_counts.get(TX_ID_INDEXING_LOG_MESSAGE).unwrap();
assert!(
// there are 3 txid-indexed events:
// - relayer: merkle insertion and gas payment
// - scraper: gas payment
// some logs are emitted for multiple events, so requiring there to be at least
// `config.kathy_messages` logs is a reasonable approximation, since all three of these events
// are expected to be logged for each message.
*total_tx_id_log_count as u64 >= config.kathy_messages,
"Didn't find as many tx id logs as expected. Found {} and expected {}",
total_tx_id_log_count,
config.kathy_messages
);
assert!(
log_counts.get(HYPER_INCOMING_BODY_LOG_MESSAGE).is_none(),
"Verbose logs not expected at the log level set in e2e"
);

// TestSendReceiver randomly breaks gas payments up into
// two. So we expect at least as many gas payments as messages.
if gas_payment_events_count < total_messages_dispatched {
log!(
"Relayer has {} gas payment events, expected at least {}",
gas_payment_events_count,
total_messages_dispatched
);
return Ok(false);
}

let merkle_tree_max_sequence = fetch_metric(
RELAYER_METRICS_PORT,
"hyperlane_cursor_max_sequence",
&hashmap! {"event_type" => "merkle_tree_insertion"},
)?;
// check for each origin that the highest tree index seen by the syncer == # of messages sent + # of double insertions
// LHS: sum(merkle_tree_max_sequence) + len(merkle_tree_max_sequence) (each is index so we add 1 to each)
// RHS: total_messages_expected + non_matching_igp_messages + (config.kathy_messages as u32 / 4) * 2 (double insertions)
let non_zero_sequence_count =
merkle_tree_max_sequence.iter().filter(|&x| *x > 0).count() as u32;
assert_eq!(
merkle_tree_max_sequence.iter().sum::<u32>() + non_zero_sequence_count,
total_messages_expected + sol_messages_with_non_matching_igp
);

if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
log!("Solana termination invariants not met");
return Ok(false);
}

let dispatched_messages_scraped = fetch_metric(
SCRAPER_METRICS_PORT,
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_dispatch"},
)?
.iter()
.sum::<u32>();
if dispatched_messages_scraped != total_messages_dispatched {
log!(
"Scraper has scraped {} dispatched messages, expected {}",
dispatched_messages_scraped,
total_messages_dispatched,
);
return Ok(false);
}

let gas_payments_scraped = fetch_metric(
SCRAPER_METRICS_PORT,
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payment"},
)?
.iter()
.sum::<u32>();
if gas_payments_scraped != gas_payment_events_count {
log!(
"Scraper has scraped {} gas payments, expected {}",
gas_payments_scraped,
gas_payment_events_count
);
return Ok(false);
}

let delivered_messages_scraped = fetch_metric(
SCRAPER_METRICS_PORT,
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_delivery"},
)?
.iter()
.sum::<u32>();
if delivered_messages_scraped != total_messages_expected {
log!(
"Scraper has scraped {} delivered messages, expected {}",
delivered_messages_scraped,
total_messages_expected + sol_messages_with_non_matching_igp
);
return Ok(false);
}

let ending_relayer_balance: f64 =
agent_balance_sum(9092).expect("Failed to get relayer agent balance");
// Make sure the balance was correctly updated in the metrics.
if starting_relayer_balance <= ending_relayer_balance {
log!(
"Expected starting relayer balance to be greater than ending relayer balance, but got {} <= {}",
starting_relayer_balance,
ending_relayer_balance
);
return Ok(false);
}

log!("Termination invariants have been meet");
Ok(true)
}
44 changes: 43 additions & 1 deletion rust/main/utils/run-locally/src/utils.rs
@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::{Path, PathBuf};
use std::process::Child;
use std::process::{Child, Command};
use std::sync::{Arc, Mutex};
use std::thread::JoinHandle;

@@ -134,3 +134,45 @@ pub fn get_matching_lines(file: &File, search_strings: &[&str]) -> HashMap<Strin
}
matches
}

/// Returns absolute path to rust workspace
/// `/<...>/hyperlane-monorepo/rust/main`.
/// This gives us a more reliable way of building paths,
/// such as the path to the sealevel directory.
pub fn get_workspace_path() -> PathBuf {
let output = Command::new(env!("CARGO"))
.arg("locate-project")
.arg("--workspace")
.arg("--message-format=plain")
.output()
.expect("Failed to get workspace path")
.stdout;
let path_str = String::from_utf8(output).expect("Failed to parse workspace path");
let mut workspace_path = PathBuf::from(path_str);
// pop Cargo.toml from path
workspace_path.pop();
workspace_path
}

/// Returns absolute path to sealevel directory
/// `/<...>/hyperlane-monorepo/rust/sealevel`
pub fn get_sealevel_path(workspace_path: &Path) -> PathBuf {
concat_path(
workspace_path
.parent()
.expect("workspace path has no parent"),
"sealevel",
)
}

/// Returns absolute path to typescript infra directory
/// `/<...>/hyperlane-monorepo/typescript/infra`
pub fn get_ts_infra_path(workspace_path: &Path) -> PathBuf {
concat_path(
workspace_path
.parent()
.and_then(|p| p.parent())
.expect("workspace path has no parent x2"),
"typescript/infra",
)
}
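These helpers are the basis for the absolute paths used throughout the diff. A brief sketch of how they compose, mirroring the `sealevel_client` and `start_anvil` call sites above (the paths in comments assume a standard monorepo checkout):

```rust
// Sketch: derive sibling directories from the cargo workspace root instead
// of hard-coded "../.." strings.
let workspace_path = get_workspace_path();               // <repo>/rust/main
let sealevel_path = get_sealevel_path(&workspace_path);  // <repo>/rust/sealevel
let ts_infra_path = get_ts_infra_path(&workspace_path);  // <repo>/typescript/infra

// e.g. the sealevel client binary built under rust/sealevel/target/debug:
let sealevel_client_bin = concat_path(
    &sealevel_path,
    format!("{}/hyperlane-sealevel-client", SOLANA_AGENT_BIN_PATH),
);
```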
