feat: Add scraping of Sealevel transactions into E2E tests #4850
base: main
Changes from all commits
578d6c5
9e9cb63
8ce4aaf
ff41bad
@@ -36,7 +36,6 @@ impl LogMetaComposer {
     pub fn log_meta(
         &self,
         block: UiConfirmedBlock,
-        log_index: U256,
         pda_pubkey: &Pubkey,
         pda_slot: &Slot,
     ) -> Result<LogMeta, HyperlaneSealevelError> {
@@ -64,15 +63,17 @@
             )))?
         }

-        let (transaction_index, transaction_hash) =
-            transaction_hashes
-                .into_iter()
-                .next()
-                .ok_or(HyperlaneSealevelError::NoTransactions(format!(
+        let (transaction_index, transaction_hash, program_index) = transaction_hashes
+            .into_iter()
+            .next()
+            .ok_or(HyperlaneSealevelError::NoTransactions(format!(
                 "block which should contain {} transaction does not contain any after filtering",
                 self.transaction_description,
             )))?;

+        // Construct log index which will be increasing relative to block
Review comment: fwiw I view us as being able to define this however we want per VM, and don't think we necessarily need to replicate the EVM behavior here, but happy as it is
+        let log_index = U256::from((transaction_index << 8) + (program_index as usize));
Review comment: Noting that because the log index is used to determine "uniqueness" of an IGP payment, changing this may have some consequences. If we re-index a payment, we'll consider them as two separate payments because the log index is different. What I'm not sure about is if we'd expect to actually double-process any gas payments?
+
         let log_meta = LogMeta {
             address: self.program_id.to_bytes().into(),
             block_number: *pda_slot,
@@ -120,7 +121,8 @@ pub fn is_interchain_payment_instruction(instruction_data: &[u8]) -> bool {
 }

 /// This function searches for relevant transactions in the vector of provided transactions and
-/// returns the relative index and hashes of such transactions.
+/// returns the relative index and hashes of such transactions together with index of the relevant
+/// instruction.
 ///
 /// This function takes a program identifier and the identifier for PDA and searches transactions
 /// which act upon this program and the PDA.
@@ -144,16 +146,19 @@ fn search_transactions(
     program_id: &Pubkey,
     pda_pubkey: &Pubkey,
     is_specified_instruction: fn(&[u8]) -> bool,
-) -> Vec<(usize, H512)> {
+) -> Vec<(usize, H512, u8)> {
     transactions
         .into_iter()
         .enumerate()
-        .filter_map(|(index, tx)| filter_by_encoding(tx).map(|(tx, meta)| (index, tx, meta)))
-        .filter_map(|(index, tx, meta)| {
-            filter_by_validity(tx, meta)
-                .map(|(hash, account_keys, instructions)| (index, hash, account_keys, instructions))
+        .filter_map(|(txn_index, tx)| {
+            filter_by_encoding(tx).map(|(tx, meta)| (txn_index, tx, meta))
+        })
+        .filter_map(|(txn_index, tx, meta)| {
+            filter_by_validity(tx, meta).map(|(hash, account_keys, instructions)| {
+                (txn_index, hash, account_keys, instructions)
+            })
         })
-        .filter_map(|(index, hash, account_keys, instructions)| {
+        .filter_map(|(txn_index, hash, account_keys, instructions)| {
             filter_by_relevancy(
                 program_id,
                 pda_pubkey,
@@ -162,9 +167,9 @@ fn search_transactions(
                 instructions,
                 is_specified_instruction,
             )
-            .map(|hash| (index, hash))
+            .map(|(hash, program_index)| (txn_index, hash, program_index))
         })
-        .collect::<Vec<(usize, H512)>>()
+        .collect::<Vec<(usize, H512, u8)>>()
 }

 fn filter_by_relevancy(
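The refactor above is mostly about threading data through the iterator stages: the enumerated transaction index is renamed to `txn_index` and carried through every `filter_map`, and the final stage now also forwards the matched instruction's `program_index`, so the collected tuples become `(usize, H512, u8)`. A stripped-down sketch of that pattern with toy stand-in types (the stage functions below are placeholders, not the PR's real `filter_by_*` helpers):

```rust
// Toy illustration of threading an enumerated index through filter_map stages,
// as the refactored `search_transactions` does. The stage functions are
// stand-ins, not the PR's actual filters.
fn decode(tx: &str) -> Option<String> {
    (!tx.is_empty()).then(|| tx.to_uppercase())
}

fn relevant(tx: &str) -> Option<(String, u8)> {
    tx.starts_with("IGP").then(|| (format!("hash-of-{tx}"), 0))
}

fn search(transactions: Vec<&str>) -> Vec<(usize, String, u8)> {
    transactions
        .into_iter()
        .enumerate()
        // Stage 1: decode, keeping the transaction's index within the block.
        .filter_map(|(txn_index, tx)| decode(tx).map(|tx| (txn_index, tx)))
        // Stage 2: keep only relevant transactions, now also carrying the
        // index of the matching instruction (the `program_index` analogue).
        .filter_map(|(txn_index, tx)| {
            relevant(&tx).map(|(hash, program_index)| (txn_index, hash, program_index))
        })
        .collect()
}

fn main() {
    let found = search(vec!["igp-payment", "", "other"]);
    println!("{found:?}");
}
```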
@@ -174,7 +179,7 @@ fn filter_by_relevancy(
     account_keys: Vec<String>,
     instructions: Vec<UiCompiledInstruction>,
     is_specified_instruction: fn(&[u8]) -> bool,
-) -> Option<H512> {
+) -> Option<(H512, u8)> {
     let account_index_map = account_index_map(account_keys);

     let program_id_str = program_id.to_string();
@@ -213,7 +218,7 @@ fn filter_by_relevancy(
         return None;
     }

-    Some(hash)
+    Some((hash, program.program_id_index))
 }

 fn filter_by_validity(
Review comment: should we add some invariants to the e2e test to make sure that these sealevel events / txs are being indexed? similar to what we do for the EVM things

@@ -448,19 +448,19 @@ fn main() -> ExitCode {

     state.push_agent(relayer_env.spawn("RLY", Some(&AGENT_LOGGING_DIR)));

-    log!("Setup complete! Agents running in background...");
-    log!("Ctrl+C to end execution...");
-
     if let Some((solana_config_path, (_, solana_path))) =
         solana_config_path.clone().zip(solana_paths.clone())
     {
-        // Send some sealevel messages before spinning up the agents, to test the backward indexing cursor
+        // Send some sealevel messages after spinning up the agents, to test the backward indexing cursor
         for _i in 0..(SOL_MESSAGES_EXPECTED / 2) {
             initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone())
                 .join();
         }
     }

+    log!("Setup complete! Agents running in background...");
+    log!("Ctrl+C to end execution...");
+
     // Send half the kathy messages after the relayer comes up
     kathy_env_double_insertion.clone().run().join();
     kathy_env_zero_insertion.clone().run().join();
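On the reviewer's point about invariants: a rough sketch of what a Sealevel-specific termination check could look like, assuming a helper that can count the messages the scraper has persisted (the function names and the hard-coded count below are placeholders for illustration, not helpers that exist in the repo):

```rust
// Hypothetical invariant check, sketched for illustration only: compare the
// number of Sealevel messages the scraper has persisted against the number
// the test dispatched. The query described in the comment is an assumption.
use std::error::Error;

fn fetch_scraped_sealevel_message_count() -> Result<u32, Box<dyn Error>> {
    // In a real test this would query the scraper's Postgres instance,
    // e.g. count rows in the message table for the Sealevel origin domain.
    // Hard-coded here so the sketch compiles on its own.
    Ok(0)
}

fn sealevel_termination_invariant_met(sol_messages_expected: u32) -> Result<bool, Box<dyn Error>> {
    let scraped = fetch_scraped_sealevel_message_count()?;
    if scraped != sol_messages_expected {
        // Not an error: the scraper may simply not have caught up yet, so the
        // e2e loop would retry until the invariant holds or the test times out.
        return Ok(false);
    }
    Ok(true)
}

fn main() -> Result<(), Box<dyn Error>> {
    let ok = sealevel_termination_invariant_met(0)?;
    println!("sealevel scraper invariant met: {ok}");
    Ok(())
}
```

The real check would presumably sit next to the existing EVM termination invariants and compare against `SOL_MESSAGES_EXPECTED` from the test setup above.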
Review comment: Something I'm wondering - is the scraper indexing the SVM IGP payments and deliveries in a sequence aware fashion, or using the rate limited / watermark approach?

In the relayer it indexes SVM IGP payments in a sequence aware manner, but other ones in a rate limited / watermark approach. We'll want to do the same in the scraper, otherwise I'd expect bugs to occur.

For reference, this is how we do it in the relayer. We create ContractSyncs here:
hyperlane-monorepo/rust/main/agents/relayer/src/relayer.rs, line 161 in 5db46bd
hyperlane-monorepo/rust/main/hyperlane-base/src/settings/base.rs, lines 205 to 250 in 5db46bd

A `sequenced_contract_sync` is used if the CursorType is SequenceAware (the case for SVM IGP payments), and a `watermark_contract_sync` otherwise. The db used by the sequence aware cursors also requires it to impl `HyperlaneSequenceAwareIndexerStoreReader`:
hyperlane-monorepo/rust/main/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs, line 31 in 5db46bd

hyperlane-monorepo/rust/main/agents/scraper/src/chain_scraper/mod.rs, line 393 in 5db46bd
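To illustrate the split the reviewer describes, here is a simplified sketch of dispatching on the cursor type, with a sequence-aware sync used for SVM IGP payments and a watermark sync for everything else (the enum and struct below are stand-ins, not hyperlane-base's actual ContractSync API):

```rust
// Illustrative sketch only: mirrors the dispatch described in the comment
// (sequence-aware sync for SVM IGP payments, watermark sync otherwise).
// These types are simplified stand-ins, not hyperlane-base's real API.

#[derive(Debug, Clone, Copy, PartialEq)]
enum CursorType {
    SequenceAware,
    RateLimited,
}

#[derive(Debug)]
enum ContractSync {
    // Requires a store that can report indexed sequences and counts,
    // i.e. something like `HyperlaneSequenceAwareIndexerStoreReader`.
    Sequenced { domain: String },
    // Only needs a watermark (highest indexed block) persisted per domain.
    Watermark { domain: String },
}

fn build_contract_sync(domain: &str, cursor_type: CursorType) -> ContractSync {
    match cursor_type {
        CursorType::SequenceAware => ContractSync::Sequenced {
            domain: domain.to_string(),
        },
        CursorType::RateLimited => ContractSync::Watermark {
            domain: domain.to_string(),
        },
    }
}

fn main() {
    // SVM IGP payments would take the sequence-aware path...
    let igp_sync = build_contract_sync("solanamainnet", CursorType::SequenceAware);
    // ...while other event types on the same chain use the watermark path.
    let dispatch_sync = build_contract_sync("solanamainnet", CursorType::RateLimited);
    println!("{igp_sync:?}\n{dispatch_sync:?}");
}
```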