Skip to content

Commit

Permalink
Merge pull request #5654 from jbencin/chore/clippy-collection-is-neve…
Browse files Browse the repository at this point in the history
…r-read

chore: Apply Clippy lint `collection_is_never_read`
  • Loading branch information
jbencin authored Jan 29, 2025
2 parents 99ef43c + d57bff5 commit 9139fb2
Show file tree
Hide file tree
Showing 24 changed files with 20 additions and 206 deletions.
13 changes: 0 additions & 13 deletions stackslib/src/burnchains/tests/burnchain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -694,32 +694,21 @@ fn test_burn_snapshot_sequence() {
initial_reward_start_block: first_block_height,
};

let mut leader_private_keys = vec![];
let mut leader_public_keys = vec![];
let mut leader_bitcoin_public_keys = vec![];
let mut leader_bitcoin_addresses = vec![];

for i in 0..32 {
let mut csprng: ThreadRng = thread_rng();
let vrf_privkey = VRFPrivateKey(ed25519_dalek::SigningKey::generate(&mut csprng));
let vrf_pubkey = VRFPublicKey::from_private(&vrf_privkey);

let privkey_hex = vrf_privkey.to_hex();
leader_private_keys.push(privkey_hex);

let pubkey_hex = vrf_pubkey.to_hex();
leader_public_keys.push(pubkey_hex);

let bitcoin_privkey = Secp256k1PrivateKey::random();
let bitcoin_publickey = BitcoinPublicKey::from_private(&bitcoin_privkey);

leader_bitcoin_public_keys.push(to_hex(&bitcoin_publickey.to_bytes()));

leader_bitcoin_addresses.push(BitcoinAddress::from_bytes_legacy(
BitcoinNetworkType::Testnet,
LegacyBitcoinAddressType::PublicKeyHash,
&Hash160::from_data(&bitcoin_publickey.to_bytes()).0,
));
}

let mut expected_burn_total: u64 = 0;
Expand All @@ -728,7 +717,6 @@ fn test_burn_snapshot_sequence() {
let mut db = SortitionDB::connect_test(first_block_height, &first_burn_hash).unwrap();
let mut prev_snapshot =
BlockSnapshot::initial(first_block_height, &first_burn_hash, first_block_height);
let mut all_stacks_block_hashes = vec![];

for i in 0..32 {
let mut block_ops = vec![];
Expand Down Expand Up @@ -819,7 +807,6 @@ fn test_burn_snapshot_sequence() {
burn_header_hash: burn_block_hash.clone(),
};

all_stacks_block_hashes.push(next_block_commit.block_header_hash.clone());
block_ops.push(BlockstackOperationType::LeaderBlockCommit(
next_block_commit,
));
Expand Down
4 changes: 0 additions & 4 deletions stackslib/src/burnchains/tests/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -915,8 +915,6 @@ fn test_update_block_descendancy_with_fork() {
let mut cmts_genesis = vec![];
let mut cmts_invalid = vec![];

let mut fork_parent = None;
let mut fork_parent_block_header: Option<BurnchainBlockHeader> = None;
let mut fork_cmts = vec![];

for i in 0..5 {
Expand Down Expand Up @@ -950,7 +948,6 @@ fn test_update_block_descendancy_with_fork() {
};

fork_headers.push(block_header.clone());
fork_parent_block_header = Some(block_header);
}

let mut am_id = 0;
Expand Down Expand Up @@ -1014,7 +1011,6 @@ fn test_update_block_descendancy_with_fork() {
fork_cmts.push(fork_cmt.clone());

parent = Some(cmt);
fork_parent = Some(fork_cmt);

if i == 0 {
am_id = {
Expand Down
33 changes: 0 additions & 33 deletions stackslib/src/chainstate/coordinator/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2253,7 +2253,6 @@ fn test_sortition_with_reward_set() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -2427,10 +2426,6 @@ fn test_sortition_with_reward_set() {
let new_burnchain_tip = burnchain.get_canonical_chain_tip().unwrap();
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down Expand Up @@ -2525,7 +2520,6 @@ fn test_sortition_with_burner_reward_set() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -2673,10 +2667,6 @@ fn test_sortition_with_burner_reward_set() {
let new_burnchain_tip = burnchain.get_canonical_chain_tip().unwrap();
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down Expand Up @@ -2789,7 +2779,6 @@ fn test_pox_btc_ops() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// track the reward set consumption
let mut reward_cycle_count = 0;
Expand Down Expand Up @@ -2957,10 +2946,6 @@ fn test_pox_btc_ops() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -3081,7 +3066,6 @@ fn test_stx_transfer_btc_ops() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// track the reward set consumption
let mut reward_recipients = HashSet::new();
Expand Down Expand Up @@ -3304,10 +3288,6 @@ fn test_stx_transfer_btc_ops() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -5288,7 +5268,6 @@ fn test_sortition_with_sunset() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -5472,10 +5451,6 @@ fn test_sortition_with_sunset() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -5601,7 +5576,6 @@ fn test_sortition_with_sunset_and_epoch_switch() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -5813,10 +5787,6 @@ fn test_sortition_with_sunset_and_epoch_switch() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -6464,7 +6434,6 @@ fn test_pox_fork_out_of_order() {
let mut sortition_ids_diverged = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// setup:
// 2 forks: 0 - 1 - 2 - 3 - 4 - 5 - 11 - 12 - 13 - 14 - 15
Expand Down Expand Up @@ -6545,8 +6514,6 @@ fn test_pox_fork_out_of_order() {
.unwrap()
.block_height
);

anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down
14 changes: 1 addition & 13 deletions stackslib/src/chainstate/nakamoto/coordinator/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,6 @@ fn advance_to_nakamoto(
let default_pox_addr =
PoxAddress::from_legacy(AddressHashMode::SerializeP2PKH, addr.bytes().clone());

let mut tip = None;
for sortition_height in 0..11 {
// stack to pox-3 in cycle 7
let txs = if sortition_height == 6 {
Expand Down Expand Up @@ -156,7 +155,7 @@ fn advance_to_nakamoto(
vec![]
};

tip = Some(peer.tenure_with_txs(&txs, &mut peer_nonce));
peer.tenure_with_txs(&txs, &mut peer_nonce);
}
// peer is at the start of cycle 8
}
Expand Down Expand Up @@ -347,9 +346,6 @@ fn replay_reward_cycle(
.step_by(reward_cycle_length)
.collect();

let mut indexes: Vec<_> = (0..stacks_blocks.len()).collect();
indexes.shuffle(&mut thread_rng());

for burn_ops in burn_ops.iter() {
let (_, _, consensus_hash) = peer.next_burnchain_block(burn_ops.clone());
}
Expand Down Expand Up @@ -842,7 +838,6 @@ fn block_descendant() {
boot_plan.pox_constants = pox_constants;

let mut peer = boot_plan.boot_into_nakamoto_peer(vec![], None);
let mut blocks = vec![];
let pox_constants = peer.sortdb().pox_constants.clone();
let first_burn_height = peer.sortdb().first_block_height;

Expand All @@ -851,7 +846,6 @@ fn block_descendant() {
loop {
let (block, burn_height, ..) =
peer.single_block_tenure(&private_key, |_| {}, |_| {}, |_| true);
blocks.push(block);

if pox_constants.is_in_prepare_phase(first_burn_height, burn_height + 1) {
info!("At prepare phase start"; "burn_height" => burn_height);
Expand Down Expand Up @@ -3196,9 +3190,6 @@ fn test_stacks_on_burnchain_ops() {
);

let mut all_blocks: Vec<NakamotoBlock> = vec![];
let mut all_burn_ops = vec![];
let mut consensus_hashes = vec![];
let mut fee_counts = vec![];
let stx_miner_key = peer.miner.nakamoto_miner_key();

let mut extra_burn_ops = vec![];
Expand Down Expand Up @@ -3395,8 +3386,6 @@ fn test_stacks_on_burnchain_ops() {
})
.sum::<u128>();

consensus_hashes.push(consensus_hash);
fee_counts.push(fees);
let mut blocks: Vec<NakamotoBlock> = blocks_and_sizes
.into_iter()
.map(|(block, _, _)| block)
Expand Down Expand Up @@ -3438,7 +3427,6 @@ fn test_stacks_on_burnchain_ops() {
);

all_blocks.append(&mut blocks);
all_burn_ops.push(burn_ops);
}

// check receipts for burn ops
Expand Down
2 changes: 0 additions & 2 deletions stackslib/src/chainstate/nakamoto/tests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2518,8 +2518,6 @@ fn parse_vote_for_aggregate_public_key_invalid() {
};
invalid_function_arg_reward_cycle.set_origin_nonce(1);

let mut account_nonces = std::collections::HashMap::new();
account_nonces.insert(invalid_contract_name.origin_address(), 1);
for (i, tx) in vec![
invalid_contract_address,
invalid_contract_name,
Expand Down
13 changes: 0 additions & 13 deletions stackslib/src/chainstate/stacks/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1137,19 +1137,6 @@ mod test {
StacksEpochId::latest(),
);

// remove all coinbases
let mut txs_anchored = vec![];

for tx in all_txs.iter() {
match tx.payload {
TransactionPayload::Coinbase(..) => {
continue;
}
_ => {}
}
txs_anchored.push(tx);
}

// make microblocks with 3 transactions each (or fewer)
for i in 0..(all_txs.len() / 3) {
let txs = vec![
Expand Down
4 changes: 4 additions & 0 deletions stackslib/src/chainstate/stacks/boot/pox_4_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -928,6 +928,8 @@ fn pox_lock_unlock() {

assert_eq!(burnchain.pox_constants.reward_slots(), 6);
let mut coinbase_nonce = 0;
// Stores the result of a function with side effects, so have Clippy ignore it
#[allow(clippy::collection_is_never_read)]
let mut latest_block = None;

// Advance into pox4
Expand Down Expand Up @@ -2693,6 +2695,8 @@ fn pox_4_delegate_stack_increase_events() {

assert_eq!(burnchain.pox_constants.reward_slots(), 6);
let mut coinbase_nonce = 0;
// Stores the result of a function with side effects, so have Clippy ignore it
#[allow(clippy::collection_is_never_read)]
let mut latest_block = None;

let alice_key = keys.pop().unwrap();
Expand Down
3 changes: 2 additions & 1 deletion stackslib/src/chainstate/stacks/db/transactions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,8 +72,9 @@ impl TryFrom<Value> for HashableClarityValue {

impl std::hash::Hash for HashableClarityValue {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
#[allow(clippy::unwrap_used)]
#[allow(clippy::unwrap_used, clippy::collection_is_never_read)]
// this unwrap is safe _as long as_ TryFrom<Value> was used as a constructor
// Also, this function has side effects, which cause Clippy to wrongly think `bytes` is unused
let bytes = self.0.serialize_to_vec().unwrap();
bytes.hash(state);
}
Expand Down
5 changes: 0 additions & 5 deletions stackslib/src/chainstate/stacks/index/test/trie.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1232,8 +1232,6 @@ fn trie_cursor_splice_leaf_4() {
let (nodes, node_ptrs, hashes) =
make_node_path(&mut f, node_id.to_u8(), &path_segments, [31u8; 40].to_vec());

let mut ptrs = vec![];

// splice in a node in each path segment
for k in 0..5 {
let mut path = vec![
Expand Down Expand Up @@ -1261,7 +1259,6 @@ fn trie_cursor_splice_leaf_4() {
&mut node,
)
.unwrap();
ptrs.push(new_ptr);

Trie::update_root_hash(&mut f, &c).unwrap();

Expand Down Expand Up @@ -1325,7 +1322,6 @@ fn trie_cursor_splice_leaf_2() {

let (nodes, node_ptrs, hashes) =
make_node_path(&mut f, node_id.to_u8(), &path_segments, [31u8; 40].to_vec());
let mut ptrs = vec![];

// splice in a node in each path segment
for k in 0..10 {
Expand All @@ -1350,7 +1346,6 @@ fn trie_cursor_splice_leaf_2() {
&mut node,
)
.unwrap();
ptrs.push(new_ptr);

Trie::update_root_hash(&mut f, &c).unwrap();

Expand Down
Loading

0 comments on commit 9139fb2

Please sign in to comment.