Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: Apply Clippy lint collection_is_never_read #5654

Merged
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 0 additions & 13 deletions stackslib/src/burnchains/tests/burnchain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -698,32 +698,21 @@ fn test_burn_snapshot_sequence() {
initial_reward_start_block: first_block_height,
};

let mut leader_private_keys = vec![];
let mut leader_public_keys = vec![];
let mut leader_bitcoin_public_keys = vec![];
let mut leader_bitcoin_addresses = vec![];

for i in 0..32 {
let mut csprng: ThreadRng = thread_rng();
let vrf_privkey = VRFPrivateKey(ed25519_dalek::SigningKey::generate(&mut csprng));
let vrf_pubkey = VRFPublicKey::from_private(&vrf_privkey);

let privkey_hex = vrf_privkey.to_hex();
leader_private_keys.push(privkey_hex);

let pubkey_hex = vrf_pubkey.to_hex();
leader_public_keys.push(pubkey_hex);

let bitcoin_privkey = Secp256k1PrivateKey::new();
let bitcoin_publickey = BitcoinPublicKey::from_private(&bitcoin_privkey);

leader_bitcoin_public_keys.push(to_hex(&bitcoin_publickey.to_bytes()));

leader_bitcoin_addresses.push(BitcoinAddress::from_bytes_legacy(
BitcoinNetworkType::Testnet,
LegacyBitcoinAddressType::PublicKeyHash,
&Hash160::from_data(&bitcoin_publickey.to_bytes()).0,
));
}

let mut expected_burn_total: u64 = 0;
Expand All @@ -732,7 +721,6 @@ fn test_burn_snapshot_sequence() {
let mut db = SortitionDB::connect_test(first_block_height, &first_burn_hash).unwrap();
let mut prev_snapshot =
BlockSnapshot::initial(first_block_height, &first_burn_hash, first_block_height);
let mut all_stacks_block_hashes = vec![];

for i in 0..32 {
let mut block_ops = vec![];
Expand Down Expand Up @@ -823,7 +811,6 @@ fn test_burn_snapshot_sequence() {
burn_header_hash: burn_block_hash.clone(),
};

all_stacks_block_hashes.push(next_block_commit.block_header_hash.clone());
block_ops.push(BlockstackOperationType::LeaderBlockCommit(
next_block_commit,
));
Expand Down
4 changes: 0 additions & 4 deletions stackslib/src/burnchains/tests/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -919,8 +919,6 @@ fn test_update_block_descendancy_with_fork() {
let mut cmts_genesis = vec![];
let mut cmts_invalid = vec![];

let mut fork_parent = None;
let mut fork_parent_block_header: Option<BurnchainBlockHeader> = None;
let mut fork_cmts = vec![];

for i in 0..5 {
Expand Down Expand Up @@ -954,7 +952,6 @@ fn test_update_block_descendancy_with_fork() {
};

fork_headers.push(block_header.clone());
fork_parent_block_header = Some(block_header);
}

let mut am_id = 0;
Expand Down Expand Up @@ -1018,7 +1015,6 @@ fn test_update_block_descendancy_with_fork() {
fork_cmts.push(fork_cmt.clone());

parent = Some(cmt);
fork_parent = Some(fork_cmt);

if i == 0 {
am_id = {
Expand Down
33 changes: 0 additions & 33 deletions stackslib/src/chainstate/coordinator/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2263,7 +2263,6 @@ fn test_sortition_with_reward_set() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -2437,10 +2436,6 @@ fn test_sortition_with_reward_set() {
let new_burnchain_tip = burnchain.get_canonical_chain_tip().unwrap();
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down Expand Up @@ -2535,7 +2530,6 @@ fn test_sortition_with_burner_reward_set() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -2683,10 +2677,6 @@ fn test_sortition_with_burner_reward_set() {
let new_burnchain_tip = burnchain.get_canonical_chain_tip().unwrap();
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down Expand Up @@ -2799,7 +2789,6 @@ fn test_pox_btc_ops() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// track the reward set consumption
let mut reward_cycle_count = 0;
Expand Down Expand Up @@ -2967,10 +2956,6 @@ fn test_pox_btc_ops() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -3091,7 +3076,6 @@ fn test_stx_transfer_btc_ops() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// track the reward set consumption
let mut reward_recipients = HashSet::new();
Expand Down Expand Up @@ -3314,10 +3298,6 @@ fn test_stx_transfer_btc_ops() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -5298,7 +5278,6 @@ fn test_sortition_with_sunset() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -5482,10 +5461,6 @@ fn test_sortition_with_sunset() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -5611,7 +5586,6 @@ fn test_sortition_with_sunset_and_epoch_switch() {
let mut started_first_reward_cycle = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// split up the vrf keys and committers so that we have some that will be mining "correctly"
// and some that will be producing bad outputs
Expand Down Expand Up @@ -5823,10 +5797,6 @@ fn test_sortition_with_sunset_and_epoch_switch() {
if b.is_reward_cycle_start(new_burnchain_tip.block_height) {
if new_burnchain_tip.block_height < sunset_ht {
started_first_reward_cycle = true;
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
let bhh = ic.get_last_anchor_block_hash().unwrap().unwrap();
anchor_blocks.push(bhh);
} else {
// store the anchor block for this sortition for later checking
let ic = sort_db.index_handle_at_tip();
Expand Down Expand Up @@ -6474,7 +6444,6 @@ fn test_pox_fork_out_of_order() {
let mut sortition_ids_diverged = false;
// process sequential blocks, and their sortitions...
let mut stacks_blocks: Vec<(SortitionId, StacksBlock)> = vec![];
let mut anchor_blocks = vec![];

// setup:
// 2 forks: 0 - 1 - 2 - 3 - 4 - 5 - 11 - 12 - 13 - 14 - 15
Expand Down Expand Up @@ -6555,8 +6524,6 @@ fn test_pox_fork_out_of_order() {
.unwrap()
.block_height
);

anchor_blocks.push(bhh);
}

let tip = SortitionDB::get_canonical_burn_chain_tip(sort_db.conn()).unwrap();
Expand Down
13 changes: 2 additions & 11 deletions stackslib/src/chainstate/nakamoto/coordinator/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,8 @@ fn advance_to_nakamoto(
let default_pox_addr =
PoxAddress::from_legacy(AddressHashMode::SerializeP2PKH, addr.bytes.clone());

// Stores the result of a function with side effects, so have Clippy ignore it
#[allow(clippy::collection_is_never_read)]
jbencin marked this conversation as resolved.
Show resolved Hide resolved
let mut tip = None;
for sortition_height in 0..11 {
// stack to pox-3 in cycle 7
Expand Down Expand Up @@ -347,9 +349,6 @@ fn replay_reward_cycle(
.step_by(reward_cycle_length)
.collect();

let mut indexes: Vec<_> = (0..stacks_blocks.len()).collect();
indexes.shuffle(&mut thread_rng());

for burn_ops in burn_ops.iter() {
let (_, _, consensus_hash) = peer.next_burnchain_block(burn_ops.clone());
}
Expand Down Expand Up @@ -845,7 +844,6 @@ fn block_descendant() {
boot_plan.pox_constants = pox_constants;

let mut peer = boot_plan.boot_into_nakamoto_peer(vec![], None);
let mut blocks = vec![];
let pox_constants = peer.sortdb().pox_constants.clone();
let first_burn_height = peer.sortdb().first_block_height;

Expand All @@ -854,7 +852,6 @@ fn block_descendant() {
loop {
let (block, burn_height, ..) =
peer.single_block_tenure(&private_key, |_| {}, |_| {}, |_| true);
blocks.push(block);

if pox_constants.is_in_prepare_phase(first_burn_height, burn_height + 1) {
info!("At prepare phase start"; "burn_height" => burn_height);
Expand Down Expand Up @@ -3206,9 +3203,6 @@ fn test_stacks_on_burnchain_ops() {
);

let mut all_blocks: Vec<NakamotoBlock> = vec![];
let mut all_burn_ops = vec![];
let mut consensus_hashes = vec![];
let mut fee_counts = vec![];
let stx_miner_key = peer.miner.nakamoto_miner_key();

let mut extra_burn_ops = vec![];
Expand Down Expand Up @@ -3406,8 +3400,6 @@ fn test_stacks_on_burnchain_ops() {
})
.sum::<u128>();

consensus_hashes.push(consensus_hash);
fee_counts.push(fees);
let mut blocks: Vec<NakamotoBlock> = blocks_and_sizes
.into_iter()
.map(|(block, _, _)| block)
Expand Down Expand Up @@ -3449,7 +3441,6 @@ fn test_stacks_on_burnchain_ops() {
);

all_blocks.append(&mut blocks);
all_burn_ops.push(burn_ops);
}

// check receipts for burn ops
Expand Down
2 changes: 0 additions & 2 deletions stackslib/src/chainstate/nakamoto/tests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2529,8 +2529,6 @@ fn parse_vote_for_aggregate_public_key_invalid() {
};
invalid_function_arg_reward_cycle.set_origin_nonce(1);

let mut account_nonces = std::collections::HashMap::new();
account_nonces.insert(invalid_contract_name.origin_address(), 1);
for (i, tx) in vec![
invalid_contract_address,
invalid_contract_name,
Expand Down
13 changes: 0 additions & 13 deletions stackslib/src/chainstate/stacks/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1146,19 +1146,6 @@ mod test {
StacksEpochId::latest(),
);

// remove all coinbases
let mut txs_anchored = vec![];

for tx in all_txs.iter() {
match tx.payload {
TransactionPayload::Coinbase(..) => {
continue;
}
_ => {}
}
txs_anchored.push(tx);
}

// make microblocks with 3 transactions each (or fewer)
for i in 0..(all_txs.len() / 3) {
let txs = vec![
Expand Down
4 changes: 4 additions & 0 deletions stackslib/src/chainstate/stacks/boot/pox_4_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -928,6 +928,8 @@ fn pox_lock_unlock() {

assert_eq!(burnchain.pox_constants.reward_slots(), 6);
let mut coinbase_nonce = 0;
// Stores the result of a function with side effects, so have Clippy ignore it
#[allow(clippy::collection_is_never_read)]
let mut latest_block = None;

// Advance into pox4
Expand Down Expand Up @@ -2685,6 +2687,8 @@ fn pox_4_delegate_stack_increase_events() {

assert_eq!(burnchain.pox_constants.reward_slots(), 6);
let mut coinbase_nonce = 0;
// Stores the result of a function with side effects, so have Clippy ignore it
#[allow(clippy::collection_is_never_read)]
let mut latest_block = None;

let alice_key = keys.pop().unwrap();
Expand Down
3 changes: 2 additions & 1 deletion stackslib/src/chainstate/stacks/db/transactions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,8 +72,9 @@ impl TryFrom<Value> for HashableClarityValue {

impl std::hash::Hash for HashableClarityValue {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
#[allow(clippy::unwrap_used)]
#[allow(clippy::unwrap_used, clippy::collection_is_never_read)]
// this unwrap is safe _as long as_ TryFrom<Value> was used as a constructor
// Also, this function has side effects, which causes Clippy to wrongly think `bytes` is unused
let bytes = self.0.serialize_to_vec().unwrap();
bytes.hash(state);
}
Expand Down
5 changes: 0 additions & 5 deletions stackslib/src/chainstate/stacks/index/test/trie.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1232,8 +1232,6 @@ fn trie_cursor_splice_leaf_4() {
let (nodes, node_ptrs, hashes) =
make_node_path(&mut f, node_id.to_u8(), &path_segments, [31u8; 40].to_vec());

let mut ptrs = vec![];

// splice in a node in each path segment
for k in 0..5 {
let mut path = vec![
Expand Down Expand Up @@ -1261,7 +1259,6 @@ fn trie_cursor_splice_leaf_4() {
&mut node,
)
.unwrap();
ptrs.push(new_ptr);

Trie::update_root_hash(&mut f, &c).unwrap();

Expand Down Expand Up @@ -1325,7 +1322,6 @@ fn trie_cursor_splice_leaf_2() {

let (nodes, node_ptrs, hashes) =
make_node_path(&mut f, node_id.to_u8(), &path_segments, [31u8; 40].to_vec());
let mut ptrs = vec![];

// splice in a node in each path segment
for k in 0..10 {
Expand All @@ -1350,7 +1346,6 @@ fn trie_cursor_splice_leaf_2() {
&mut node,
)
.unwrap();
ptrs.push(new_ptr);

Trie::update_root_hash(&mut f, &c).unwrap();

Expand Down
Loading
Loading