Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Stop allowing unused vars #73

Merged
merged 1 commit into the base branch from the source branch
Oct 28, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion consensus/src/consensus/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -359,7 +359,7 @@ impl Service for Consensus {
"consensus"
}

fn start(self: Arc<Consensus>, core: Arc<Core>) -> Vec<JoinHandle<()>> {
fn start(self: Arc<Consensus>, _core: Arc<Core>) -> Vec<JoinHandle<()>> {
self.init()
}

Expand Down
2 changes: 1 addition & 1 deletion consensus/src/consensus/test_consensus.rs
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ impl Service for TestConsensus {
"test-consensus"
}

fn start(self: Arc<TestConsensus>, core: Arc<Core>) -> Vec<JoinHandle<()>> {
fn start(self: Arc<TestConsensus>, _core: Arc<Core>) -> Vec<JoinHandle<()>> {
self.init()
}

Expand Down
2 changes: 1 addition & 1 deletion consensus/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// Until the codebase stables up, we will have a lot of these -- ignore for now
// TODO: remove this
#![allow(dead_code, unused_variables)]
#![allow(dead_code)]

pub mod consensus;
pub mod constants;
Expand Down
1 change: 0 additions & 1 deletion consensus/src/model/stores/reachability.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,6 @@ impl ReachabilityStore for DbReachabilityStore {

fn insert_future_covering_item(&mut self, hash: Hash, fci: Hash, insertion_index: usize) -> Result<(), StoreError> {
let mut data = self.cached_access.read(hash)?;
let height = data.height;
let mut_data = Arc::make_mut(&mut data);
Arc::make_mut(&mut mut_data.future_covering_set).insert(insertion_index, fci);
self.cached_access.write(DirectDbWriter::new(&self.raw_db), hash, &data)?;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ impl BlockBodyProcessor {
self.check_block_is_not_pruned(block)
}

fn check_block_is_not_pruned(self: &Arc<Self>, block: &Block) -> BlockProcessResult<()> {
fn check_block_is_not_pruned(self: &Arc<Self>, _block: &Block) -> BlockProcessResult<()> {
// TODO: In kaspad code it checks that the block is not in the past of the current tips.
// We should decide what's the best indication that a block was pruned.
Ok(())
Expand Down
16 changes: 4 additions & 12 deletions consensus/src/pipeline/header_processor/post_pow_validation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ impl HeaderProcessor {
self.check_blue_score(ctx, header)?;
self.check_blue_work(ctx, header)?;
self.check_median_timestamp(ctx, header)?;
self.check_merge_size_limit(ctx, header)?;
self.check_bounded_merge_depth(ctx, header)?;
self.check_merge_size_limit(ctx)?;
self.check_bounded_merge_depth(ctx)?;
self.check_pruning_point(ctx, header)?;
self.check_indirect_parents(ctx, header)
}
Expand All @@ -36,11 +36,7 @@ impl HeaderProcessor {
Ok(())
}

pub fn check_merge_size_limit(
self: &Arc<HeaderProcessor>,
ctx: &mut HeaderProcessingContext,
header: &Header,
) -> BlockProcessResult<()> {
pub fn check_merge_size_limit(self: &Arc<HeaderProcessor>, ctx: &mut HeaderProcessingContext) -> BlockProcessResult<()> {
let mergeset_size = ctx.ghostdag_data.as_ref().unwrap().mergeset_size() as u64;

if mergeset_size > self.mergeset_size_limit {
Expand Down Expand Up @@ -103,11 +99,7 @@ impl HeaderProcessor {
Ok(())
}

pub fn check_bounded_merge_depth(
self: &Arc<HeaderProcessor>,
ctx: &mut HeaderProcessingContext,
header: &Header,
) -> BlockProcessResult<()> {
pub fn check_bounded_merge_depth(self: &Arc<HeaderProcessor>, ctx: &mut HeaderProcessingContext) -> BlockProcessResult<()> {
let gd_data = ctx.ghostdag_data.as_ref().unwrap();
let merge_depth_root = self.depth_manager.calc_merge_depth_root(gd_data, ctx.pruning_point());
let finality_point = self.depth_manager.calc_finality_point(gd_data, ctx.pruning_point());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ impl HeaderProcessor {

self.validate_header_in_isolation(header)?;
self.check_parents_exist(header)?;
self.check_parents_incest(ctx, header)?;
self.check_parents_incest(ctx)?;
Ok(())
}

Expand Down Expand Up @@ -92,11 +92,7 @@ impl HeaderProcessor {
Ok(())
}

fn check_parents_incest(
self: &Arc<HeaderProcessor>,
ctx: &mut HeaderProcessingContext,
header: &Header,
) -> BlockProcessResult<()> {
fn check_parents_incest(self: &Arc<HeaderProcessor>, ctx: &mut HeaderProcessingContext) -> BlockProcessResult<()> {
let parents = ctx.get_non_pruned_parents();
for parent_a in parents.iter() {
for parent_b in parents.iter() {
Expand Down
8 changes: 2 additions & 6 deletions consensus/src/pipeline/header_processor/pre_pow_validation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,13 @@ impl HeaderProcessor {
return Ok(());
}

self.check_pruning_violation(ctx, header)?;
self.check_pruning_violation(ctx)?;
self.check_pow_and_calc_block_level(ctx, header)?;
self.check_difficulty_and_daa_score(ctx, header)?;
Ok(())
}

fn check_pruning_violation(
self: &Arc<HeaderProcessor>,
ctx: &mut HeaderProcessingContext,
header: &Header,
) -> BlockProcessResult<()> {
fn check_pruning_violation(self: &Arc<HeaderProcessor>, ctx: &mut HeaderProcessingContext) -> BlockProcessResult<()> {
let non_pruned_parents = ctx.get_non_pruned_parents();
if non_pruned_parents.is_empty() {
return Ok(());
Expand Down
1 change: 0 additions & 1 deletion consensus/src/pipeline/virtual_processor/processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,6 @@ impl VirtualStateProcessor {
let ghostdag_data = self.ghostdag_store.get_compact_data(virtual_sp).unwrap();
let pruning_read_guard = self.pruning_store.upgradable_read();
let current_pruning_info = pruning_read_guard.get().unwrap();
let current_pp_bs = self.ghostdag_store.get_blue_score(current_pruning_info.pruning_point).unwrap();
let (new_pruning_points, new_candidate) = self.pruning_manager.next_pruning_points_and_candidate_by_ghostdag_data(
ghostdag_data,
None,
Expand Down
6 changes: 3 additions & 3 deletions consensus/src/pipeline/virtual_processor/utxo_validation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -135,9 +135,9 @@ impl VirtualStateProcessor {

fn verify_coinbase_transaction(
self: &Arc<Self>,
coinbase_tx: &Transaction,
mergeset_data: &GhostdagData,
mergeset_fees: &BlockHashMap<u64>,
_coinbase_tx: &Transaction,
_mergeset_data: &GhostdagData,
_mergeset_fees: &BlockHashMap<u64>,
) -> BlockProcessResult<()> {
// TODO: build expected coinbase using `mergeset_fees` and compare with the given tx
// Return `Err(BadCoinbaseTransaction)` if the expected and actual defer
Expand Down
5 changes: 3 additions & 2 deletions consensus/src/processes/parents_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ impl<T: HeaderStoreReader, U: ReachabilityStoreReader, V: RelationsStoreReader>
.expect("at least one of the parents is expected to be in the future of the pruning point");
direct_parent_headers.swap(0, first_parent_in_future_of_pruning_point_index);

let mut candidates_by_level_to_reference_blocks_map = (0..self.max_block_level + 1).map(|level| HashMap::new()).collect_vec();
let mut candidates_by_level_to_reference_blocks_map = (0..self.max_block_level + 1).map(|_| HashMap::new()).collect_vec();
// Direct parents are guaranteed to be in one other's anticones so add them all to
// all the block levels they occupy.
for direct_parent_header in direct_parent_headers.iter() {
Expand Down Expand Up @@ -158,7 +158,6 @@ impl<T: HeaderStoreReader, U: ReachabilityStoreReader, V: RelationsStoreReader>
break;
}

let level_blocks = reference_blocks_map.keys().copied().collect_vec();
parents.push(reference_blocks_map.keys().copied().collect_vec());
}

Expand Down Expand Up @@ -220,6 +219,7 @@ mod tests {
}
}

#[allow(unused_variables)]
impl HeaderStoreReader for HeaderStoreMock {
fn get_daa_score(&self, hash: hashes::Hash) -> Result<u64, StoreError> {
todo!()
Expand Down Expand Up @@ -254,6 +254,7 @@ mod tests {
pub children: BlockHashes,
}

#[allow(unused_variables)]
impl RelationsStoreReader for RelationsStoreMock {
fn get_parents(&self, hash: Hash) -> Result<consensus_core::blockhash::BlockHashes, StoreError> {
todo!()
Expand Down
4 changes: 2 additions & 2 deletions consensus/src/processes/reachability/inquirer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,8 +136,8 @@ pub(super) fn get_next_chain_ancestor_unchecked(
ancestor: Hash,
) -> Result<Hash> {
match binary_search_descendant(store, store.get_children(ancestor)?.as_slice(), descendant)? {
SearchOutput::Found(hash, i) => Ok(hash),
SearchOutput::NotFound(i) => Err(ReachabilityError::BadQuery),
SearchOutput::Found(hash, _) => Ok(hash),
SearchOutput::NotFound(_) => Err(ReachabilityError::BadQuery),
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ impl TransactionValidator {
if let Some((index, (input, entry))) = tx
.populated_inputs()
.enumerate()
.find(|(index, (input, entry))| entry.is_coinbase && entry.block_daa_score + self.coinbase_maturity > pov_daa_score)
.find(|(_, (_, entry))| entry.is_coinbase && entry.block_daa_score + self.coinbase_maturity > pov_daa_score)
{
return Err(TxRuleError::ImmatureCoinbaseSpend(
index,
Expand Down Expand Up @@ -65,10 +65,8 @@ impl TransactionValidator {

fn check_sequence_lock(tx: &PopulatedTransaction, pov_daa_score: u64) -> TxResult<()> {
let pov_daa_score: i64 = pov_daa_score as i64;
if tx
.populated_inputs()
.filter(|(input, entry)| input.sequence & SEQUENCE_LOCK_TIME_DISABLED != SEQUENCE_LOCK_TIME_DISABLED)
.any(|(input, entry)| {
if tx.populated_inputs().filter(|(input, _)| input.sequence & SEQUENCE_LOCK_TIME_DISABLED != SEQUENCE_LOCK_TIME_DISABLED).any(
|(input, entry)| {
// Given a sequence number, we apply the relative time lock
// mask in order to obtain the time lock delta required before
// this input can be spent.
Expand All @@ -85,19 +83,19 @@ impl TransactionValidator {
let lock_daa_score = entry.block_daa_score as i64 + relative_lock - 1;

lock_daa_score >= pov_daa_score
})
{
},
) {
return Err(TxRuleError::SequenceLockConditionsAreNotMet);
}
Ok(())
}

fn check_sig_op_counts(tx: &PopulatedTransaction) -> TxResult<()> {
fn check_sig_op_counts(_tx: &PopulatedTransaction) -> TxResult<()> {
// TODO: Implement this
Ok(())
}

fn check_scripts(tx: &PopulatedTransaction) -> TxResult<()> {
fn check_scripts(_tx: &PopulatedTransaction) -> TxResult<()> {
// TODO: Implement this
Ok(())
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -265,15 +265,15 @@ mod tests {
assert_match!(tv.validate_tx_in_isolation(&tx), Err(TxRuleError::NoTxInputs));

let mut tx = valid_tx.clone();
tx.inputs = (0..params.max_tx_inputs + 1).map(|i| valid_tx.inputs[0].clone()).collect();
tx.inputs = (0..params.max_tx_inputs + 1).map(|_| valid_tx.inputs[0].clone()).collect();
assert_match!(tv.validate_tx_in_isolation(&tx), Err(TxRuleError::TooManyInputs(_, _)));

let mut tx = valid_tx.clone();
tx.inputs[0].signature_script = vec![0; params.max_signature_script_len + 1];
assert_match!(tv.validate_tx_in_isolation(&tx), Err(TxRuleError::TooBigSignatureScript(_, _)));

let mut tx = valid_tx.clone();
tx.outputs = (0..params.max_tx_outputs + 1).map(|i| valid_tx.outputs[0].clone()).collect();
tx.outputs = (0..params.max_tx_outputs + 1).map(|_| valid_tx.outputs[0].clone()).collect();
assert_match!(tv.validate_tx_in_isolation(&tx), Err(TxRuleError::TooManyOutputs(_, _)));

let mut tx = valid_tx.clone();
Expand Down