fix: sponge pad panics on input
closes #44
vlopes11 committed Jan 26, 2023
1 parent 6de7730 commit 7dba1c2
Showing 2 changed files with 31 additions and 16 deletions.
27 changes: 11 additions & 16 deletions src/hash/rpo/mod.rs
@@ -97,11 +97,7 @@ impl Hasher for Rpo256 {
// compute the number of elements required to represent the string; we will be processing
// the string in BINARY_CHUNK_SIZE-byte chunks, thus the number of elements will be equal
// to the number of such chunks (including a potential partial chunk at the end).
let num_elements = if bytes.len() % BINARY_CHUNK_SIZE == 0 {
bytes.len() / BINARY_CHUNK_SIZE
} else {
bytes.len() / BINARY_CHUNK_SIZE + 1
};
let num_elements = bytes.len() / BINARY_CHUNK_SIZE;
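A minimal standalone sketch of the chunk-count arithmetic touched by this hunk; BINARY_CHUNK_SIZE = 7 is assumed here purely for illustration (seven little-endian bytes always map to a valid 64-bit field element) and is not stated in the diff itself.

const BINARY_CHUNK_SIZE: usize = 7; // assumed value, for illustration only

// floor count (the new one-liner) vs. ceiling count (the removed if/else)
fn chunk_counts(len: usize) -> (usize, usize) {
    let floor = len / BINARY_CHUNK_SIZE;
    let ceil = if len % BINARY_CHUNK_SIZE == 0 { floor } else { floor + 1 };
    (floor, ceil)
}

fn main() {
    // 113 = 16 * 7 + 1: sixteen full chunks plus a one-byte partial chunk
    assert_eq!(chunk_counts(113), (16, 17));
    // 112 = 16 * 7: the two computations agree when there is no partial chunk
    assert_eq!(chunk_counts(112), (16, 16));
}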

// initialize state to all zeros, except for the first element of the capacity part, which
// is set to the number of elements to be hashed. this is done so that adding zero elements
@@ -116,18 +112,15 @@ impl Hasher for Rpo256 {
let mut i = 0;
let mut buf = [0_u8; 8];
for chunk in bytes.chunks(BINARY_CHUNK_SIZE) {
if i < num_elements - 1 {
buf[..BINARY_CHUNK_SIZE].copy_from_slice(chunk);
} else {
// if we are dealing with the last chunk, it may be smaller than BINARY_CHUNK_SIZE
// bytes long, so we need to handle it slightly differently. We also append a byte
// with value 1 to the end of the string; this pads the string in such a way that
// adding trailing zeros results in a different hash
let chunk_len = chunk.len();
buf = [0_u8; 8];
buf[..chunk_len].copy_from_slice(chunk);
buf[chunk_len] = 1;
// if we are dealing with the last chunk, it may be smaller than BINARY_CHUNK_SIZE
// bytes long, so we need to handle it slightly differently. We also append a byte
// with value 1 to the end of the string; this pads the string in such a way that
// adding trailing zeros results in a different hash
if chunk.len() < BINARY_CHUNK_SIZE {
buf.fill(0);
buf[chunk.len()] = 1;
}
buf[..BINARY_CHUNK_SIZE.min(chunk.len())].copy_from_slice(chunk);

// convert the bytes into a field element and absorb it into the rate portion of the
// state; if the rate is filled up, apply the Rescue permutation and start absorbing
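The same last-chunk handling, pulled out into a small self-contained sketch; the helper name pack_chunk and the standalone layout are illustrative and not part of the repository's API, while the buffer logic mirrors the lines added above.

const BINARY_CHUNK_SIZE: usize = 7; // assumed value, as above

// mirrors the new per-chunk logic: a partial (final) chunk zeroes the buffer and writes
// the 0x01 terminator right after its bytes; full chunks are copied over as-is.
fn pack_chunk(chunk: &[u8], buf: &mut [u8; 8]) -> u64 {
    if chunk.len() < BINARY_CHUNK_SIZE {
        buf.fill(0);
        buf[chunk.len()] = 1;
    }
    buf[..BINARY_CHUNK_SIZE.min(chunk.len())].copy_from_slice(chunk);
    u64::from_le_bytes(*buf)
}

fn main() {
    let bytes = [0xAA_u8; 9]; // 9 = 7 + 2: one full chunk and one two-byte partial chunk
    let mut buf = [0_u8; 8];
    for chunk in bytes.chunks(BINARY_CHUNK_SIZE) {
        let _element = pack_chunk(chunk, &mut buf); // no length mismatch, so no panic
    }
}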
@@ -145,6 +138,8 @@ impl Hasher for Rpo256 {
// we don't need to apply any extra padding because we injected the total number of elements
// in the input list into the capacity portion of the state during initialization.
if i > 0 {
state[RATE_RANGE.start + i..RATE_RANGE.end].fill(ZERO);
state[RATE_RANGE.start + i] = ONE;
Self::apply_permutation(&mut state);
}
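The two added lines amount to padding the unused tail of the rate before the final permutation; the sketch below shows the effect on a plain array, with RATE_WIDTH = 8 and u64 stand-ins for field elements assumed purely for illustration.

const RATE_WIDTH: usize = 8; // assumed rate width, for illustration only

// `absorbed` is in 1..RATE_WIDTH here, since the absorb loop resets its counter to
// zero whenever a full block has been permuted.
fn pad_rate(rate: &mut [u64; RATE_WIDTH], absorbed: usize) {
    rate[absorbed..].fill(0); // clear every slot after the last absorbed element
    rate[absorbed] = 1; // mark the boundary, as in `state[RATE_RANGE.start + i] = ONE`
    // a real implementation would apply the permutation at this point
}

fn main() {
    let mut rate = [7_u64; RATE_WIDTH];
    pad_rate(&mut rate, 3);
    assert_eq!(rate, [7, 7, 7, 1, 0, 0, 0, 0]);
}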

20 changes: 20 additions & 0 deletions src/hash/rpo/tests.rs
@@ -2,7 +2,9 @@ use super::{
Felt, FieldElement, Hasher, Rpo256, RpoDigest, StarkField, ALPHA, CAPACITY_RANGE, DIGEST_RANGE,
INV_ALPHA, RATE_RANGE, STATE_WIDTH, ZERO,
};
use crate::utils::collections::Vec;
use core::convert::TryInto;
use proptest::prelude::*;
use rand_utils::rand_value;

#[test]
@@ -200,6 +202,24 @@ fn hash_test_vectors() {
}
}

#[test]
fn sponge_bytes_with_remainder_length_wont_panic() {
// this test asserts that hashing does not panic for the edge case of an input whose length
// is not divisible by the binary chunk size. 113 is a non-trivial input length and is prime,
// so it is guaranteed not to be divisible by any chunk size greater than one.
//
// this is a preliminary check ahead of the proptest-based stress test below.
Rpo256::hash(&vec![0_u8; 113]);
}
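A quick check of the comment's arithmetic (again assuming a chunk size of 7, which the test itself does not spell out):

fn main() {
    assert_eq!(113 / 7, 16); // sixteen full 7-byte chunks ...
    assert_eq!(113 % 7, 1); // ... plus a one-byte remainder chunk
    assert!((2..113u32).all(|c| 113 % c != 0)); // 113 is prime (trial division)
}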

proptest! {
#[test]
fn rpo256_wont_panic_with_arbitrary_input(ref vec in any::<Vec<u8>>()) {
Rpo256::hash(&vec);
}
}

const EXPECTED: [[Felt; 4]; 19] = [
[
Felt::new(1502364727743950833),
