fix(acir): Array dynamic flatten #4351

Merged: 10 commits, Feb 13, 2024
2 changes: 1 addition & 1 deletion .github/workflows/docker-test-flow.yml
@@ -805,4 +805,4 @@ jobs:
exit 0
fi
env:
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }}
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
2 changes: 1 addition & 1 deletion .github/workflows/test-js-packages.yml
@@ -516,4 +516,4 @@ jobs:
fi
env:
# We treat any skipped or failing jobs as a failure for the workflow as a whole.
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }}
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
5 changes: 3 additions & 2 deletions .github/workflows/test-rust-workspace.yml
@@ -52,6 +52,7 @@ jobs:
runs-on: ubuntu-latest
needs: [build-test-artifacts]
strategy:
fail-fast: false
matrix:
partition: [1, 2, 3, 4]
steps:
@@ -95,5 +96,5 @@ jobs:
exit 0
fi
env:
# We treat any skipped or failing jobs as a failure for the workflow as a whole.
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }}
# We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole.
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
28 changes: 22 additions & 6 deletions compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
@@ -1317,7 +1317,7 @@ impl AcirContext {
let mut witnesses = Vec::new();
for input in inputs {
let mut single_val_witnesses = Vec::new();
for (input, typ) in input.flatten() {
for (input, typ) in self.flatten(input)? {
// Intrinsics only accept Witnesses. This is not a limitation of the
// intrinsics, it's just how we have defined things. Ideally, we would allow
// constants too.
@@ -1399,16 +1399,32 @@ impl AcirContext {
self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type)
}

/// Recursive helper for flatten_values to flatten a single AcirValue into the result vector.
pub(crate) fn flatten_value(acir_vars: &mut Vec<AcirVar>, value: AcirValue) {
/// Recursive helper to flatten a single AcirValue into the result vector.
/// This helper differs from `flatten()` on the `AcirValue` type, as this method has access to the AcirContext
/// which lets us flatten an `AcirValue::DynamicArray` by reading its variables from memory.
pub(crate) fn flatten(
&mut self,
value: AcirValue,
) -> Result<Vec<(AcirVar, AcirType)>, InternalError> {
match value {
AcirValue::Var(acir_var, _) => acir_vars.push(acir_var),
AcirValue::Var(acir_var, typ) => Ok(vec![(acir_var, typ)]),
AcirValue::Array(array) => {
let mut values = Vec::new();
for value in array {
Self::flatten_value(acir_vars, value);
values.append(&mut self.flatten(value)?);
}
Ok(values)
}
AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => {
try_vecmap(0..len, |i| {
let index_var = self.add_constant(i);

Ok::<(AcirVar, AcirType), InternalError>((
self.read_from_memory(block_id, &index_var)?,
AcirType::field(),
))
})
}
AcirValue::DynamicArray(_) => unreachable!("Cannot flatten a dynamic array"),
}
}

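For illustration, here is a minimal, self-contained Rust sketch of the flattening pattern introduced above. The names (`Var`, `Typ`, `Value`, `Context`) are simplified stand-ins rather than the real `AcirVar`/`AcirType`/`AcirValue`/`AcirContext` API: plain variables are returned directly, nested arrays recurse, and a dynamic array is materialised by reading each element back out of its memory block.

```rust
#[derive(Clone, Copy, Debug)]
struct Var(usize);

#[derive(Clone, Copy, Debug, PartialEq)]
enum Typ {
    Field,
}

enum Value {
    Var(Var, Typ),
    Array(Vec<Value>),
    DynamicArray { block_id: usize, len: usize },
}

struct Context {
    // One Vec<Var> per memory block, indexed by block_id.
    memory: Vec<Vec<Var>>,
}

impl Context {
    fn read_from_memory(&mut self, block_id: usize, index: usize) -> Result<Var, String> {
        self.memory
            .get(block_id)
            .and_then(|block| block.get(index).copied())
            .ok_or_else(|| format!("no value at block {block_id}, index {index}"))
    }

    // Mirrors the recursive structure of the new `AcirContext::flatten`: vars are
    // returned as a single pair, arrays recurse, and dynamic arrays are read back
    // from memory one element at a time.
    fn flatten(&mut self, value: Value) -> Result<Vec<(Var, Typ)>, String> {
        match value {
            Value::Var(var, typ) => Ok(vec![(var, typ)]),
            Value::Array(values) => {
                let mut out = Vec::new();
                for value in values {
                    out.extend(self.flatten(value)?);
                }
                Ok(out)
            }
            Value::DynamicArray { block_id, len } => {
                let mut out = Vec::new();
                for i in 0..len {
                    // Element types are not tracked for dynamic arrays, so each
                    // value read from memory is tagged as a field element.
                    out.push((self.read_from_memory(block_id, i)?, Typ::Field));
                }
                Ok(out)
            }
        }
    }
}

fn main() -> Result<(), String> {
    let mut ctx = Context { memory: vec![vec![Var(10), Var(11), Var(12)]] };
    // One plain field var plus a length-3 dynamic array stored in memory block 0.
    let value = Value::Array(vec![
        Value::Var(Var(7), Typ::Field),
        Value::DynamicArray { block_id: 0, len: 3 },
    ]);
    let flattened = ctx.flatten(value)?;
    assert_eq!(flattened.len(), 4);
    println!("{flattened:?}");
    Ok(())
}
```

In the real change, each element read back from memory is likewise paired with `AcirType::field()`, and the error type is `InternalError` rather than a `String`.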
14 changes: 10 additions & 4 deletions compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -1344,7 +1344,7 @@ impl Context

// The return value may or may not be an array reference. Calling `flatten_value_list`
// will expand the array if there is one.
let return_acir_vars = self.flatten_value_list(return_values, dfg);
let return_acir_vars = self.flatten_value_list(return_values, dfg)?;
let mut warnings = Vec::new();
for acir_var in return_acir_vars {
if self.acir_context.is_constant(&acir_var) {
@@ -2129,13 +2129,19 @@ impl Context
/// Maps an ssa value list, for which some values may be references to arrays, by inlining
/// the `AcirVar`s corresponding to the contents of each array into the list of `AcirVar`s
/// that correspond to other values.
fn flatten_value_list(&mut self, arguments: &[ValueId], dfg: &DataFlowGraph) -> Vec<AcirVar> {
fn flatten_value_list(
&mut self,
arguments: &[ValueId],
dfg: &DataFlowGraph,
) -> Result<Vec<AcirVar>, InternalError> {
let mut acir_vars = Vec::with_capacity(arguments.len());
for value_id in arguments {
let value = self.convert_value(*value_id, dfg);
AcirContext::flatten_value(&mut acir_vars, value);
acir_vars.append(
&mut self.acir_context.flatten(value)?.iter().map(|(var, _)| *var).collect(),
);
}
acir_vars
Ok(acir_vars)
}

/// Convert a Vec<AcirVar> into a Vec<AcirValue> using the given result ids.
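A small standalone sketch of the calling pattern in the updated `flatten_value_list`, again with simplified stand-in types: each argument is flattened, only the vars are kept, and any flattening error is propagated to the caller with `?` instead of panicking.

```rust
type Var = u32;

enum Value {
    Scalar(Var),
    // A value whose contents may fail to resolve, mirroring how reading a dynamic
    // array back from memory can return an error.
    Dynamic(Result<Vec<Var>, String>),
}

// Flatten every argument, keep only the vars, and bubble errors up to the caller.
fn flatten_value_list(arguments: &[Value]) -> Result<Vec<Var>, String> {
    let mut acir_vars = Vec::with_capacity(arguments.len());
    for value in arguments {
        match value {
            Value::Scalar(var) => acir_vars.push(*var),
            Value::Dynamic(contents) => acir_vars.extend(contents.clone()?),
        }
    }
    Ok(acir_vars)
}

fn main() {
    let ok = flatten_value_list(&[Value::Scalar(1), Value::Dynamic(Ok(vec![2, 3]))]);
    assert_eq!(ok, Ok(vec![1, 2, 3]));

    let err = flatten_value_list(&[Value::Dynamic(Err("read failed".to_string()))]);
    assert!(err.is_err());
}
```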
12 changes: 8 additions & 4 deletions compiler/noirc_frontend/src/debug/mod.rs
@@ -291,6 +291,7 @@ impl DebugInstrumenter {
)
}
};

let ret_kind =
ast::StatementKind::Expression(id_expr(&ident("__debug_expr", expression_span)));

@@ -470,21 +471,24 @@ pub fn build_debug_crate_file() -> String {
.to_string(),
(1..=MAX_MEMBER_ASSIGN_DEPTH)
.map(|n| {
// The variable signature has to be generic as Noir supports using any polymorphic integer as an index.
// If we were to set a specific type for index signatures here, such as `Field`, we would error in
// type checking if we attempt to index with a different type such as `u8`.
let var_sig =
(0..n).map(|i| format!["_v{i}: Field"]).collect::<Vec<String>>().join(", ");
(0..n).map(|i| format!["_v{i}: Index"]).collect::<Vec<String>>().join(", ");
let vars = (0..n).map(|i| format!["_v{i}"]).collect::<Vec<String>>().join(", ");
format!(
r#"
#[oracle(__debug_member_assign_{n})]
unconstrained fn __debug_oracle_member_assign_{n}<T>(
unconstrained fn __debug_oracle_member_assign_{n}<T, Index>(
_var_id: u32, _value: T, {var_sig}
) {{}}
unconstrained fn __debug_inner_member_assign_{n}<T>(
unconstrained fn __debug_inner_member_assign_{n}<T, Index>(
var_id: u32, value: T, {var_sig}
) {{
__debug_oracle_member_assign_{n}(var_id, value, {vars});
}}
pub fn __debug_member_assign_{n}<T>(var_id: u32, value: T, {var_sig}) {{
pub fn __debug_member_assign_{n}<T, Index>(var_id: u32, value: T, {var_sig}) {{
__debug_inner_member_assign_{n}(var_id, value, {vars});
}}

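As a standalone illustration of the change above, the sketch below generates the source of one member-assign debug helper with the new generic `Index` parameter (a simplified, hypothetical extract of the string templating in `build_debug_crate_file`, not the full function):

```rust
// Builds the Noir source for the depth-`n` member-assign debug helpers. Making the
// index parameters generic over `Index` (instead of hard-coding `Field`) lets callers
// index with any integer type, such as `u8`, without a type-checking error.
fn member_assign_source(n: usize) -> String {
    let var_sig = (0..n).map(|i| format!("_v{i}: Index")).collect::<Vec<String>>().join(", ");
    let vars = (0..n).map(|i| format!("_v{i}")).collect::<Vec<String>>().join(", ");
    format!(
        r#"
#[oracle(__debug_member_assign_{n})]
unconstrained fn __debug_oracle_member_assign_{n}<T, Index>(
    _var_id: u32, _value: T, {var_sig}
) {{}}
unconstrained fn __debug_inner_member_assign_{n}<T, Index>(
    var_id: u32, value: T, {var_sig}
) {{
    __debug_oracle_member_assign_{n}(var_id, value, {vars});
}}
pub fn __debug_member_assign_{n}<T, Index>(var_id: u32, value: T, {var_sig}) {{
    __debug_inner_member_assign_{n}(var_id, value, {vars});
}}
"#
    )
}

fn main() {
    // For n = 2, `{var_sig}` expands to "_v0: Index, _v1: Index".
    println!("{}", member_assign_source(2));
}
```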
1 change: 0 additions & 1 deletion compiler/noirc_frontend/src/hir_def/types.rs
@@ -931,7 +931,6 @@ impl Type {
};

let this = self.substitute(bindings);

match &this {
Type::FieldElement | Type::Integer(..) => {
bindings.insert(target_id, (var.clone(), this));
7 changes: 7 additions & 0 deletions test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml
@@ -0,0 +1,7 @@
[package]
name = "array_dynamic_blackbox_input"
type = "bin"
authors = [""]
compiler_version = ">=0.23.0"

[dependencies]
4 changes: 4 additions & 0 deletions test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml
@@ -0,0 +1,4 @@
index = "1"
leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"]
path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"]
root = ["243", "212", "223", "132", "202", "119", "167", "60", "162", "158", "66", "192", "88", "114", "34", "191", "202", "195", "19", "102", "150", "88", "222", "176", "35", "51", "110", "97", "204", "224", "253", "171"]
27 changes: 27 additions & 0 deletions test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr
@@ -0,0 +1,27 @@
fn main(leaf: [u8; 32], path: [u8; 64], index: u32, root: [u8; 32]) {
compute_root(leaf, path, index, root);
}

fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) {
let mut current = leaf;
let mut index = _index;

for i in 0..2 {
let mut hash_input = [0; 64];
let offset = i * 32;
let is_right = (index & 1) != 0;
let a = if is_right { 32 } else { 0 };
let b = if is_right { 0 } else { 32 };

for j in 0..32 {
hash_input[j + a] = current[j];
hash_input[j + b] = path[offset + j];
}

current = dep::std::hash::sha256(hash_input);
index = index >> 1;
}

// Regression for issue #4258
assert(root == current);
}
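For reference, here is a small Rust sketch of the hash-input layout used by `compute_root` above (the hashing itself is omitted): depending on the low bit of the index, the running hash occupies the first or the second 32-byte half of the 64-byte input.

```rust
// Mirrors the index/offset logic in the Noir test: for an odd index the current node
// is the right child, so it is written to the second half of the hash input.
fn hash_input(current: [u8; 32], sibling: [u8; 32], index: u32) -> [u8; 64] {
    let mut input = [0u8; 64];
    let is_right = (index & 1) != 0;
    let (a, b) = if is_right { (32, 0) } else { (0, 32) };
    input[a..a + 32].copy_from_slice(&current);
    input[b..b + 32].copy_from_slice(&sibling);
    input
}

fn main() {
    let current = [1u8; 32];
    let sibling = [2u8; 32];

    // Even index: current node on the left, sibling (path entry) on the right.
    let even = hash_input(current, sibling, 0);
    assert_eq!(even[..32], current[..]);
    assert_eq!(even[32..], sibling[..]);

    // Odd index: the two halves swap.
    let odd = hash_input(current, sibling, 1);
    assert_eq!(odd[..32], sibling[..]);
    assert_eq!(odd[32..], current[..]);

    println!("layout checks passed");
}
```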
7 changes: 7 additions & 0 deletions test_programs/execution_success/array_dynamic_main_output/Nargo.toml
@@ -0,0 +1,7 @@
[package]
name = "array_dynamic_main_output"
type = "bin"
authors = [""]
compiler_version = ">=0.23.0"

[dependencies]
2 changes: 2 additions & 0 deletions test_programs/execution_success/array_dynamic_main_output/Prover.toml
@@ -0,0 +1,2 @@
index = "5"
x = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
4 changes: 4 additions & 0 deletions test_programs/execution_success/array_dynamic_main_output/src/main.nr
@@ -0,0 +1,4 @@
fn main(mut x: [Field; 10], index: u8) -> pub [Field; 10] {
x[index] = 0;
x
}
3 changes: 2 additions & 1 deletion tooling/debugger/ignored-tests.txt
@@ -1,3 +1,4 @@
array_dynamic_blackbox_input
array_sort
assign_ex
bit_shifts_comptime
@@ -16,4 +17,4 @@ scalar_mul
signed_comparison
simple_2d_array
to_bytes_integration
bigint
bigint