{
+ self.pop()
+ }
+
+ /// NOTE: If we happen to not have any bytes buffered yet when this is called, then we will be
+ /// forced to try and read from the underlying reader. This requires a mutable reference, which
+ /// is obtained dynamically via [RefCell].
+ ///
+ ///
+ /// Callers must ensure that they do not hold any immutable references to the buffer of this
+ /// reader when calling this function so as to avoid a situation in which the dynamic borrow
+ /// check fails. Specifically, you must not be holding a reference to the result of
+ /// [Self::read_slice] when this function is called.
+ ///
+ fn peek_u8(&self) -> Result {
+ if let Some(byte) = self.buffer().first() {
+ return Ok(*byte);
+ }
+ self.non_empty_reader_buffer().map(|b| b[0])
+ }
+
+ fn read_slice(&mut self, len: usize) -> Result<&[u8], DeserializationError> {
+ // Edge case
+ if len == 0 {
+ return Ok(&[]);
+ }
+
+ // If we have unused buffer, and the consumed portion is
+ // large enough, we will move the unused portion of the buffer
+ // to the start, freeing up bytes at the end for more reads
+ // before forcing a reallocation
+ let should_optimize_storage = self.pos >= 16 && !self.has_remaining_capacity(len);
+ if should_optimize_storage {
+ // We're going to optimize storage first
+ let buf = self.buffer();
+ let src = buf.as_ptr();
+ let count = buf.len();
+ let dst = self.buf.as_mut_ptr();
+ unsafe {
+ core::ptr::copy(src, dst, count);
+ self.buf.set_len(count);
+ self.pos = 0;
+ }
+ }
+
+ // Fill the buffer so we have at least `len` bytes available,
+ // this will return an error if we hit EOF first
+ self.buffer_at_least(len)?;
+
+ Ok(&self.buffer()[0..len])
+ }
+
+ #[inline]
+ fn read_array(&mut self) -> Result<[u8; N], DeserializationError> {
+ if N == 0 {
+ return Ok([0; N]);
+ }
+ self.read_exact()
+ }
+
+ fn check_eor(&self, num_bytes: usize) -> Result<(), DeserializationError> {
+ // Do we have sufficient data in the local buffer?
+ let buffer_len = self.buffer().len();
+ if buffer_len >= num_bytes {
+ return Ok(());
+ }
+
+ // What about if we include what is in the local buffer and the reader's buffer?
+ let reader_buffer_len = self.non_empty_reader_buffer().map(|b| b.len())?;
+ let buffer_len = buffer_len + reader_buffer_len;
+ if buffer_len >= num_bytes {
+ return Ok(());
+ }
+
+ // We have no more input, thus can't fulfill a request of `num_bytes`
+ if self.guaranteed_eof {
+ return Err(DeserializationError::UnexpectedEOF);
+ }
+
+ // Because this function is read-only, we must optimistically assume we can read `num_bytes`
+ // from the input, and fail later if that does not hold. We know we're not at EOF yet, but
+ // that's all we can say without buffering more from the reader. We could make use of
+ // `buffer_at_least`, which would guarantee a correct result, but it would also impose
+ // additional restrictions on the use of this function, e.g. not using it while holding a
+ // reference returned from `read_slice`. Since it is not a memory safety violation to return
+ // an optimistic result here, it makes for a better tradeoff.
+ Ok(())
+ }
+
+ #[inline]
+ fn has_more_bytes(&self) -> bool {
+ !self.buffer().is_empty() || self.non_empty_reader_buffer().is_ok()
+ }
+}
+
+// CURSOR
+// ================================================================================================
+
+#[cfg(feature = "std")]
+macro_rules! cursor_remaining_buf {
+ ($cursor:ident) => {{
+ let buf = $cursor.get_ref().as_ref();
+ let start = $cursor.position().min(buf.len() as u64) as usize;
+ &buf[start..]
+ }};
+}
+
+#[cfg(feature = "std")]
+impl> ByteReader for std::io::Cursor {
+ fn read_u8(&mut self) -> Result {
+ let buf = cursor_remaining_buf!(self);
+ if buf.is_empty() {
+ Err(DeserializationError::UnexpectedEOF)
+ } else {
+ let byte = buf[0];
+ self.set_position(self.position() + 1);
+ Ok(byte)
+ }
+ }
+
+ fn peek_u8(&self) -> Result {
+ cursor_remaining_buf!(self)
+ .first()
+ .copied()
+ .ok_or(DeserializationError::UnexpectedEOF)
+ }
+
+ fn read_slice(&mut self, len: usize) -> Result<&[u8], DeserializationError> {
+ let pos = self.position();
+ let size = self.get_ref().as_ref().len() as u64;
+ if size.saturating_sub(pos) < len as u64 {
+ Err(DeserializationError::UnexpectedEOF)
+ } else {
+ self.set_position(pos + len as u64);
+ let start = pos.min(size) as usize;
+ Ok(&self.get_ref().as_ref()[start..(start + len)])
+ }
+ }
+
+ fn read_array(&mut self) -> Result<[u8; N], DeserializationError> {
+ self.read_slice(N).map(|bytes| {
+ let mut result = [0u8; N];
+ result.copy_from_slice(bytes);
+ result
+ })
+ }
+
+ fn check_eor(&self, num_bytes: usize) -> Result<(), DeserializationError> {
+ if cursor_remaining_buf!(self).len() >= num_bytes {
+ Ok(())
+ } else {
+ Err(DeserializationError::UnexpectedEOF)
+ }
+ }
+
+ #[inline]
+ fn has_more_bytes(&self) -> bool {
+ let pos = self.position();
+ let size = self.get_ref().as_ref().len() as u64;
+ pos < size
+ }
+}
+
// SLICE READER
// ================================================================================================
/// Implements [ByteReader] trait for a slice of bytes.
+///
+/// NOTE: If you are building with the `std` feature, you should probably prefer [std::io::Cursor]
+/// instead. However, [SliceReader] is still useful in no-std environments until stabilization of
+/// the `core_io_borrowed_buf` feature.
pub struct SliceReader<'a> {
source: &'a [u8],
pos: usize,
@@ -249,3 +678,66 @@ impl<'a> ByteReader for SliceReader<'a> {
self.pos < self.source.len()
}
}
+
+#[cfg(all(test, feature = "std"))]
+mod tests {
+ use super::*;
+ use crate::ByteWriter;
+ use std::io::Cursor;
+
+ #[test]
+ fn read_adapter_empty() -> Result<(), DeserializationError> {
+ let mut reader = std::io::empty();
+ let mut adapter = ReadAdapter::new(&mut reader);
+ assert!(!adapter.has_more_bytes());
+ assert_eq!(adapter.check_eor(8), Err(DeserializationError::UnexpectedEOF));
+ assert_eq!(adapter.peek_u8(), Err(DeserializationError::UnexpectedEOF));
+ assert_eq!(adapter.read_u8(), Err(DeserializationError::UnexpectedEOF));
+ assert_eq!(adapter.read_slice(0), Ok([].as_slice()));
+ assert_eq!(adapter.read_slice(1), Err(DeserializationError::UnexpectedEOF));
+ assert_eq!(adapter.read_array(), Ok([]));
+ assert_eq!(adapter.read_array::<1>(), Err(DeserializationError::UnexpectedEOF));
+ Ok(())
+ }
+
+ #[test]
+ fn read_adapter_passthrough() -> Result<(), DeserializationError> {
+ let mut reader = std::io::repeat(0b101);
+ let mut adapter = ReadAdapter::new(&mut reader);
+ assert!(adapter.has_more_bytes());
+ assert_eq!(adapter.check_eor(8), Ok(()));
+ assert_eq!(adapter.peek_u8(), Ok(0b101));
+ assert_eq!(adapter.read_u8(), Ok(0b101));
+ assert_eq!(adapter.read_slice(0), Ok([].as_slice()));
+ assert_eq!(adapter.read_slice(4), Ok([0b101, 0b101, 0b101, 0b101].as_slice()));
+ assert_eq!(adapter.read_array(), Ok([]));
+ assert_eq!(adapter.read_array(), Ok([0b101, 0b101]));
+ Ok(())
+ }
+
+ #[test]
+ fn read_adapter_exact() {
+ const VALUE: usize = 2048;
+ let mut reader = Cursor::new(VALUE.to_le_bytes());
+ let mut adapter = ReadAdapter::new(&mut reader);
+ assert_eq!(usize::from_le_bytes(adapter.read_array().unwrap()), VALUE);
+ assert!(!adapter.has_more_bytes());
+ assert_eq!(adapter.peek_u8(), Err(DeserializationError::UnexpectedEOF));
+ assert_eq!(adapter.read_u8(), Err(DeserializationError::UnexpectedEOF));
+ }
+
+ #[test]
+ fn read_adapter_roundtrip() {
+ const VALUE: usize = 2048;
+
+ // Write VALUE to storage
+ let mut cursor = Cursor::new([0; core::mem::size_of::()]);
+ cursor.write_usize(VALUE);
+
+ // Read VALUE from storage
+ cursor.set_position(0);
+ let mut adapter = ReadAdapter::new(&mut cursor);
+
+ assert_eq!(adapter.read_usize(), Ok(VALUE));
+ }
+}
diff --git a/utils/core/src/serde/byte_writer.rs b/utils/core/src/serde/byte_writer.rs
index 1a32b484b..bdcbc24e2 100644
--- a/utils/core/src/serde/byte_writer.rs
+++ b/utils/core/src/serde/byte_writer.rs
@@ -4,7 +4,6 @@
// LICENSE file in the root directory of this source tree.
use super::Serializable;
-use crate::collections::*;
// BYTE WRITER TRAIT
// ================================================================================================
@@ -100,7 +99,11 @@ pub trait ByteWriter: Sized {
/// Serializes all `elements` and writes the resulting bytes into `self`.
///
/// This method does not write any metadata (e.g. number of serialized elements) into `self`.
- fn write_many(&mut self, elements: &[S]) {
+ fn write_many(&mut self, elements: T)
+ where
+ T: IntoIterator- ,
+ S: Serializable,
+ {
for element in elements {
element.write_into(self);
}
@@ -110,7 +113,20 @@ pub trait ByteWriter: Sized {
// BYTE WRITER IMPLEMENTATIONS
// ================================================================================================
-impl ByteWriter for Vec {
+#[cfg(feature = "std")]
+impl ByteWriter for W {
+ #[inline(always)]
+ fn write_u8(&mut self, byte: u8) {
+ ::write_all(self, &[byte]).expect("write failed")
+ }
+ #[inline(always)]
+ fn write_bytes(&mut self, bytes: &[u8]) {
+ ::write_all(self, bytes).expect("write failed")
+ }
+}
+
+#[cfg(not(feature = "std"))]
+impl ByteWriter for alloc::vec::Vec {
fn write_u8(&mut self, value: u8) {
self.push(value);
}
@@ -129,3 +145,24 @@ fn encoded_len(value: usize) -> usize {
let len = zeros.saturating_sub(1) / 7;
9 - core::cmp::min(len, 8)
}
+
+#[cfg(all(test, feature = "std"))]
+mod tests {
+ use super::*;
+ use std::io::Cursor;
+
+ #[test]
+ fn write_adapter_passthrough() {
+ let mut writer = Cursor::new([0u8; 128]);
+ writer.write_bytes(b"nope");
+ let buf = writer.get_ref();
+ assert_eq!(&buf[..4], b"nope");
+ }
+
+ #[test]
+ #[should_panic]
+ fn write_adapter_writer_out_of_capacity() {
+ let mut writer = Cursor::new([0; 2]);
+ writer.write_bytes(b"nope");
+ }
+}
diff --git a/utils/core/src/serde/mod.rs b/utils/core/src/serde/mod.rs
index fc23ff532..335af1c4d 100644
--- a/utils/core/src/serde/mod.rs
+++ b/utils/core/src/serde/mod.rs
@@ -3,12 +3,20 @@
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
+use alloc::{
+ collections::{BTreeMap, BTreeSet},
+ string::String,
+ vec::Vec,
+};
+
use super::DeserializationError;
-use crate::collections::*;
mod byte_reader;
pub use byte_reader::{ByteReader, SliceReader};
+#[cfg(feature = "std")]
+pub use byte_reader::ReadAdapter;
+
mod byte_writer;
pub use byte_writer::ByteWriter;
@@ -16,7 +24,7 @@ pub use byte_writer::ByteWriter;
// ================================================================================================
/// Defines how to serialize `Self` into bytes.
-pub trait Serializable: Sized {
+pub trait Serializable {
// REQUIRED METHODS
// --------------------------------------------------------------------------------------------
/// Serializes `self` into bytes and writes these bytes into the `target`.
@@ -40,6 +48,12 @@ pub trait Serializable: Sized {
}
}
+impl Serializable for &T {
+ fn write_into(&self, target: &mut W) {
+ (*self).write_into(target)
+ }
+}
+
impl Serializable for () {
fn write_into(&self, _target: &mut W) {}
}
@@ -176,21 +190,53 @@ impl Serializable for Option {
}
}
-impl Serializable for &Option {
+impl Serializable for [T; C] {
fn write_into(&self, target: &mut W) {
- match self {
- Some(v) => {
- target.write_bool(true);
- v.write_into(target);
- }
- None => target.write_bool(false),
+ target.write_many(self)
+ }
+}
+
+impl Serializable for [T] {
+ fn write_into(&self, target: &mut W) {
+ target.write_usize(self.len());
+ for element in self.iter() {
+ element.write_into(target);
}
}
}
-impl Serializable for [T; C] {
+impl Serializable for Vec {
fn write_into(&self, target: &mut W) {
- target.write_many(self)
+ target.write_usize(self.len());
+ target.write_many(self);
+ }
+}
+
+impl Serializable for BTreeMap {
+ fn write_into(&self, target: &mut W) {
+ target.write_usize(self.len());
+ target.write_many(self);
+ }
+}
+
+impl Serializable for BTreeSet {
+ fn write_into(&self, target: &mut W) {
+ target.write_usize(self.len());
+ target.write_many(self);
+ }
+}
+
+impl Serializable for str {
+ fn write_into(&self, target: &mut W) {
+ target.write_usize(self.len());
+ target.write_many(self.as_bytes());
+ }
+}
+
+impl Serializable for String {
+ fn write_into(&self, target: &mut W) {
+ target.write_usize(self.len());
+ target.write_many(self.as_bytes());
}
}
@@ -384,3 +430,36 @@ impl Deserializable for [T; C] {
Ok(res)
}
}
+
+impl Deserializable for Vec {
+ fn read_from(source: &mut R) -> Result {
+ let len = source.read_usize()?;
+ source.read_many(len)
+ }
+}
+
+impl Deserializable for BTreeMap {
+ fn read_from(source: &mut R) -> Result {
+ let len = source.read_usize()?;
+ let data = source.read_many(len)?;
+ Ok(BTreeMap::from_iter(data))
+ }
+}
+
+impl Deserializable for BTreeSet {
+ fn read_from(source: &mut R) -> Result {
+ let len = source.read_usize()?;
+ let data = source.read_many(len)?;
+ Ok(BTreeSet::from_iter(data))
+ }
+}
+
+impl Deserializable for String {
+ fn read_from(source: &mut R) -> Result {
+ let len = source.read_usize()?;
+ let data = source.read_many(len)?;
+
+ String::from_utf8(data)
+ .map_err(|err| DeserializationError::InvalidValue(format!("{}", err)))
+ }
+}
diff --git a/utils/core/src/string.rs b/utils/core/src/string.rs
index baff60d49..e2ff9cac3 100644
--- a/utils/core/src/string.rs
+++ b/utils/core/src/string.rs
@@ -4,13 +4,5 @@
// LICENSE file in the root directory of this source tree.
//! Feature-based re-export of common string components.
-//!
-//! When `std` feature is enabled, this module exports string components from the Rust standard
-//! library. When `alloc` feature is enabled, same components are provided without relying on the
-//! Rust standard library.
-#[cfg(not(feature = "std"))]
pub use alloc::string::{String, ToString};
-
-#[cfg(feature = "std")]
-pub use std::string::{String, ToString};
diff --git a/utils/core/src/tests.rs b/utils/core/src/tests.rs
index af92aaa8c..ef1484c59 100644
--- a/utils/core/src/tests.rs
+++ b/utils/core/src/tests.rs
@@ -3,7 +3,8 @@
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
-use super::{collections::*, ByteReader, ByteWriter, Serializable, SliceReader};
+use super::{ByteReader, ByteWriter, Serializable, SliceReader};
+use alloc::vec::Vec;
use proptest::prelude::{any, proptest};
// VECTOR UTILS TESTS
@@ -139,7 +140,7 @@ fn write_serializable_batch() {
assert_eq!(64, target.len());
let batch2 = [5u128, 6, 7, 8];
- target.write_many(&batch2);
+ target.write_many(batch2);
assert_eq!(128, target.len());
let mut reader = SliceReader::new(&target);
@@ -157,7 +158,7 @@ fn write_serializable_array_batch() {
assert_eq!(64, target.len());
let batch2 = [[5u128, 6], [7, 8]];
- target.write_many(&batch2);
+ target.write_many(batch2);
assert_eq!(128, target.len());
let mut reader = SliceReader::new(&target);
diff --git a/utils/rand/Cargo.toml b/utils/rand/Cargo.toml
index 2c944d563..181f06bb3 100644
--- a/utils/rand/Cargo.toml
+++ b/utils/rand/Cargo.toml
@@ -1,12 +1,12 @@
[package]
name = "winter-rand-utils"
-version = "0.8.1"
+version = "0.8.3"
description = "Random value generation utilities for Winterfell crates"
authors = ["winterfell contributors"]
readme = "README.md"
license = "MIT"
repository = "https://github.com/novifinancial/winterfell"
-documentation = "https://docs.rs/winter-rand-utils/0.8.1"
+documentation = "https://docs.rs/winter-rand-utils/0.8.3"
categories = ["cryptography"]
keywords = ["rand"]
edition = "2021"
diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml
index 4ef474827..a6f9f5517 100644
--- a/verifier/Cargo.toml
+++ b/verifier/Cargo.toml
@@ -1,12 +1,12 @@
[package]
name = "winter-verifier"
-version = "0.8.1"
+version = "0.8.3"
description = "Winterfell STARK verifier"
authors = ["winterfell contributors"]
readme = "README.md"
license = "MIT"
repository = "https://github.com/novifinancial/winterfell"
-documentation = "https://docs.rs/winter-verifier/0.8.1"
+documentation = "https://docs.rs/winter-verifier/0.8.3"
categories = ["cryptography", "no-std"]
keywords = ["crypto", "zkp", "stark", "verifier"]
edition = "2021"
diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs
index 752626c10..fa8c28e86 100644
--- a/verifier/src/channel.rs
+++ b/verifier/src/channel.rs
@@ -8,10 +8,10 @@ use air::{
proof::{ParsedOodFrame, Queries, StarkProof, Table},
Air, EvaluationFrame, LagrangeKernelEvaluationFrame,
};
+use alloc::{string::ToString, vec::Vec};
use crypto::{BatchMerkleProof, ElementHasher, MerkleTree};
use fri::VerifierChannel as FriVerifierChannel;
use math::{FieldElement, StarkField};
-use utils::{collections::*, string::*};
// VERIFIER CHANNEL
// ================================================================================================
diff --git a/verifier/src/composer.rs b/verifier/src/composer.rs
index fc7c60d12..243e04c11 100644
--- a/verifier/src/composer.rs
+++ b/verifier/src/composer.rs
@@ -4,8 +4,8 @@
// LICENSE file in the root directory of this source tree.
use air::{proof::Table, Air, DeepCompositionCoefficients, EvaluationFrame};
+use alloc::vec::Vec;
use math::{batch_inversion, FieldElement};
-use utils::collections::*;
// DEEP COMPOSER
// ================================================================================================
diff --git a/verifier/src/errors.rs b/verifier/src/errors.rs
index f9f877058..068f3d5ae 100644
--- a/verifier/src/errors.rs
+++ b/verifier/src/errors.rs
@@ -5,8 +5,8 @@
//! Contains common error types for prover and verifier.
+use alloc::string::String;
use core::fmt;
-use utils::string::*;
// VERIFIER ERROR
// ================================================================================================
diff --git a/verifier/src/evaluator.rs b/verifier/src/evaluator.rs
index dd3d8dfb6..e70aef264 100644
--- a/verifier/src/evaluator.rs
+++ b/verifier/src/evaluator.rs
@@ -7,8 +7,8 @@ use air::{
Air, AuxTraceRandElements, ConstraintCompositionCoefficients, EvaluationFrame,
LagrangeKernelEvaluationFrame, LagrangeKernelTransitionConstraints,
};
+use alloc::vec::Vec;
use math::{polynom, FieldElement};
-use utils::collections::*;
// CONSTRAINT EVALUATION
// ================================================================================================
diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs
index 9b2cd55c5..20020e89c 100644
--- a/verifier/src/lib.rs
+++ b/verifier/src/lib.rs
@@ -26,9 +26,8 @@
//! need to be in tens of thousands. And even for hundreds of thousands of asserted values, the
//! verification time should not exceed 50 ms.
-#![cfg_attr(not(feature = "std"), no_std)]
+#![no_std]
-#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
@@ -45,9 +44,14 @@ use math::{
FieldElement, ToElements,
};
+#[deprecated(
+ since = "0.8.2",
+ note = "You should prefer the types from libstd/liballoc instead"
+)]
+#[allow(deprecated)]
+pub use utils::collections::*;
pub use utils::{
- collections::*, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
- SliceReader,
+ ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, SliceReader,
};
pub use crypto;
diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml
index 68be128d0..e1cf4b00d 100644
--- a/winterfell/Cargo.toml
+++ b/winterfell/Cargo.toml
@@ -1,12 +1,12 @@
[package]
name = "winterfell"
-version = "0.8.1"
+version = "0.8.3"
description = "Winterfell STARK prover and verifier"
authors = ["winterfell contributors"]
readme = "../README.md"
license = "MIT"
repository = "https://github.com/novifinancial/winterfell"
-documentation = "https://docs.rs/winterfell/0.8.1"
+documentation = "https://docs.rs/winterfell/0.8.3"
categories = ["cryptography", "no-std"]
keywords = ["crypto", "zkp", "stark", "prover", "verifier"]
edition = "2021"
diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs
index 69e768921..f4fdfc8cc 100644
--- a/winterfell/src/lib.rs
+++ b/winterfell/src/lib.rs
@@ -18,12 +18,12 @@
//! 1. Define an *algebraic intermediate representation* (AIR) for your computation. This can
//! be done by implementing [Air] trait.
//! 2. Define an execution trace for your computation. This can be done by implementing [Trace]
-//! trait. Alternatively, you can use [TraceTable] struct which already implements [Trace]
+//! trait. Alternatively, you can use the [TraceTable] struct which already implements [Trace]
//! trait in cases when this generic implementation works for your use case.
//! 3. Execute your computation and record its execution trace.
//! 4. Define your prover by implementing [Prover] trait. Then execute [Prover::prove()] function
//! passing the trace generated in the previous step into it as a parameter. The function will
-//! return a instance of [StarkProof].
+//! return an instance of [StarkProof].
//!
//! This `StarkProof` can be serialized and sent to a STARK verifier for verification. The size
//! of proof depends on the specifics of a given computation, but for most computations it should
@@ -31,10 +31,10 @@
//! computations).
//!
//! Proof generation time is also highly dependent on the specifics of a given computation, but
-//! also depends on the capabilities of the machine used to generate the proofs (i.e. on number
+//! also depends on the capabilities of the machine used to generate the proofs (i.e. on the number
//! of CPU cores and memory bandwidth).
//!
-//! When the crate is compiled with `concurrent` feature enabled, proof generation will be
+//! When the crate is compiled with the `concurrent` feature enabled, proof generation will be
//! performed in multiple threads (usually, as many threads as there are logical cores on the
//! machine). The number of threads can be configured via `RAYON_NUM_THREADS` environment
//! variable.
@@ -44,18 +44,18 @@
//! do the following:
//!
//! 1. Define an *algebraic intermediate representation* (AIR) for you computation. This AIR
-//! must be the same as the one used during proof generation process.
+//! must be the same as the one used during the proof generation process.
//! 2. Execute [verify()] function and supply the AIR of your computation together with the
//! [StarkProof] and related public inputs as parameters.
//!
//! Proof verification is extremely fast and is nearly independent of the complexity of the
-//! computation being verified. In vast majority of cases proofs can be verified in 3 - 5 ms
+//! computation being verified. In the vast majority of cases, proofs can be verified in 3 - 5 ms
//! on a modern mid-range laptop CPU (using a single core).
//!
//! There is one exception, however: if a computation requires a lot of `sequence` assertions
//! (see [Assertion] for more info), the verification time will grow linearly in the number of
//! asserted values. But for the impact to be noticeable, the number of asserted values would
-//! need to be in tens of thousands. And even for hundreds of thousands of asserted values, the
+//! need to be in tens of thousands. And even for hundreds of thousands of asserted values, the
//! verification time should not exceed 50 ms.
//!
//! # Examples
@@ -77,10 +77,10 @@
//! ```
//!
//! This computation starts with an element in a finite field and then, for the specified number
-//! of steps, cubes the element and adds value `42` to it.
+//! of steps, cubes the element, and adds value `42` to it.
//!
//! Suppose, we run this computation for a million steps and get some result. Using STARKs we can
-//! prove that we did the work correctly without requiring any verifying party to re-execute the
+//! prove that we did the work correctly without requiring any verifying party to re-execute the
//! computation. Here is how to do it:
//!
//! First, we need to define an *execution trace* for our computation. This trace should capture
@@ -99,7 +99,7 @@
//! | ... |
//! | 1,048,575 | 247770943907079986105389697876176586605 |
//!
-//! To record the trace, we'll use the [TraceTable] struct. The function below, is just a
+//! To record the trace, we'll use the [TraceTable] struct. The function below is just a
//! modified version of the `do_work()` function which records every intermediate state of the
//! computation in the [TraceTable] struct:
//!
@@ -346,7 +346,7 @@
//! type TraceLde> = DefaultTraceLde;
//! type ConstraintEvaluator<'a, E: FieldElement> =
//! DefaultConstraintEvaluator<'a, Self::Air, E>;
-//!
+//!
//! // Our public inputs consist of the first and last value in the execution trace.
//! fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs {
//! let last_step = trace.length() - 1;
@@ -367,7 +367,7 @@
//! domain: &StarkDomain,
//! ) -> (Self::TraceLde, TracePolyTable) {
//! DefaultTraceLde::new(trace_info, main_trace, domain)
-//! }
+//! }
//!
//! fn new_evaluator<'a, E: FieldElement>(
//! &self,
@@ -384,7 +384,7 @@
//!
//! In the code below, we will execute our computation and get the result together with the proof
//! that the computation was executed correctly. Then, we will use this proof (together with the
-//! public inputs) to verify that we did in fact execute the computation and got the claimed
+//! public inputs) to verify that we did execute the computation and got the claimed
//! result.
//!
//! ```
@@ -507,7 +507,7 @@
//! # ) -> (Self::TraceLde, TracePolyTable) {
//! # DefaultTraceLde::new(trace_info, main_trace, domain)
//! # }
-//! #
+//! #
//! # fn new_evaluator<'a, E: FieldElement>(
//! # &self,
//! # air: &'a Self::Air,
@@ -567,9 +567,9 @@
//! * STARKs vs. SNARKs: [A Cambrian Explosion of Crypto Proofs](https://nakamoto.com/cambrian-explosion-of-crypto-proofs/)
//!
//! Vitalik Buterin's blog series on zk-STARKs:
-//! * [STARKs, part 1: Proofs with Polynomials](https://vitalik.ca/general/2017/11/09/starks_part_1.html)
-//! * [STARKs, part 2: Thank Goodness it's FRI-day](https://vitalik.ca/general/2017/11/22/starks_part_2.html)
-//! * [STARKs, part 3: Into the Weeds](https://vitalik.ca/general/2018/07/21/starks_part_3.html)
+//! * [STARKs, part 1: Proofs with Polynomials](https://vitalik.eth.limo/general/2017/11/09/starks_part_1.html)
+//! * [STARKs, part 2: Thank Goodness it's FRI-day](https://vitalik.eth.limo/general/2017/11/22/starks_part_2.html)
+//! * [STARKs, part 3: Into the Weeds](https://vitalik.eth.limo/general/2018/07/21/starks_part_3.html)
//!
//! StarkWare's STARK Math blog series:
//! * [STARK Math: The Journey Begins](https://medium.com/starkware/stark-math-the-journey-begins-51bd2b063c71)
@@ -578,8 +578,9 @@
//! * [Low Degree Testing](https://medium.com/starkware/low-degree-testing-f7614f5172db)
//! * [A Framework for Efficient STARKs](https://medium.com/starkware/a-framework-for-efficient-starks-19608ba06fbe)
-#![cfg_attr(not(feature = "std"), no_std)]
-#[cfg(not(feature = "std"))]
+#![no_std]
+
+#[cfg(test)]
extern crate alloc;
pub use prover::{
diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs
index b794d93e4..fa9118d5c 100644
--- a/winterfell/src/tests.rs
+++ b/winterfell/src/tests.rs
@@ -1,13 +1,12 @@
use super::*;
+use alloc::vec;
+use alloc::vec::Vec;
use prover::{
crypto::{hashers::Blake3_256, DefaultRandomCoin},
math::{fields::f64::BaseElement, ExtensionOf, FieldElement},
matrix::ColMatrix,
};
-#[cfg(not(feature = "std"))]
-use alloc::{vec, vec::Vec};
-
#[test]
fn test_lagrange_kernel_air() {
let trace = LagrangeMockTrace::new();