From 9b734d6cbd878ce26fadb3debb6d3aa8e3c6afaf Mon Sep 17 00:00:00 2001
From: Joe Fioti
Date: Sat, 27 Apr 2024 09:43:51 -0500
Subject: [PATCH] Fixed llama layers

---
 examples/llama/src/model.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/llama/src/model.rs b/examples/llama/src/model.rs
index ecab6862..90760362 100644
--- a/examples/llama/src/model.rs
+++ b/examples/llama/src/model.rs
@@ -6,7 +6,7 @@ use luminal_nn::{Embedding, PermutedLinear, RMSNorm};
 // Llama3 8B Config
 pub const VOCAB_SIZE: usize = 128256;
 pub const HIDDEN_DIM: usize = 4096;
-pub const NUM_LAYERS: usize = 1;
+pub const NUM_LAYERS: usize = 32;
 pub const N_HEADS: usize = 32;
 pub const N_KV_HEADS: usize = 8;
 pub const MLP_DIM: usize = 14336;
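
The patch restores NUM_LAYERS from 1 (presumably a leftover debugging value) to the 32 decoder layers of Llama3 8B. A minimal sketch of why this constant matters, assuming the usual pattern of stacking identical decoder blocks; the Llama and TransformerBlock types here are hypothetical stand-ins, not luminal's actual API:

    // Hypothetical sketch (not luminal's API): the model stacks
    // NUM_LAYERS identical decoder blocks, so a value of 1 silently
    // builds a one-layer, truncated network.
    pub const NUM_LAYERS: usize = 32; // Llama3 8B uses 32 decoder layers

    // Stand-in for one attention + MLP decoder block.
    struct TransformerBlock;

    struct Llama {
        layers: Vec<TransformerBlock>,
    }

    impl Llama {
        fn new() -> Self {
            Self {
                // The constant directly determines model depth.
                layers: (0..NUM_LAYERS).map(|_| TransformerBlock).collect(),
            }
        }
    }

    fn main() {
        let model = Llama::new();
        assert_eq!(model.layers.len(), 32);
        println!("stacked {} decoder layers", model.layers.len());
    }

With NUM_LAYERS = 1 every other config constant (N_HEADS, MLP_DIM, etc.) still matched the 8B spec, so weights would load per-layer but the model would run only one of the 32 blocks.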