Dynamically dispatch on dtype
EricLBuehler committed Mar 12, 2024
1 parent 53faff9 commit 37f5336
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion candle-nn/src/layer_norm.rs
@@ -194,7 +194,7 @@ impl crate::Module for LayerNorm {
     eps: f32,
 }
 
-fn run<T: WithDType>(dev: CudaDevice, storage: &candle::CudaStorage, layout: &Layout) -> Result<CudaStorage> {
+fn run<T: WithDType + CudaDType>(dev: CudaDevice, storage: &candle::CudaStorage, layout: &Layout) -> Result<CudaStorage> {
     let slice = storage.as_cuda_slice::<T>()?;
     let slice = match layout.contiguous_offsets() {
         None => candle::bail!("input has to be contiguous"),
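
The hunk only changes the trait bound on the generic `run` helper; the call site that actually dispatches on the runtime dtype sits outside the diff context. A minimal sketch of what such a dispatcher could look like, matching the commit title: the function name `dispatch`, the import paths, and the set of `DType` arms are illustrative assumptions, not part of this commit. The added `CudaDType` bound is what lets each arm call `storage.as_cuda_slice::<T>()` for its concrete `T`.

    use candle::backend::BackendStorage;
    use candle::{bf16, f16, CudaDevice, CudaStorage, DType, Layout, Result};

    // Hypothetical caller: selects the monomorphized `run::<T>` instance
    // (the generic function from the diff above) that matches the storage's
    // runtime dtype. Only one arm executes, so moving `dev` in each is fine.
    fn dispatch(dev: CudaDevice, storage: &CudaStorage, layout: &Layout) -> Result<CudaStorage> {
        match storage.dtype() {
            DType::F16 => run::<f16>(dev, storage, layout),
            DType::BF16 => run::<bf16>(dev, storage, layout),
            DType::F32 => run::<f32>(dev, storage, layout),
            dtype => candle::bail!("layer-norm is not implemented for {dtype:?}"),
        }
    }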
