
Commit 082b922
Rebase + clippy.
Narsil committed Nov 9, 2023
1 parent 33115eb commit 082b922
Showing 4 changed files with 18 additions and 3 deletions.
10 changes: 10 additions & 0 deletions candle-core/src/dummy_metal_backend.rs
@@ -91,6 +91,16 @@ impl crate::backend::BackendStorage for MetalStorage {
         Err(Error::NotCompiledWithMetalSupport)
     }
 
+    fn conv_transpose1d(
+        &self,
+        _l: &Layout,
+        _kernel: &Self,
+        _kernel_l: &Layout,
+        _params: &crate::conv::ParamsConvTranspose1D,
+    ) -> Result<Self> {
+        Err(Error::NotCompiledWithMetalSupport)
+    }
+
     fn conv2d(
         &self,
         _: &Layout,
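
Note: the dummy Metal backend exists so the crate still compiles when the "metal" feature is off, so once conv_transpose1d joined the backend trait the dummy implementation needed a matching stub. A minimal sketch of the pattern, with a simplified trait and error type rather than candle's real BackendStorage signature:

// Simplified stand-ins: candle's real method takes layouts, a kernel and conv params.
trait BackendStorage: Sized {
    fn conv_transpose1d(&self) -> Result<Self, String>;
}

struct DummyMetalStorage;

impl BackendStorage for DummyMetalStorage {
    fn conv_transpose1d(&self) -> Result<Self, String> {
        // Compiles everywhere, but surfaces a clear error if the op is actually used.
        Err("not compiled with metal support".to_string())
    }
}

fn main() {
    assert!(DummyMetalStorage.conv_transpose1d().is_err());
}
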
4 changes: 3 additions & 1 deletion candle-core/src/quantized/mod.rs
@@ -1,4 +1,6 @@
-use crate::{backend::BackendStorage, Device, Result, Shape, Tensor};
+#[cfg(feature = "metal")]
+use crate::backend::BackendStorage;
+use crate::{Device, Result, Shape, Tensor};
 
 #[cfg(target_feature = "avx")]
 pub mod avx;
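
Note: the cfg gate is what keeps clippy quiet in default builds — the BackendStorage trait is only exercised by the Metal code path, so an unconditional import trips unused_imports when the "metal" feature is off. A minimal, self-contained sketch of the pattern (the feature name and module layout here are illustrative, not candle's exact ones):

#[allow(dead_code)]
mod backend {
    pub trait BackendStorage {
        fn device_name(&self) -> &'static str;
    }
}

// The trait is only needed by feature-gated code, so the import is gated too;
// otherwise a default build fails `cargo clippy -- -D warnings` on unused_imports.
#[cfg(feature = "metal")]
use backend::BackendStorage;

#[cfg(feature = "metal")]
fn device_name_of<S: BackendStorage>(storage: &S) -> &'static str {
    storage.device_name()
}

fn main() {
    // Without `--features metal`, the gated items above compile away entirely.
    println!("default build: no metal-only imports in scope");
}
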
3 changes: 2 additions & 1 deletion candle-metal-kernels/src/lib.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::too_many_arguments)]
 use metal::{
     Buffer, CommandBufferRef, CompileOptions, ComputePipelineDescriptor, Device, Function, Library,
     MTLSize,
@@ -89,7 +90,7 @@ type KernelMap<T> = HashMap<&'static str, T>;
 type Libraries = HashMap<Source, Library>;
 type Functions = KernelMap<Function>;
 
-#[derive(Debug)]
+#[derive(Debug, Default)]
 pub struct Kernels {
     libraries: RwLock<Libraries>,
     funcs: RwLock<Functions>,
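
Note: deriving Default on Kernels works because both RwLock<T> and HashMap<K, V> implement Default, so Kernels::default() yields empty, lockable caches. A minimal sketch with simplified value types standing in for the real Libraries/Functions aliases:

use std::collections::HashMap;
use std::sync::RwLock;

// Stand-in value types; in the crate these hold metal Library/Function handles.
type Libraries = HashMap<&'static str, String>;
type Functions = HashMap<&'static str, String>;

#[derive(Debug, Default)]
pub struct Kernels {
    libraries: RwLock<Libraries>,
    funcs: RwLock<Functions>,
}

fn main() {
    // Equivalent to constructing each field with RwLock::new(HashMap::new()).
    let kernels = Kernels::default();
    println!("cached libraries: {}", kernels.libraries.read().unwrap().len());
    println!("cached functions: {}", kernels.funcs.read().unwrap().len());
}
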
4 changes: 3 additions & 1 deletion candle-nn/src/ops.rs
@@ -1,4 +1,6 @@
-use candle::{backend::BackendStorage, CpuStorage, Layout, Result, Shape, Tensor};
+#[cfg(feature = "metal")]
+use candle::backend::BackendStorage;
+use candle::{CpuStorage, Layout, Result, Shape, Tensor};
 use rayon::prelude::*;
 
 /// Applies the softmax function to the input tensor, rescaling the element so that elements on
