
Commit c1ed73f

Chore: Fix a ton of warnings
1 parent 54978ca commit c1ed73f

24 files changed: 33 additions, 56 deletions

crates/cudnn/src/activation/activation_descriptor.rs
Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-use crate::{sys, ActivationMode, CudnnContext, CudnnError, IntoResult, NanPropagation};
+use crate::{sys, ActivationMode, CudnnError, IntoResult, NanPropagation};
 use std::mem::MaybeUninit;
 
 /// The descriptor of a neuron activation operation.
@@ -47,7 +47,7 @@ impl ActivationDescriptor {
         unsafe {
             sys::cudnnCreateActivationDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             let coefficient = coefficient.into().unwrap_or_else(|| match mode {
                 ActivationMode::ClippedRelu => std::f64::MAX,
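Most of the hunks in this commit are the same one-line fix. The descriptor handle produced by `MaybeUninit::assume_init` is only read afterwards (it is passed by value to the corresponding `cudnnSet*` call), so binding it with `mut` trips rustc's `unused_mut` lint. A minimal, self-contained sketch of the pattern, with made-up stand-in functions instead of the real cuDNN FFI:

    use std::mem::MaybeUninit;

    // Stand-ins for the cudnnCreate*/cudnnSet* pair (illustrative only):
    // "create" writes the handle through a &mut, "configure" takes it by value.
    fn fake_create(out: &mut MaybeUninit<u64>) -> i32 {
        *out = MaybeUninit::new(42);
        0
    }

    fn fake_configure(handle: u64) -> i32 {
        if handle != 0 { 0 } else { -1 }
    }

    fn create_handle() -> Result<u64, i32> {
        let mut raw = MaybeUninit::uninit();
        fake_create(&mut raw);

        // The initialized handle is only read from here on (it is passed by
        // value to the setter), so `let raw = ...` suffices; `let mut raw = ...`
        // is what tripped the `unused_mut` lint throughout this crate.
        let raw = unsafe { raw.assume_init() };

        match fake_configure(raw) {
            0 => Ok(raw),
            err => Err(err),
        }
    }

    fn main() {
        assert_eq!(create_handle(), Ok(42));
    }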

crates/cudnn/src/activation/mod.rs
Lines changed: 0 additions & 1 deletion

@@ -8,7 +8,6 @@ use crate::{
     private, sys, CudnnContext, CudnnError, DataType, IntoResult, ScalingDataType, TensorDescriptor,
 };
 use cust::memory::GpuBuffer;
-use std::mem::MaybeUninit;
 
 impl CudnnContext {
     /// Applies a specific neuron activation functions element wise of the provided tensor.

crates/cudnn/src/attention/attention_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -125,7 +125,7 @@ where
         unsafe {
             sys::cudnnCreateAttnDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             sys::cudnnSetAttnDescriptor(
                 raw,

crates/cudnn/src/attention/mod.rs
Lines changed: 2 additions & 3 deletions

@@ -8,7 +8,7 @@ pub use attention_weights_kind::*;
 pub use seq_data_axis::*;
 pub use seq_data_descriptor::*;
 
-use crate::{sys, CudnnContext, CudnnError, DataType, IntoResult, WGradMode};
+use crate::{sys, CudnnContext, CudnnError, IntoResult, WGradMode};
 use cust::memory::GpuBuffer;
 use std::mem::MaybeUninit;
 
@@ -259,7 +259,6 @@ impl CudnnContext {
     pub fn multi_head_attn_backward_data<T, U, D1, D2>(
         &self,
         attn_desc: &AttentionDescriptor<T, U, D1, D2>,
-        current_idx: i32,
         lo_win_idx: &[i32],
         hi_win_idx: &[i32],
         device_seq_lengths_dqdo: &impl GpuBuffer<i32>,
@@ -288,7 +287,7 @@ impl CudnnContext {
         let device_seq_lengths_dqdo_ptr =
             device_seq_lengths_dqdo.as_device_ptr().as_ptr() as *const _;
         let device_seq_lengths_dkdv_ptr =
-            device_seq_lengths_dqdo.as_device_ptr().as_ptr() as *const _;
+            device_seq_lengths_dkdv.as_device_ptr().as_ptr() as *const _;
 
         let d_out_ptr = d_out.as_device_ptr().as_ptr() as *const _;
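Two of the changes in this commit go beyond silencing lints. In the hunk above, the dkdv sequence-length pointer was being built from the dqdo buffer, which left the `device_seq_lengths_dkdv` argument unused and made rustc flag it; the rnn/mod.rs hunk further down fixes the analogous slip where `hx_ptr` was built from `x`. A hypothetical reduction of that bug class, with plain slices standing in for the GPU buffers (none of these names are the crate's real API):

    fn backward_data(dqdo: &[i32], dkdv: &[i32]) -> (*const i32, *const i32) {
        let dqdo_ptr = dqdo.as_ptr();
        // Before: `let dkdv_ptr = dqdo.as_ptr();` left `dkdv` unused, so rustc
        // warned; the lint is what surfaced the copy-paste mistake.
        let dkdv_ptr = dkdv.as_ptr();
        (dqdo_ptr, dkdv_ptr)
    }

    fn main() {
        let (dqdo, dkdv) = (vec![1, 2, 3], vec![4, 5, 6]);
        let (p, q) = backward_data(&dqdo, &dkdv);
        // With the fix, the two pointers come from different buffers.
        assert_ne!(p, q);
    }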

crates/cudnn/src/attention/seq_data_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -120,7 +120,7 @@ where
         unsafe {
             sys::cudnnCreateSeqDataDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             let raw_axes = axes.map(SeqDataAxis::into);
 

crates/cudnn/src/backend/mod.rs
Lines changed: 2 additions & 0 deletions

@@ -1,3 +1,5 @@
+#![allow(warnings)]
+
 mod conv_bwd_data;
 mod conv_bwd_filter;
 mod conv_cfg;

crates/cudnn/src/context.rs
Lines changed: 1 addition & 2 deletions

@@ -1,6 +1,5 @@
 use crate::{sys, CudnnError, IntoResult};
-use cust::memory::{GpuBox, GpuBuffer};
-use std::mem::{self, MaybeUninit};
+use std::mem::MaybeUninit;
 
 /// cuDNN library context. It's the central structure required to interact with cuDNN.
 /// It holds and manages internal memory allocations.

crates/cudnn/src/convolution/convolution_algo.rs
Lines changed: 1 addition & 7 deletions

@@ -1,10 +1,4 @@
-use crate::{
-    data_type::DataType,
-    determinism::Determinism,
-    error::{CudnnError, IntoResult},
-    math_type::MathType,
-    private, sys, TensorFormat,
-};
+use crate::{sys, CudnnError, Determinism, IntoResult, MathType};
 
 /// The best suited algorithm according to the layer specifications obtained through a heuristic.
 #[derive(Debug, Clone, Copy, PartialEq)]

crates/cudnn/src/convolution/convolution_config.rs
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-use crate::{private, sys, DataType};
+use crate::{private, DataType};
 
 /// Supported data types configurations for convolution operations.
 pub trait SupportedConv<X, W, Y>: private::Sealed + DataType

crates/cudnn/src/convolution/convolution_descriptor.rs
Lines changed: 2 additions & 4 deletions

@@ -1,6 +1,4 @@
-use crate::{
-    sys, ConvMode, DataType, MathType, TensorFormat, {CudnnError, IntoResult},
-};
+use crate::{sys, ConvMode, CudnnError, DataType, IntoResult, MathType};
 
 use std::{marker::PhantomData, mem::MaybeUninit};
 
@@ -97,7 +95,7 @@ impl<T: DataType> ConvDescriptor<T> {
         unsafe {
             sys::cudnnCreateConvolutionDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut conv_desc = Self {
+            let conv_desc = Self {
                 raw: raw.assume_init(),
                 comp_type: PhantomData,
             };

crates/cudnn/src/convolution/filter_descriptor.rs
Lines changed: 2 additions & 4 deletions

@@ -1,8 +1,5 @@
 use crate::{sys, CudnnError, DataType, IntoResult, ScalarC};
-use std::{
-    marker::PhantomData,
-    mem::{self, MaybeUninit},
-};
+use std::{marker::PhantomData, mem::MaybeUninit};
 
 /// A generic description of an n-dimensional filter dataset.
 #[derive(Debug, PartialEq, Eq, Hash)]
@@ -58,6 +55,7 @@ where
 
         unsafe {
             sys::cudnnCreateFilterDescriptor(raw.as_mut_ptr()).into_result()?;
+
             let raw = raw.assume_init();
 
             sys::cudnnSetFilterNdDescriptor(

crates/cudnn/src/dropout/dropout_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-use crate::{error::CudnnError, sys, IntoResult};
+use crate::sys;
 use cust::memory::GpuBuffer;
 
 /// The descriptor of a dropout operation.

crates/cudnn/src/dropout/mod.rs
Lines changed: 5 additions & 13 deletions

@@ -67,19 +67,11 @@ impl CudnnContext {
     ///
     /// let ctx = CudnnContext::new()?;
     ///
-    /// let desc = TensorDescriptor::<f32>::new_strides(&[1, 1, 5], &[5, 5, 1])?;
-    ///
-    /// let size = ctx.get_dropout_reserved_space_size(&desc)?;
+    /// let size = ctx.get_dropout_reserved_space_size()?;
     /// # Ok(())
     /// # }
     /// ```
-    pub fn get_dropout_reserved_space_size<T>(
-        &self,
-        x_desc: &TensorDescriptor<T>,
-    ) -> Result<usize, CudnnError>
-    where
-        T: DataType,
-    {
+    pub fn get_dropout_reserved_space_size(&self) -> Result<usize, CudnnError> {
         let mut size = MaybeUninit::uninit();
 
         unsafe {
@@ -141,7 +133,7 @@ impl CudnnContext {
         unsafe {
             sys::cudnnCreateDropoutDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             sys::cudnnSetDropoutDescriptor(raw, self.raw, dropout, states_ptr, states_size, seed)
                 .into_result()?;
@@ -208,7 +200,7 @@ impl CudnnContext {
     /// let dropout_desc = ctx.create_dropout_descriptor(dropout, states, seed)?;
     ///
    /// let mut reserved_space = {
-    ///     let size = ctx.get_dropout_reserved_space_size(&x_desc)?;
+    ///     let size = ctx.get_dropout_reserved_space_size()?;
     ///     unsafe { DeviceBuffer::uninitialized(size)? }
     /// };
     ///
@@ -307,7 +299,7 @@ impl CudnnContext {
     /// # let seed = 123;
     /// # let dropout_desc = ctx.create_dropout_descriptor(dropout, states, seed)?;
     /// # let mut reserved_space = {
-    /// #     let size = ctx.get_dropout_reserved_space_size(&x_desc)?;
+    /// #     let size = ctx.get_dropout_reserved_space_size()?;
     /// #     unsafe { DeviceBuffer::uninitialized(size)? }
     /// # };
     /// # ctx.dropout_forward(&dropout_desc, &x_desc, &x, &y_desc, &mut y, &mut reserved_space)?;
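Besides the recurring `unused_mut` fix, this file also drops the tensor-descriptor argument, and the `T: DataType` bound it required, from `get_dropout_reserved_space_size`, updating the doc examples to the zero-argument call. The function body is not shown in the hunk, but the signature could only shrink this way if the argument went unused inside it, which is exactly the `unused_variables` case this commit targets. A hedged before/after sketch with stand-in types, not the crate's real API:

    use std::marker::PhantomData;

    // Stand-in for a typed descriptor; only here to show the generic bound going away.
    struct TensorDesc<T>(PhantomData<T>);

    // Before: the descriptor was accepted but never read, so rustc warned about it
    // and every caller had to build a descriptor just to ask for a size.
    #[allow(unused_variables)] // suppressing the very warning the commit removes
    fn reserved_space_size_old<T>(x_desc: &TensorDesc<T>) -> usize {
        64
    }

    // After: no unused argument, no generic parameter, simpler call sites.
    fn reserved_space_size() -> usize {
        64
    }

    fn main() {
        let desc: TensorDesc<f32> = TensorDesc(PhantomData);
        assert_eq!(reserved_space_size_old(&desc), reserved_space_size());
    }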

crates/cudnn/src/error.rs
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-use crate::sys::{self, cudnnStatus_t};
+use crate::sys;
 use std::{error::Error, ffi::CStr, fmt::Display};
 
 /// Enum encapsulating function status returns. All cuDNN library functions return their status.

crates/cudnn/src/lib.rs
Lines changed: 1 addition & 3 deletions

@@ -1,6 +1,3 @@
-#![allow(warnings, clippy::all)]
-mod sys;
-
 mod activation;
 mod attention;
 mod backend;
@@ -16,6 +13,7 @@ mod op;
 mod pooling;
 mod rnn;
 mod softmax;
+mod sys;
 mod tensor;
 mod w_grad_mode;
 
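The crate-wide `#![allow(warnings, clippy::all)]` is gone from lib.rs; the blanket allow is instead scoped to the modules that actually need it, the bindgen-generated `sys` module (see the sys.rs hunk below) and the `backend` module (hunk above), so warnings in the handwritten code surface again. A small sketch of that scoping idiom, using a made-up module rather than the crate's real ones:

    // In the crate, the allow sits as `#![allow(warnings)]` at the top of
    // sys.rs and backend/mod.rs; the outer-attribute form below is equivalent.

    #[allow(warnings)]
    mod generated_bindings {
        // Typical bindgen-style items that would otherwise warn
        // (non-camel-case type name, never-used constant).
        pub type cudnnStatus_t_stub = i32;
        pub const UNUSED_CONSTANT: u32 = 0;
    }

    // Handwritten code outside the allowed module is linted normally again.
    fn handwritten() -> u32 {
        1
    }

    fn main() {
        println!("{}", handwritten());
    }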

crates/cudnn/src/op/op_tensor_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -18,7 +18,7 @@ unsafe fn init_raw_op_descriptor<T: DataType>(
 
     sys::cudnnCreateOpTensorDescriptor(raw.as_mut_ptr()).into_result()?;
 
-    let mut raw = raw.assume_init();
+    let raw = raw.assume_init();
 
     sys::cudnnSetOpTensorDescriptor(raw, op, T::into_raw(), nan_opt).into_result()?;
     Ok(raw)

crates/cudnn/src/pooling/pooling_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -60,7 +60,7 @@ impl PoolingDescriptor {
         unsafe {
             sys::cudnnCreatePoolingDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             sys::cudnnSetPoolingNdDescriptor(
                 raw,

crates/cudnn/src/rnn/mod.rs
Lines changed: 2 additions & 2 deletions

@@ -20,7 +20,7 @@ pub use rnn_direction_mode::*;
 pub use rnn_input_mode::*;
 pub use rnn_mode::*;
 
-use crate::{sys, CudnnContext, CudnnError, DataType, IntoResult, TensorDescriptor, WGradMode};
+use crate::{sys, CudnnContext, CudnnError, IntoResult, TensorDescriptor, WGradMode};
 use cust::memory::GpuBuffer;
 use std::mem::MaybeUninit;
 
@@ -565,7 +565,7 @@ impl CudnnContext {
         let device_sequence_lengths_ptr = device_seq_lengths.as_device_ptr().as_mut_ptr();
 
         let x_ptr = x.as_device_ptr().as_ptr() as *const std::ffi::c_void;
-        let hx_ptr = x.as_device_ptr().as_ptr() as *const std::ffi::c_void;
+        let hx_ptr = hx.as_device_ptr().as_ptr() as *const std::ffi::c_void;
         let y_ptr = y.as_device_ptr().as_ptr() as *const std::ffi::c_void;
 
         let dweight_space_ptr = dweight_space.as_device_ptr().as_mut_ptr() as *mut std::ffi::c_void;

crates/cudnn/src/rnn/rnn_data_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -112,7 +112,7 @@ where
         unsafe {
             sys::cudnnCreateRNNDataDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             let fill: *mut T = padding_fill
                 .into()

crates/cudnn/src/rnn/rnn_data_layout.rs
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-use crate::{sys, RnnDataDescriptor};
+use crate::sys;
 
 /// The data layout for input and output of a recurrent neural network.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

crates/cudnn/src/rnn/rnn_descriptor.rs
Lines changed: 1 addition & 1 deletion

@@ -160,7 +160,7 @@ where
         unsafe {
             sys::cudnnCreateRNNDescriptor(raw.as_mut_ptr()).into_result()?;
 
-            let mut raw = raw.assume_init();
+            let raw = raw.assume_init();
 
             let proj_size = projection_size.into().unwrap_or(0);
             let dropout_desc = dropout_desc.map_or(std::ptr::null_mut(), |desc| desc.raw);

crates/cudnn/src/softmax/softmax_mode.rs
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-use crate::{sys, SoftmaxAlgo};
+use crate::sys;
 
 /// Specifies how the softmax input must be processed.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

crates/cudnn/src/sys.rs
Lines changed: 1 addition & 0 deletions

@@ -1,4 +1,5 @@
 /* automatically generated by rust-bindgen 0.59.2 */
+#![allow(warnings)]
 
 pub const CUDNN_MAJOR: u32 = 8;
 pub const CUDNN_MINOR: u32 = 3;

crates/cudnn/src/tensor/tensor_descriptor.rs
Lines changed: 1 addition & 4 deletions

@@ -1,8 +1,5 @@
 use crate::{sys, CudnnError, DataType, IntoResult, ScalarC, TensorFormat, VecType};
-use std::{
-    marker::PhantomData,
-    mem::{self, MaybeUninit},
-};
+use std::{marker::PhantomData, mem::MaybeUninit};
 
 /// A generic description of an n-dimensional dataset.
 #[derive(Debug, PartialEq, Eq, Hash)]
