Commit

Merge remote-tracking branch 'upstream/main' into implicit-gemm-opt
wingertge committed Sep 24, 2024
2 parents 0771f1f + a6f7a5e commit 597ea84
Showing 246 changed files with 6,270 additions and 8,999 deletions.
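Every benchmark diff shown below follows the same two-part pattern: the const-generic `Shape<D>` type loses its rank parameter and becomes plain `Shape`, and `Benchmark::shapes` switches from `.dims.into()` to `.dims.clone()`, which implies the `dims` field is now a `Vec<usize>` rather than a fixed-size `[usize; D]` array. The following is a minimal, self-contained sketch of that before/after using simplified stand-in types, not Burn's actual definitions:

// Sketch only: simplified stand-ins for Burn's types, not the real definitions.

// Before the change: the rank is a const generic and dims is a fixed-size array.
struct ShapeOld<const D: usize> {
    dims: [usize; D],
}

// After the change: the rank is dynamic and dims is a Vec.
struct ShapeNew {
    dims: Vec<usize>,
}

// Old reporting style: the array is converted into a Vec, hence `.into()`.
fn shapes_old<const D: usize>(shape: &ShapeOld<D>) -> Vec<Vec<usize>> {
    vec![shape.dims.into()]
}

// New reporting style: the Vec is cloned, hence `.clone()`.
fn shapes_new(shape: &ShapeNew) -> Vec<Vec<usize>> {
    vec![shape.dims.clone()]
}

fn main() {
    let old = ShapeOld { dims: [32, 512, 1024] };
    let new = ShapeNew { dims: vec![32, 512, 1024] };
    assert_eq!(shapes_old(&old), shapes_new(&new));
}

Either way, `shapes()` hands back a freshly allocated `Vec<usize>`; only the source changes from a stack array to a heap vector.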
4 changes: 2 additions & 2 deletions backend-comparison/benches/binary.rs
@@ -6,7 +6,7 @@ use burn_common::{
 };
 
 pub struct BinaryBenchmark<B: Backend, const D: usize> {
-    shape: Shape<D>,
+    shape: Shape,
     device: B::Device,
 }
 
@@ -18,7 +18,7 @@ impl<B: Backend, const D: usize> Benchmark for BinaryBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, (lhs, rhs): Self::Args) {
12 changes: 6 additions & 6 deletions backend-comparison/benches/conv2d.rs
@@ -8,9 +8,9 @@ use burn_common::{
 };
 
 pub struct Conv2dBenchmark<B: Backend> {
-    input_shape: Shape<4>,
-    weight_shape: Shape<4>,
-    bias_shape: Shape<1>,
+    input_shape: Shape,
+    weight_shape: Shape,
+    bias_shape: Shape,
     options: ConvOptions<2>,
     device: B::Device,
 }
@@ -24,9 +24,9 @@ impl<B: Backend> Benchmark for Conv2dBenchmark<B> {
 
     fn shapes(&self) -> Vec<Vec<usize>> {
         vec![
-            self.input_shape.dims.into(),
-            self.weight_shape.dims.into(),
-            self.bias_shape.dims.into(),
+            self.input_shape.dims.clone(),
+            self.weight_shape.dims.clone(),
+            self.bias_shape.dims.clone(),
         ]
     }
 
12 changes: 6 additions & 6 deletions backend-comparison/benches/conv3d.rs
@@ -8,9 +8,9 @@ use burn_common::{
 };
 
 pub struct Conv3dBenchmark<B: Backend> {
-    input_shape: Shape<5>,
-    weight_shape: Shape<5>,
-    bias_shape: Shape<1>,
+    input_shape: Shape,
+    weight_shape: Shape,
+    bias_shape: Shape,
     options: ConvOptions<3>,
     device: B::Device,
 }
@@ -24,9 +24,9 @@ impl<B: Backend> Benchmark for Conv3dBenchmark<B> {
 
     fn shapes(&self) -> Vec<Vec<usize>> {
         vec![
-            self.input_shape.dims.into(),
-            self.weight_shape.dims.into(),
-            self.bias_shape.dims.into(),
+            self.input_shape.dims.clone(),
+            self.weight_shape.dims.clone(),
+            self.bias_shape.dims.clone(),
        ]
    }
 
12 changes: 6 additions & 6 deletions backend-comparison/benches/conv_transpose2d.rs
@@ -9,9 +9,9 @@ use burn_common::{
 };
 
 pub struct ConvTranspose2dBenchmark<B: Backend> {
-    input_shape: Shape<4>,
-    weight_shape: Shape<4>,
-    bias_shape: Shape<1>,
+    input_shape: Shape,
+    weight_shape: Shape,
+    bias_shape: Shape,
     options: ConvTransposeOptions<2>,
     device: B::Device,
 }
@@ -25,9 +25,9 @@ impl<B: Backend> Benchmark for ConvTranspose2dBenchmark<B> {
 
     fn shapes(&self) -> Vec<Vec<usize>> {
         vec![
-            self.input_shape.dims.into(),
-            self.weight_shape.dims.into(),
-            self.bias_shape.dims.into(),
+            self.input_shape.dims.clone(),
+            self.weight_shape.dims.clone(),
+            self.bias_shape.dims.clone(),
        ]
    }
 
12 changes: 6 additions & 6 deletions backend-comparison/benches/conv_transpose3d.rs
@@ -9,9 +9,9 @@ use burn_common::{
 };
 
 pub struct ConvTranspose3dBenchmark<B: Backend> {
-    input_shape: Shape<5>,
-    weight_shape: Shape<5>,
-    bias_shape: Shape<1>,
+    input_shape: Shape,
+    weight_shape: Shape,
+    bias_shape: Shape,
     options: ConvTransposeOptions<3>,
     device: B::Device,
 }
@@ -25,9 +25,9 @@ impl<B: Backend> Benchmark for ConvTranspose3dBenchmark<B> {
 
     fn shapes(&self) -> Vec<Vec<usize>> {
         vec![
-            self.input_shape.dims.into(),
-            self.weight_shape.dims.into(),
-            self.bias_shape.dims.into(),
+            self.input_shape.dims.clone(),
+            self.weight_shape.dims.clone(),
+            self.bias_shape.dims.clone(),
        ]
    }
 
6 changes: 3 additions & 3 deletions backend-comparison/benches/custom_gelu.rs
@@ -17,7 +17,7 @@ enum GeluKind {
 /// operations.
 #[derive(new)]
 struct CustomGeluBenchmark<B: Backend, const D: usize> {
-    shape: Shape<D>,
+    shape: Shape,
     device: B::Device,
     kind: GeluKind,
     autodiff: bool,
@@ -38,7 +38,7 @@ impl<B: Backend, const D: usize> Benchmark for CustomGeluBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, tensor: Self::Args) {
@@ -121,7 +121,7 @@ fn bench<B: Backend>(
     token: Option<&str>,
 ) {
     const D: usize = 3;
-    let shape: Shape<D> = [32, 512, 2048].into();
+    let shape: Shape = [32, 512, 2048].into();
 
     let run = |autodiff: bool| {
         let reference_gelu = CustomGeluBenchmark::<B, D>::new(
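In the `bench` functions only the type annotation changes: `[32, 512, 2048].into()` still constructs the shape directly from a fixed-size array, so an array-to-`Shape` conversion evidently remains available after the rank parameter is dropped. A hedged sketch of such a conversion for the simplified `ShapeNew` stand-in above (not Burn's actual `From` impl):

// Sketch only: a possible From<[usize; N]> impl for the simplified ShapeNew above,
// illustrating why `[32, 512, 2048].into()` keeps working after the rank is erased.
impl<const N: usize> From<[usize; N]> for ShapeNew {
    fn from(dims: [usize; N]) -> Self {
        // Any fixed-size array of dims becomes a heap-allocated Vec.
        ShapeNew { dims: dims.to_vec() }
    }
}

fn demo() {
    // Same call shape as the benchmarks: annotate the target type, convert with `.into()`.
    let shape: ShapeNew = [32, 512, 2048].into();
    assert_eq!(shape.dims, vec![32, 512, 2048]);
}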
10 changes: 5 additions & 5 deletions backend-comparison/benches/data.rs
@@ -8,7 +8,7 @@ use derive_new::new;
 
 #[derive(new)]
 struct ToDataBenchmark<B: Backend, const D: usize> {
-    shape: Shape<D>,
+    shape: Shape,
     device: B::Device,
 }
 
@@ -20,7 +20,7 @@ impl<B: Backend, const D: usize> Benchmark for ToDataBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, args: Self::Args) {
@@ -38,7 +38,7 @@ impl<B: Backend, const D: usize> Benchmark for ToDataBenchmark<B, D> {
 
 #[derive(new)]
 struct FromDataBenchmark<B: Backend, const D: usize> {
-    shape: Shape<D>,
+    shape: Shape,
     device: B::Device,
 }
 
@@ -50,7 +50,7 @@ impl<B: Backend, const D: usize> Benchmark for FromDataBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, (data, device): Self::Args) {
@@ -81,7 +81,7 @@ fn bench<B: Backend>(
     token: Option<&str>,
 ) {
     const D: usize = 3;
-    let shape: Shape<D> = [32, 512, 1024].into();
+    let shape: Shape = [32, 512, 1024].into();
 
     let to_benchmark = ToDataBenchmark::<B, D>::new(shape.clone(), device.clone());
     let from_benchmark = FromDataBenchmark::<B, D>::new(shape, device.clone());
6 changes: 3 additions & 3 deletions backend-comparison/benches/matmul.rs
@@ -8,8 +8,8 @@ use derive_new::new;
 
 #[derive(new)]
 struct MatmulBenchmark<B: Backend, const D: usize> {
-    shape_lhs: Shape<D>,
-    shape_rhs: Shape<D>,
+    shape_lhs: Shape,
+    shape_rhs: Shape,
     device: B::Device,
 }
 
@@ -21,7 +21,7 @@ impl<B: Backend, const D: usize> Benchmark for MatmulBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape_lhs.dims.into(), self.shape_rhs.dims.into()]
+        vec![self.shape_lhs.dims.clone(), self.shape_rhs.dims.clone()]
     }
 
     fn num_samples(&self) -> usize {
4 changes: 2 additions & 2 deletions backend-comparison/benches/max_pool2d.rs
@@ -6,7 +6,7 @@ use burn_common::{
 };
 
 pub struct MaxPool2dBenchmark<B: Backend> {
-    shape: Shape<4>,
+    shape: Shape,
     kernel_size: [usize; 2],
     stride: [usize; 2],
     padding: [usize; 2],
@@ -22,7 +22,7 @@ impl<B: Backend> Benchmark for MaxPool2dBenchmark<B> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, x: Self::Args) {
4 changes: 2 additions & 2 deletions backend-comparison/benches/resnet.rs
@@ -14,7 +14,7 @@ mod model {
 }
 
 pub struct ResNetBenchmark<B: Backend> {
-    shape: Shape<4>,
+    shape: Shape,
     device: B::Device,
 }
 
@@ -26,7 +26,7 @@ impl<B: Backend> Benchmark for ResNetBenchmark<B> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, (model, input): Self::Args) {
6 changes: 3 additions & 3 deletions backend-comparison/benches/unary.rs
@@ -8,7 +8,7 @@ use derive_new::new;
 
 #[derive(new)]
 struct UnaryBenchmark<B: Backend, const D: usize> {
-    shape: Shape<D>,
+    shape: Shape,
     device: B::Device,
 }
 
@@ -20,7 +20,7 @@ impl<B: Backend, const D: usize> Benchmark for UnaryBenchmark<B, D> {
     }
 
     fn shapes(&self) -> Vec<Vec<usize>> {
-        vec![self.shape.dims.into()]
+        vec![self.shape.dims.clone()]
     }
 
     fn execute(&self, args: Self::Args) {
@@ -45,7 +45,7 @@ fn bench<B: Backend>(
     token: Option<&str>,
 ) {
     const D: usize = 3;
-    let shape: Shape<D> = [32, 512, 1024].into();
+    let shape: Shape = [32, 512, 1024].into();
 
     let benchmark = UnaryBenchmark::<B, D>::new(shape, device.clone());
 