Commit cb4cb1d
JC committed Jul 11, 2024 (1 parent: 69be99b)
Showing 11 changed files with 344 additions and 3 deletions.
One of the changed files is binary and is not shown (presumably the generated pad.onnx). The first file shown is the Python script that builds the Pad test model and verifies it with ONNX's reference evaluator:
@@ -0,0 +1,156 @@
#!/usr/bin/env python3

# used to generate model: onnx-tests/tests/pad/pad.onnx

### Helper Functions ###
from typing import Any
import numpy
import onnx
from onnx import ModelProto, TensorProto, ValueInfoProto
from onnx.reference import ReferenceEvaluator
from onnx.checker import check_model
from onnx.helper import (
    make_model,
    make_node,
    make_graph,
)


# Build a single-node graph for `name`, validate it, run the checks below,
# and save it as `{name}.onnx`.
def build_test_save(
    name: str,
    inputs: list[ValueInfoProto],
    outputs: list[ValueInfoProto],
    initializers: list[TensorProto] = [],  # mutable defaults: never mutated here
    attributes: dict[str, Any] = {},
) -> None:
    node_inputs = [input.name for input in inputs + initializers]
    node_outputs = [output.name for output in outputs]

    node = make_node(
        name.capitalize(),
        inputs=node_inputs,
        outputs=node_outputs,
        **attributes,
    )

    graph = make_graph(
        nodes=[node],
        name=f"{name.capitalize()}Graph",
        inputs=inputs,
        outputs=outputs,
        initializer=initializers,
    )

    onnx_model = make_model(graph)
    check_model(onnx_model)

    run_tests(onnx_model)

    onnx.save(onnx_model, f"{name}.onnx")


class TestCase:
    def __init__(
        self, name: str, feeds: dict[str, numpy.ndarray], expected: numpy.ndarray
    ):
        self.name = name
        self.feeds = feeds
        self.expected = expected

    # Run the model with the ONNX reference evaluator and compare the result
    # against the expected array.
    def test_model(self, model: ModelProto):
        sess = ReferenceEvaluator(model)

        result = numpy.array(sess.run(None, self.feeds))

        if not numpy.array_equal(result, self.expected):
            print(
                f"""{self.name}
Expected result: {self.expected}
Got: {result}"""
            )
            raise Exception("Test failed")


def test_positive_pads(model: ModelProto) -> None:
    input_tensor = numpy.arange(1, 7).reshape(3, 2)
    pads = numpy.array([1, 2, 3, 4], dtype="int")
    constant_value = 0
    feeds = {
        "input_tensor": input_tensor,
        "pads": pads,
        "constant_value": constant_value,
    }
    expected = numpy.array(
        [
            [
                [0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 1, 2, 0, 0, 0, 0],
                [0, 0, 3, 4, 0, 0, 0, 0],
                [0, 0, 5, 6, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0],
            ]
        ]
    )

    TestCase("test_positive_constant_pads", feeds, expected).test_model(model)


def test_1d_input(model: ModelProto) -> None:
    input_tensor = numpy.arange(1, 5)
    pads = numpy.array([1, 2], dtype="int")
    constant_value = 0
    feeds = {
        "input_tensor": input_tensor,
        "pads": pads,
        "constant_value": constant_value,
    }
    expected = numpy.array([[0, 1, 2, 3, 4, 0, 0]])

    TestCase("test_1d_input", feeds, expected).test_model(model)


def run_tests(model: ModelProto) -> None:
    test_positive_pads(model)
    test_1d_input(model)
    # TODO: test_negative_pads
    # TODO: support other modes: reflect, edge, wrap


### Helper Functions End ###

# Imports for the model-specific section below.
import numpy
from onnx import TensorProto, numpy_helper
from onnx.helper import make_tensor_value_info


def get_initializers() -> list[TensorProto]:
    pads = numpy_helper.from_array(
        numpy.array([1, 2, 3, 4]).astype(numpy.int64), name="pads"
    )
    constant_value = numpy_helper.from_array(
        numpy.array([0]).astype(numpy.int64), name="constant_value"
    )

    return [pads, constant_value]


def main() -> None:
    name = "pad"

    inputs = [make_tensor_value_info("input_tensor", TensorProto.INT64, [None, None])]
    outputs = [make_tensor_value_info("output", TensorProto.INT64, [None, None])]
    initializers = get_initializers()

    build_test_save(
        name=name,
        inputs=inputs,
        outputs=outputs,
        initializers=initializers,
        attributes={"mode": "constant"},
    )


if __name__ == "__main__":
    main()
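Note (not part of the commit): the expected matrix in test_positive_pads can be cross-checked with numpy.pad, which takes per-axis (before, after) pairs where ONNX Pad uses a flattened [x1_begin, x2_begin, ..., x1_end, x2_end] layout. A minimal sketch:

# Sketch only: cross-check the expected output of test_positive_pads.
import numpy

x = numpy.arange(1, 7).reshape(3, 2)
onnx_pads = [1, 2, 3, 4]  # begin(axis0), begin(axis1), end(axis0), end(axis1)
pad_width = [(onnx_pads[0], onnx_pads[2]), (onnx_pads[1], onnx_pads[3])]
padded = numpy.pad(x, pad_width, mode="constant", constant_values=0)
assert padded.shape == (7, 8)  # 1 + 3 + 3 rows, 2 + 2 + 4 columns, as in the test above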
The second file shown is the Rust codegen for the new Pad node in burn-import:
@@ -0,0 +1,104 @@
use std::str::FromStr;

use super::{Node, NodeCodegen};
use crate::burn::{Scope, TensorType, ToTokens, Type};
use burn::config::Config;
use burn::record::PrecisionSettings;
use proc_macro2::TokenStream;
use quote::quote;

#[derive(Config, Debug)]
pub struct PadConfig {
    pub pads: Vec<usize>,
    pub constant_value: i64,
}

#[derive(Debug, Clone, new)]
pub struct PadNode {
    pub input: TensorType,
    pub output: TensorType,
    pub config: PadConfig,
}

impl<PS: PrecisionSettings> NodeCodegen<PS> for PadNode {
    fn output_types(&self) -> Vec<Type> {
        vec![Type::Tensor(self.output.clone())]
    }

    fn input_types(&self) -> Vec<Type> {
        vec![Type::Tensor(self.input.clone())]
    }

    fn forward(&self, scope: &mut Scope, node_position: usize) -> TokenStream {
        let input = scope.tensor_use_owned(&self.input, node_position);
        let output = &self.output.name;

        let pads = self.config.pads.iter().map(|p| p.to_tokens());
        // Render the constant as a literal like `-1_i64.elem()` and parse it
        // back into a token stream.
        let constant_value_string = format!("{}_i64.elem()", self.config.constant_value);
        let constant_value = TokenStream::from_str(&constant_value_string).unwrap();

        quote! {
            let #output = #input.pad((#(#pads),*), #constant_value);
        }
    }

    fn into_node(self) -> Node<PS> {
        Node::Pad(self)
    }

    fn register_imports(&self, imports: &mut crate::burn::BurnImports) {
        imports.register("burn::tensor::ElementConversion");
    }
}

#[cfg(test)]
mod tests {
    use burn::record::FullPrecisionSettings;

    use super::*;
    use crate::burn::{
        graph::BurnGraph,
        node::{pad::PadNode, test::assert_tokens},
        TensorType,
    };

    #[test]
    fn test_codegen_pad() {
        let mut graph = BurnGraph::<FullPrecisionSettings>::default();
        let config = PadConfig::new(vec![1, 2, 3, 4], -1);
        graph.register(PadNode::new(
            TensorType::new_float("input", 2),
            TensorType::new_float("output", 2),
            config,
        ));
        graph.register_input_output(vec!["input".to_string()], vec!["output".to_string()]);

        let expected = quote! {
            use burn::tensor::ElementConversion;
            use burn::{
                module::Module,
                tensor::{backend::Backend, Tensor},
            };

            #[derive(Module, Debug)]
            pub struct Model<B: Backend> {
                phantom: core::marker::PhantomData<B>,
                device: burn::module::Ignored<B::Device>,
            }

            impl<B: Backend> Model<B> {
                #[allow(unused_variables)]
                pub fn new(device: &B::Device) -> Self {
                    Self {
                        phantom: core::marker::PhantomData,
                        device: burn::module::Ignored(device.clone()),
                    }
                }

                #[allow(clippy::let_and_return, clippy::approx_constant)]
                pub fn forward(&self, input: Tensor<B, 2>) -> Tensor<B, 2> {
                    let output = input.pad((1, 2, 3, 4), -1_i64.elem());
                    output
                }
            }
        };

        assert_tokens(graph.codegen(), expected);
    }
}
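Note (not part of the commit): a quick way to exercise the generated model end to end, assuming pad.onnx produced by the script above is in the working directory. Only input_tensor needs to be fed, since pads and constant_value ship as initializers:

# Sketch only: run the generated pad.onnx with the ONNX reference evaluator.
import numpy
import onnx
from onnx.reference import ReferenceEvaluator

model = onnx.load("pad.onnx")  # assumes the generator script was run here
sess = ReferenceEvaluator(model)
x = numpy.arange(1, 7, dtype=numpy.int64).reshape(3, 2)
(output,) = sess.run(None, {"input_tensor": x})
print(output.shape)  # (7, 8), matching test_positive_pads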