[go: nahoru, domu]

Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix bug in onnx perchannel mode with ConvTranspose #3149

Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Remove hardcoded path when getting model
Signed-off-by: Michael Tuttle <quic_mtuttle@quicinc.com>
  • Loading branch information
quic-mtuttle committed Jul 5, 2024
commit a1fcfd11f12d3cf4b1dc388913f7ff4f66d9804c
55 changes: 29 additions & 26 deletions TrainingExtensions/onnx/test/python/models/models_for_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
from typing import Dict, List

import os
import tempfile
import torch.nn.functional as F
from torch import nn as nn
from torchvision.ops import roi_align
Expand Down Expand Up @@ -1112,35 +1113,37 @@ def multi_output_model():
return model

def transposed_conv_model():
    """Export ``TransposedConvModel`` to ONNX and return it wrapped in ``ONNXModel``.

    The model is exported to a temporary directory (deleted on exit) instead of
    a hardcoded relative path, so no stray ``.onnx`` file is left in the CWD and
    concurrent test runs cannot clobber each other's exports.

    Returns:
        ONNXModel: in-memory ONNX representation of the transposed-conv network.
    """
    with tempfile.TemporaryDirectory() as save_dir:
        # Dummy input; presumably (N, C, H, W) = (10, 10, 4, 4) matches the
        # network's expected input — confirm against TransposedConvModel.
        x = torch.randn(10, 10, 4, 4, requires_grad=True)
        model = TransposedConvModel()
        save_path = os.path.join(save_dir, "model_transposed_conv.onnx")
        # Export the model
        torch.onnx.export(model,                    # model being run
                          x,                        # model input (or a tuple for multiple inputs)
                          save_path,                # where to save the model
                          export_params=True,       # store the trained parameter weights inside the model file
                          opset_version=12,         # the ONNX version to export the model to
                          do_constant_folding=True, # whether to execute constant folding for optimization
                          input_names=['input'],    # the model's input names
                          output_names=['output'])
        # Load fully into memory before the temporary directory is removed.
        model = ONNXModel(load_model(save_path))
    return model

def depthwise_transposed_conv_model():
    """Export ``DepthwiseTransposedConvModel`` to ONNX and return it wrapped in ``ONNXModel``.

    Mirrors :func:`transposed_conv_model` but for the depthwise variant. The
    export goes to a temporary directory (deleted on exit) rather than a
    hardcoded relative path, avoiding leftover files and cross-test clobbering.

    Returns:
        ONNXModel: in-memory ONNX representation of the depthwise transposed-conv network.
    """
    with tempfile.TemporaryDirectory() as save_dir:
        # Dummy input; presumably (N, C, H, W) = (10, 10, 4, 4) matches the
        # network's expected input — confirm against DepthwiseTransposedConvModel.
        x = torch.randn(10, 10, 4, 4, requires_grad=True)
        model = DepthwiseTransposedConvModel()
        save_path = os.path.join(save_dir, "model_transposed_conv.onnx")
        # Export the model
        torch.onnx.export(model,                    # model being run
                          x,                        # model input (or a tuple for multiple inputs)
                          save_path,                # where to save the model
                          export_params=True,       # store the trained parameter weights inside the model file
                          opset_version=12,         # the ONNX version to export the model to
                          do_constant_folding=True, # whether to execute constant folding for optimization
                          input_names=['input'],    # the model's input names
                          output_names=['output'])
        # Load fully into memory before the temporary directory is removed.
        model = ONNXModel(load_model(save_path))
    return model


Expand Down
Loading