Skip to content

Commit

Permalink
fix: support 5-D reflect/edge pad on Huawei Ascend NPU via 4-D reshape; restrict unsqueeze2+pad3d+squeeze2 fusion to zero last-dim paddings
Browse files — browse the repository at this point in the history
  • Loading branch information
root committed Jan 24, 2022
1 parent 10c4c9e commit e5d7386
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
#include "driver/huawei_ascend_npu/converter/converter.h"
#include "utility/debug.h"
#include "utility/logging.h"

#include "utility/modeling.h"
namespace nnadapter {
namespace huawei_ascend_npu {

Expand All @@ -40,6 +40,39 @@ int ConvertPad(Converter* converter, hal::Operation* operation) {
if (!input_operator) {
input_operator = converter->ConvertOperand(input_operand);
}
if ((mode == NNADAPTER_PAD_MODE_REFLECT || mode == NNADAPTER_PAD_MODE_EDGE) &&
input_operand->type.dimensions.count == 5 &&
IsConstantOperand(pads_operand) &&
!IsOperandWithDynamicShape(input_operand)) {
uint32_t pads_size = pads_operand->length / static_cast<uint32_t>(sizeof(int32_t));
auto pads_buffer = reinterpret_cast<int32_t*>(pads_operand->buffer);
if (pads_size == 10 &&
pads_buffer[6] == 0 && pads_buffer[7] == 0 && pads_buffer[8] == 0 && pads_buffer[9] == 0) {
// Reshape to 4-dimensions
std::vector<int32_t> shape_data(input_operand->type.dimensions.data, input_operand->type.dimensions.data + 3);
shape_data.push_back(input_operand->type.dimensions.data[3] * input_operand->type.dimensions.data[4]);
auto shape_operator = converter->AddInt32ConstantOperator(shape_data);
auto reshape_op = converter->AddOperator<ge::op::Reshape>(output_operand);
SET_INPUT(reshape_op, x, input_operator);
SET_INPUT(reshape_op, shape, shape_operator);
auto reshape_output_operator = MAP_OUTPUT(reshape_op, y, output_operand);
// Use NCHW data format
std::vector<int32_t> pad_data(pads_buffer, pads_buffer + 8);
auto pads_operator = converter->AddInt32ConstantOperator(pad_data);
auto pad_op = converter->AddOperator<ge::op::PadV3>(output_operand);
pad_op->set_attr_mode(pad_mode);
SET_INPUT(pad_op, x, reshape_output_operator);
SET_INPUT(pad_op, paddings, pads_operator);
auto pad2d_output_operator = MAP_OUTPUT(pad_op, y, output_operand);
// Reshape to 5-dimensions
auto shape_operator2 = converter->AddInt32ConstantOperator(std::vector<int32_t>(output_operand->type.dimensions.data, output_operand->type.dimensions.data + output_operand->type.dimensions.count));
auto reshape_op2 = converter->AddOperator<ge::op::Reshape>(output_operand);
SET_INPUT(reshape_op2, x, pad2d_output_operator);
SET_INPUT(reshape_op2, shape, shape_operator2);
MAP_OUTPUT(reshape_op2, y, output_operand);
return NNADAPTER_NO_ERROR;
}
}
auto pads_operator = converter->GetMappedOperator(pads_operand);
if (!pads_operator) {
pads_operator = converter->ConvertOperand(pads_operand);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@ bool BuildOMModelToBuffer(
options.insert(std::make_pair(ge::ir_option::LOG_LEVEL, "error"));
options.insert(std::make_pair(ge::ir_option::OP_DEBUG_LEVEL, "0"));
ATC_CALL(aclgrphBuildModel(ir_graph, options, om_buffer));
ATC_CALL(aclgrphSaveModel("ir_graph_model", om_buffer)); // For debug: save ascend offline model to local.
// Copy from om model buffer
model_buffer->resize(om_buffer.length);
memcpy(reinterpret_cast<void*>(model_buffer->data()),
Expand Down
13 changes: 11 additions & 2 deletions lite/core/optimizer/mir/fusion/unsqueeze2_pad3d_squeeze2_fuse.cc
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
//

#include "lite/core/optimizer/mir/fusion/unsqueeze2_pad3d_squeeze2_fuse.h"

#include <memory>
#include <vector>

Expand All @@ -26,6 +27,12 @@ namespace mir {
namespace fusion {

void Unsqueeze2Pad3dSqueeze2Fuser::BuildPattern() {
auto paddings_teller = [](const Node* node) -> bool {
auto op_desc = *const_cast<Node*>(node)->stmt()->op_info();
auto paddings = op_desc.GetAttr<std::vector<int>>("paddings");
return paddings.size() == 6 && paddings[4] == 0 && paddings[5] == 0;
};

// create input nodes.
auto* unsqu_input = VarNode("unsqu_input")
->assert_is_op_input(unsqueeze2_type_, "X")
Expand All @@ -35,8 +42,10 @@ void Unsqueeze2Pad3dSqueeze2Fuser::BuildPattern() {
auto* unsque = OpNode("unsqueeze2", unsqueeze2_type_)
->assert_is_op(unsqueeze2_type_)
->AsIntermediate();
auto* p3d =
OpNode("pad3d", pad3d_type_)->assert_is_op(pad3d_type_)->AsIntermediate();
auto* p3d = OpNode("pad3d", pad3d_type_)
->assert_is_op(pad3d_type_)
->assert_node_satisfied(paddings_teller)
->AsIntermediate();
auto* sque = OpNode("squeeze2", squeeze2_type_)
->assert_is_op(squeeze2_type_)
->AsIntermediate();
Expand Down
3 changes: 1 addition & 2 deletions lite/tests/kernels/pad2d_compute_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -200,8 +200,7 @@ TEST(Pad2d, precision) {
#if defined(NNADAPTER_WITH_HUAWEI_ASCEND_NPU)
// Ascend does not support the following scenarios.
if (std::abs(pad_value - 1) < 1e-6 ||
(pad_top == 0 && pad_bottom == 1 && pad_left == 0 &&
pad_right == 0))
((pad_top == 1 || pad_bottom == 1) && pad_left == 0 && pad_right == 0))
continue;
#endif
VLOG(5) << "pad param: " << pad_mode << " " << pad_value << " "
Expand Down

0 comments on commit e5d7386

Please sign in to comment.