2.3K Star 8.1K Fork 4.3K

GVPMindSpore / mindspore

 / 详情

Pow和Eltwise算子推理出错

DONE
Bug-Report
创建于  
2022-04-07 14:16

简单地测试了一下Pow和Eltwise算子,可以从caffe转为ms模型, 但在推理和量化的时候都有问题。

输入图片说明
测试的网络结构:
输入图片说明

name:"test_power"
layer {
  name: "frame_1" 
  type: "Input"
  top: "frame_1"
  input_param {
    shape {
      dim: 1
      dim: 3
      dim: 272
      dim: 480
    }
  }
}
layer {
  name: "frame_2" 
  type: "Input"
  top: "frame_2"
  input_param {
    shape {
      dim: 1
      dim: 3
      dim: 272
      dim: 480
    }
  }
}
layer {
  name: "concatenate_1"
  type: "Concat"
  bottom: "frame_1"
  bottom: "frame_2"
  top: "concatenate_1"
  concat_param {
    axis: 1
  }
}

layer {
  name: "head_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "head_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "fg_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "fg_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "flow_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "flow_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "head_output_reverse_layer"
  bottom: "head_output"
  top: "head_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_head_output"
  bottom: "head_output_reverse"
  top: "argmax_thr_head_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

layer {
  name: "fg_output_reverse_layer"
  bottom: "fg_output"
  top: "fg_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_fg_output"
  bottom: "fg_output_reverse"
  top: "argmax_thr_fg_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

layer {
  name: "flow_output_reverse_layer"
  bottom: "flow_output"
  top: "flow_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_flow_output"
  bottom: "flow_output_reverse"
  top: "argmax_thr_flow_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

layer {
  name: "argmax_thr_flow_output_power_layer"
  bottom: "argmax_thr_flow_output"
  top: "argmax_thr_flow_output_p"
  type: "Power"
  power_param {
    power: 1
    scale: 4
    shift: 0
  }
}

  layer {
  name: "argmax_thr_head_output_power_layer"
  bottom: "argmax_thr_head_output"
  top: "argmax_thr_head_output_p"
  type: "Power"
  power_param {
    power: 1
    scale: 2
    shift: 0
  }
}

  layer {
  name: "res1"
  type: "Eltwise"
  bottom: "argmax_thr_flow_output_p"
  bottom: "argmax_thr_head_output_p"
  top: "res1"
  eltwise_param {
    operation: SUM
    coeff: 1
    coeff: 1
  }
}

layer {
  name: "final_res"
  type: "Eltwise"
  bottom: "res1"
  bottom: "argmax_thr_fg_output"
  top: "final_res"
  eltwise_param {
    operation: SUM
    coeff: 1
    coeff: 1
  }
}

此外,Eltwise算子只支持两个输入和系数为1的情况

评论 (12)

突tututu兔 创建了Bug-Report

Please assign maintainer to check this issue.
请为此issue分配处理人。
@fangwenyi @chengxiaoli

Please add labels (comp or sig), also you can visit https://gitee.com/mindspore/community/blob/master/sigs/dx/docs/labels.md to find more.
为了让代码尽快被审核,请您为Pull Request打上 组件(comp)或兴趣组(sig) 标签,打上标签的PR可以直接推送给责任人进行审核。
更多的标签可以查看https://gitee.com/mindspore/community/blob/master/sigs/dx/docs/labels.md
以组件相关代码提交为例,如果你提交的是data组件代码,你可以这样评论:
//comp/data
当然你也可以邀请data SIG组来审核代码,可以这样写:
//sig/data
另外你还可以给这个PR标记类型,例如是bugfix或者是特性需求:
//kind/bug or //kind/feature
恭喜你,你已经学会了使用命令来打标签,接下来就在下面的评论里打上标签吧!

突tututu兔 修改了描述
fangwenyi 任务状态TODO 修改为ACCEPTED
fangwenyi 负责人设置为zhaodezan
fangwenyi 关联项目设置为MindSpore Issue Assistant
fangwenyi 添加了
 
mindspore-assistant
标签

你好,问题收到了,已经安排人员专门处理您的这个问题,请随时关注码云上信息

Eltwise报错情况:
输入图片说明
输入图片说明

name:"test_power"
layer {
  name: "frame_1" 
  type: "Input"
  top: "frame_1"
  input_param {
    shape {
      dim: 1
      dim: 3
      dim: 272
      dim: 480
    }
  }
}
layer {
  name: "frame_2" 
  type: "Input"
  top: "frame_2"
  input_param {
    shape {
      dim: 1
      dim: 3
      dim: 272
      dim: 480
    }
  }
}
layer {
  name: "concatenate_1"
  type: "Concat"
  bottom: "frame_1"
  bottom: "frame_2"
  top: "concatenate_1"
  concat_param {
    axis: 1
  }
}

layer {
  name: "head_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "head_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "fg_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "fg_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "flow_output"
  type: "Convolution"
  bottom: "concatenate_1"
  top: "flow_output"
  convolution_param {
    num_output: 2
    bias_term: true
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    dilation: 1
    weight_filler: {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "head_output_reverse_layer"
  bottom: "head_output"
  top: "head_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_head_output"
  bottom: "head_output_reverse"
  top: "argmax_thr_head_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

layer {
  name: "fg_output_reverse_layer"
  bottom: "fg_output"
  top: "fg_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_fg_output"
  bottom: "fg_output_reverse"
  top: "argmax_thr_fg_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

layer {
  name: "flow_output_reverse_layer"
  bottom: "flow_output"
  top: "flow_output_reverse"
  type: "Power"
  power_param {
    power: 1
    scale: -1
    shift: 0
  }
}

layer {
  name: "layer_threshold_flow_output"
  bottom: "flow_output_reverse"
  top: "argmax_thr_flow_output"
  type: "ArgMax"
  argmax_param {
    axis: 1
  }
}

  layer {
  name: "res1"
  type: "Eltwise"
  bottom: "argmax_thr_flow_output"
  bottom: "argmax_thr_head_output"
  top: "res1"
  eltwise_param {
    operation: SUM
    coeff: 1
    coeff: 1
  }
}

layer {
  name: "final_res"
  type: "Eltwise"
  bottom: "res1"
  bottom: "argmax_thr_fg_output"
  top: "final_res"
  eltwise_param {
    operation: SUM
    coeff: 1
    coeff: 1
  }
}
  1. 这个Eltwise确实只支持两个输入的操作,它是一个arithmetic算子,我们认为两个输入就够用了。ONNX标准也只支持两个输入。
  2. 量化的时候没有找到Pow算子的backend,说明这个算子不支持量化,不要用全量化了,用部分量化就可以。

没有量化,直接推理也不行呢,build model就会出错,上面的这两个情况,都是没有做量化,在前向推理的时候出现找不到backend的问题

你用我上面发的这两个model,测试看看呢

你用我上面发的这两个model,测试看看呢

@突tututu兔 发一下你生成的ms模型吧,放到这个帖子附件里,我定位一下

貌似添加不了附件,我怎么发给你

貌似添加不了附件,我怎么发给你

@突tututu兔 1183489276@qq.com

邮箱在上面发一下吧

zhaodezan 任务状态ACCEPTED 修改为VALIDATION

你好,问题解决了吗?如果没解决的话,可以找zhaodezan支撑下
同时由于长时间没有反馈,此ISSUE先关闭,如有问题,可以反馈下具体信息,并将ISSUE状态修改为WIP,我们这边会进一步跟踪,谢谢

fangwenyi 任务状态VALIDATION 修改为DONE

登录 后才可以发表评论

状态
负责人
项目
里程碑
Pull Requests
关联的 Pull Requests 被合并后可能会关闭此 issue
分支
开始日期   -   截止日期
-
置顶选项
优先级
预计工期 (小时)
参与者(4)
Python
1
https://gitee.com/mindspore/mindspore.git
git@gitee.com:mindspore/mindspore.git
mindspore
mindspore
mindspore

搜索帮助