# Copyright (c) 2022 Huawei Technologies Co., Ltd
# All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
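"""Test for fuse_add_softmax_dropout from torch_npu.contrib.function.

The fused NPU path is compared against an eager add + softmax + dropout
reference on half-precision inputs.
"""
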
import math

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_npu
from torch_npu.contrib.function import fuse_add_softmax_dropout
from torch_npu.testing.testcase import TestCase, run_tests


class TestFuseAddSoftmaxDropout(TestCase):
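    """Compare the fused NPU kernel with an eager reference implementation."""
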
    def npu_fuse_add_softmax_dropout(self, dropout, attn_mask, attn_scores, attn_head_size):
        # Eager reference: torch.add(a, b, alpha=s) computes a + s * b, i.e. the
        # scores are scaled by 1 / sqrt(attn_head_size) and the mask is added,
        # then softmax over the last dim and dropout are applied in sequence.
        attn_scores = torch.add(attn_mask, attn_scores, alpha=(1 / math.sqrt(attn_head_size)))
        attn_probs = F.softmax(attn_scores, dim=-1)
        attn_probs = dropout(attn_probs)
        return attn_probs

    def test_fuse_add_softmax_dropout(self):
        training = True
        # DropoutWithByteMask is the byte-mask dropout variant used on NPU;
        # p=0 disables dropout so both paths are deterministic and comparable.
        dropout = nn.DropoutWithByteMask(0)
        npu_input1 = torch.rand(96, 12, 384, 384).npu().half()
        npu_input2 = torch.rand(96, 12, 384, 384).npu().half()
        attn_head_size = 64
        dropout_prob = 0
        npu_output = self.npu_fuse_add_softmax_dropout(dropout, npu_input1, npu_input2, attn_head_size)
        high_performance_output = fuse_add_softmax_dropout(training=training, dropout=dropout,
                                                           attn_mask=npu_input1, attn_scores=npu_input2,
                                                           attn_head_size=attn_head_size, p=dropout_prob)
        self.assertRtolEqual(npu_output.detach().cpu().numpy(),
                             high_performance_output.detach().cpu().numpy())


if __name__ == "__main__":
    run_tests()