# Copyright 2019 The JAX Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common functions for neural network libraries."""
# Note: import <name> as <name> is required for names to be exported.
# See PEP 484 & https://github.com/jax-ml/jax/issues/7570
from jax.numpy import tanh as tanh
from jax.nn import initializers as initializers
from jax._src.nn.functions import (
celu as celu,
elu as elu,
gelu as gelu,
glu as glu,
hard_sigmoid as hard_sigmoid,
hard_silu as hard_silu,
hard_swish as hard_swish,
hard_tanh as hard_tanh,
leaky_relu as leaky_relu,
log_sigmoid as log_sigmoid,
log_softmax as log_softmax,
logmeanexp as logmeanexp,
logsumexp as logsumexp,
standardize as standardize,
one_hot as one_hot,
relu as relu,
identity as identity,
relu6 as relu6,
dot_product_attention as dot_product_attention,
get_scaled_dot_general_config as get_scaled_dot_general_config,
scaled_dot_general as scaled_dot_general,
scaled_matmul as scaled_matmul,
selu as selu,
sigmoid as sigmoid,
soft_sign as soft_sign,
softmax as softmax,
softplus as softplus,
sparse_plus as sparse_plus,
sparse_sigmoid as sparse_sigmoid,
silu as silu,
swish as swish,
squareplus as squareplus,
mish as mish,
log1mexp as log1mexp,
)