/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_LITE_INCLUDE_REGISTRY_OPENCL_RUNTIME_WRAPPER_H_
#define MINDSPORE_LITE_INCLUDE_REGISTRY_OPENCL_RUNTIME_WRAPPER_H_
#include <vector>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <utility>
#include <type_traits>
#include "CL/cl2.hpp"
#include "include/api/allocator.h"
#include "include/api/status.h"
#include "include/api/dual_abi_helper.h"
namespace mindspore::registry::opencl {
class MS_API OpenCLRuntimeWrapper {
 public:
  OpenCLRuntimeWrapper() = default;
  ~OpenCLRuntimeWrapper() = default;
  /// \brief Load the OpenCL source code and bind it to a program name.
  ///
  /// \param[in] program_name Name of the OpenCL program.
  /// \param[in] source OpenCL source code.
  ///
  /// \return Status of loading the source code.
  inline Status LoadSource(const std::string &program_name, const std::string &source);
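  // Illustrative usage sketch (not part of this header's API surface): the program name
  // and kernel source below are hypothetical.
  //   OpenCLRuntimeWrapper runtime;
  //   const std::string fill_source =
  //     "__kernel void fill_one(__global float *dst) { dst[get_global_id(0)] = 1.0f; }";
  //   if (runtime.LoadSource("fill_program", fill_source) != kSuccess) {
  //     // handle the load failure
  //   }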
  /// \brief Build an OpenCL kernel from a previously loaded program.
  ///
  /// \param[out] kernel Used to return the compiled kernel.
  /// \param[in] program_name Name of the OpenCL program.
  /// \param[in] kernel_name Name of the OpenCL kernel within the program.
  /// \param[in] build_options_ext Extra OpenCL build options.
  ///
  /// \return Status of building the kernel.
  inline Status BuildKernel(cl::Kernel *kernel, const std::string &program_name, const std::string &kernel_name,
                            const std::vector<std::string> &build_options_ext = {});
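  // Illustrative usage sketch, continuing the LoadSource example above; the build option
  // shown is an ordinary OpenCL compiler flag, chosen only for illustration.
  //   cl::Kernel fill_kernel;
  //   std::vector<std::string> build_options = {"-cl-fast-relaxed-math"};
  //   if (runtime.BuildKernel(&fill_kernel, "fill_program", "fill_one", build_options) != kSuccess) {
  //     // handle the build failure
  //   }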
  /// \brief Set a kernel argument from a pointer value.
  ///
  /// \param[in] kernel The OpenCL kernel.
  /// \param[in] index Index of the kernel argument.
  /// \param[in] value Pointer to the kernel argument value.
  ///
  /// \return Status of setting the kernel argument.
  Status SetKernelArg(const cl::Kernel &kernel, uint32_t index, void *const value);
  /// \brief Set a kernel argument from a non-pointer value.
  ///
  /// \param[in] kernel The OpenCL kernel.
  /// \param[in] index Index of the kernel argument.
  /// \param[in] value Kernel argument value, passed by value.
  ///
  /// \return Status of setting the kernel argument.
  template <typename T>
  typename std::enable_if<!std::is_pointer<T>::value, Status>::type SetKernelArg(const cl::Kernel &kernel,
                                                                                 uint32_t index, const T value) {
    if (const_cast<cl::Kernel &>(kernel).setArg(index, value) != CL_SUCCESS) {
      return kLiteError;
    } else {
      return kSuccess;
    }
  }
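  // Illustrative usage sketch: the pointer overload above is intended for device memory
  // (here assumed to come from GetAllocator(), which is an assumption of this sketch),
  // while the template overload passes scalars by value. Buffer and element count are
  // hypothetical.
  //   void *dst_buf = runtime.GetAllocator()->Malloc(1024 * sizeof(float));
  //   (void)runtime.SetKernelArg(fill_kernel, 0, dst_buf);  // pointer overload
  //   (void)runtime.SetKernelArg(fill_kernel, 1, 1024);     // template overload, int by value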
  /// \brief Run an OpenCL kernel.
  ///
  /// \param[in] kernel The OpenCL kernel.
  /// \param[in] global Global work size (total number of work items).
  /// \param[in] local Local work size (number of work items per work group).
  /// \param[in] command_queue The command queue; optional.
  /// \param[in] event Event associated with the kernel launch; optional.
  ///
  /// \return Status of running the OpenCL kernel.
  Status RunKernel(const cl::Kernel &kernel, const cl::NDRange &global, const cl::NDRange &local,
                   cl::CommandQueue *command_queue = nullptr, cl::Event *event = nullptr);
  /// \brief Synchronize the command queue.
  ///
  /// \return Status of synchronizing the command queue.
  Status SyncCommandQueue();
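  // Illustrative usage sketch: a one-dimensional launch of the kernel built above,
  // followed by a queue synchronization. The global/local sizes are hypothetical;
  // global is chosen as a multiple of local here.
  //   cl::NDRange global(1024);
  //   cl::NDRange local(64);
  //   if (runtime.RunKernel(fill_kernel, global, local) != kSuccess) {
  //     // handle the launch failure
  //   }
  //   (void)runtime.SyncCommandQueue();  // wait for the queued work to finish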
  /// \brief Map OpenCL memory for host access and return a host-accessible pointer.
  void *MapBuffer(void *host_ptr, int flags, bool sync = true);
  /// \brief Unmap previously mapped OpenCL memory.
  Status UnmapBuffer(void *host_ptr);
  /// \brief Read data from an OpenCL image into host memory.
  Status ReadImage(void *buffer, void *dst_data);
  /// \brief Write host data into an OpenCL image.
  Status WriteImage(void *buffer, void *src_data);
  /// \brief Get the GPU memory allocator used by the runtime.
  std::shared_ptr<Allocator> GetAllocator();
  /// \brief Query the maximum work-group size of the device.
  uint64_t DeviceMaxWorkGroupSize();
  /// \brief Query the maximum supported 2D image width of the device.
  uint64_t GetMaxImage2DWidth();
  /// \brief Query the maximum supported 2D image height of the device.
  uint64_t GetMaxImage2DHeight();
  /// \brief Query the image pitch alignment of the device.
  uint64_t GetImagePitchAlignment();
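  // Illustrative usage sketch: reading results back on the host. CL_MAP_READ comes from
  // the OpenCL headers; dst_buf is the hypothetical buffer from the SetKernelArg sketch,
  // and the exact mapping semantics are assumed to follow the underlying allocator.
  //   float *host_view = static_cast<float *>(runtime.MapBuffer(dst_buf, CL_MAP_READ));
  //   // ... consume host_view ...
  //   (void)runtime.UnmapBuffer(dst_buf);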
 private:
  Status LoadSource(const std::vector<char> &program_name, const std::vector<char> &source);
  Status BuildKernel(cl::Kernel *kernel, const std::vector<char> &program_name, const std::vector<char> &kernel_name,
                     const std::vector<std::vector<char>> &build_options_ext);
};
Status OpenCLRuntimeWrapper::LoadSource(const std::string &program_name, const std::string &source) {
  return LoadSource(StringToChar(program_name), StringToChar(source));
}

Status OpenCLRuntimeWrapper::BuildKernel(cl::Kernel *kernel, const std::string &program_name,
                                         const std::string &kernel_name,
                                         const std::vector<std::string> &build_options_ext) {
  return BuildKernel(kernel, StringToChar(program_name), StringToChar(kernel_name),
                     VectorStringToChar(build_options_ext));
}
} // namespace mindspore::registry::opencl
#endif // MINDSPORE_LITE_INCLUDE_REGISTRY_OPENCL_RUNTIME_WRAPPER_H_