diff --git a/en/native_sdk/ai/mindspore/context.h b/en/native_sdk/ai/mindspore/context.h index dfcb9c70e0d842bed420ae363b22be08aa848dab..60594a1f3d1d7aff7608aba0b11e7cf6f14285a5 100644 --- a/en/native_sdk/ai/mindspore/context.h +++ b/en/native_sdk/ai/mindspore/context.h @@ -29,7 +29,7 @@ * * @brief Provides **Context** APIs for configuring runtime information. * - * File to include: \ + * @include mindspore/context.h * @library libmindspore_lite_ndk.so * @since 9 */ @@ -40,6 +40,7 @@ #include #include #include "mindspore/types.h" +#include "mindspore/status.h" #ifdef __cplusplus extern "C" @@ -409,13 +410,12 @@ OH_AI_API OH_AI_Priority OH_AI_DeviceInfoGetPriority(const OH_AI_DeviceInfoHandl * @brief Adds extended configuration in the form of key/value pairs to the device information. * This function is available only for NNRt devices. * - * Note: The key/value pairs currently supported include - * {"CachePath": "YourCachePath"}, * {"CacheVersion": "YourCacheVersion"}, - * {"QuantBuffer": "YourQuantBuffer"}, {"ModelName": "YourModelName"}, - * {"isProfiling": "YourisProfiling"}, {"opLayout": "YouropLayout"}, - * {"InputDims": "YourInputDims"},{"DynamicDims": "YourDynamicDims"}, - * {"QuantConfigData": "YourQuantConfigData"},{"BandMode": "YourBandMode"}, - * {"NPU_FM_SHARED": "YourNPU_FM_SHARED"} + * Note: The key/value pairs currently supported include: + * {"CachePath": "YourCachePath"}, {"CacheVersion": "YourCacheVersion"},\n + * {"QuantBuffer": "YourQuantBuffer"}, {"ModelName": "YourModelName"},\n + * {"isProfiling": "YourisProfiling"}, {"opLayout": "YouropLayout"},\n + * {"InputDims": "YourInputDims"}, {"DynamicDims": "YourDynamicDims"},\n + * {"QuantConfigData": "YourQuantConfigData"}, and {"BandMode": "YourBandMode"}.\n + * A total of 10 key-value pairs are provided. You can replace the values as required. * * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance. 
diff --git a/en/native_sdk/ai/mindspore/data_type.h b/en/native_sdk/ai/mindspore/data_type.h index bc62d844649c037e590797a24c052cf9e961dddd..35c4fe736a2cbd4e3bca0cb3e77c9582315a3379 100644 --- a/en/native_sdk/ai/mindspore/data_type.h +++ b/en/native_sdk/ai/mindspore/data_type.h @@ -29,7 +29,7 @@ * * @brief Declares tensor data types. * - * File to include: \ + * @include mindspore/data_type.h * @library libmindspore_lite_ndk.so * @since 9 */ diff --git a/en/native_sdk/ai/mindspore/format.h b/en/native_sdk/ai/mindspore/format.h index c12d19c56c3f3ea65632280e8c58dbbe32ecb684..9bd754b7dbd27b2fdbb9be2a751f6a487ef267b9 100644 --- a/en/native_sdk/ai/mindspore/format.h +++ b/en/native_sdk/ai/mindspore/format.h @@ -29,7 +29,7 @@ * * @brief Declares tensor data formats. * - * File to include: \ + * @include mindspore/format.h * @library libmindspore_lite_ndk.so * @since 9 */ diff --git a/en/native_sdk/ai/mindspore/model.h b/en/native_sdk/ai/mindspore/model.h index 084f6b7f1cd73444cb7aa418bf99e9cf4a7b75b0..2c7b3b35ee913ae325cf50f4d4af40d4ff645e67 100644 --- a/en/native_sdk/ai/mindspore/model.h +++ b/en/native_sdk/ai/mindspore/model.h @@ -29,7 +29,7 @@ * * @brief Provides model-related APIs for model creation and inference. * - * File to include: + * @include mindspore/model.h * @library libmindspore_lite_ndk.so * @since 9 */ @@ -81,7 +81,8 @@ typedef struct OH_AI_TensorHandleArray #define OH_AI_MAX_SHAPE_NUM 32 /** - * @brief Defines dimension information. The maximum dimension is set by {@link OH_AI_MAX_SHAPE_NUM}. + * @brief Defines dimension information. The maximum dimension reserved is 32. + * The maximum dimension currently supported is 8. * * @since 9 */ @@ -107,7 +108,7 @@ typedef struct OH_AI_CallBackParam } OH_AI_CallBackParam; /** - * @brief Defines the pointer to a callback. + * @brief Defines the pointer to a callback.\n * * This pointer is used to set the two callback functions in {@link OH_AI_ModelPredict}. 
* Each callback function must contain three parameters, where **inputs** and **outputs** indicate @@ -127,7 +128,7 @@ typedef bool (*OH_AI_KernelCallBack)(const OH_AI_TensorHandleArray inputs, const * @return Pointer to the model object. * @since 9 */ -OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate(); +OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate(void); /** * @brief Destroys a model object. @@ -138,7 +139,7 @@ OH_AI_API OH_AI_ModelHandle OH_AI_ModelCreate(); OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model); /** - * @brief Loads and builds a MindSpore model from the memory buffer. + * @brief Loads and builds a MindSpore model from the memory buffer.\n * * Note that the same {@link OH_AI_ContextHandle} object can be passed to {@link OH_AI_ModelBuild} or * {@link OH_AI_ModelBuildFromFile} only once. @@ -150,7 +151,7 @@ OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model); * @param data_size Length of the model data. * @param model_type Type of the model file. For details, see {@link OH_AI_ModelType}. * @param model_context Context for model running. For details, see {@link OH_AI_ContextHandle}. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 9 */ @@ -169,7 +170,7 @@ OH_AI_API OH_AI_Status OH_AI_ModelBuild(OH_AI_ModelHandle model, const void *mod * @param model_path Path of the model file. * @param model_type Type of the model file. For details, see {@link OH_AI_ModelType}. * @param model_context Context for model running. For details, see {@link OH_AI_ContextHandle}. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. 
* @since 9 */ @@ -177,14 +178,14 @@ OH_AI_API OH_AI_Status OH_AI_ModelBuildFromFile(OH_AI_ModelHandle model, const c OH_AI_ModelType model_type, const OH_AI_ContextHandle model_context); /** - * @brief Adjusts the input tensor shapes of a built model. + * @brief Adjusts the input tensor shapes of a built model.\n * * @param model Pointer to the model object. * @param inputs Tensor array structure corresponding to the model input. * @param shape_infos Input shape array, which consists of tensor shapes arranged in the model input sequence. * The model adjusts the tensor shapes in sequence. * @param shape_info_num Length of the input shape array. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 9 */ @@ -199,7 +200,7 @@ OH_AI_API OH_AI_Status OH_AI_ModelResize(OH_AI_ModelHandle model, const OH_AI_Te * @param outputs Pointer to the tensor array structure corresponding to the model output. * @param before Callback function executed before model inference. * @param after Callback function executed after model inference. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 9 */ @@ -312,7 +313,7 @@ OH_AI_API void OH_AI_TrainCfgSetOptimizationLevel(OH_AI_TrainCfgHandle train_cfg * @param model_type Type of the model file. For details, see {@link OH_AI_ModelType}. * @param model_context Context for model running. For details, see {@link OH_AI_ContextHandle}. * @param train_cfg Pointer to the training configuration object. - * @return Status code enumerated by {@link OH_AI_Status}. 
The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -329,7 +330,7 @@ OH_AI_API OH_AI_Status OH_AI_TrainModelBuild(OH_AI_ModelHandle model, const void * @param model_type Type of the model file. For details, see {@link OH_AI_ModelType}. * @param model_context Context for model running. For details, see {@link OH_AI_ContextHandle}. * @param train_cfg Pointer to the training configuration object. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -344,7 +345,7 @@ OH_AI_API OH_AI_Status OH_AI_TrainModelBuildFromFile(OH_AI_ModelHandle model, co * @param model Pointer to the model object. * @param before Callback function executed before model inference. * @param after Callback function executed after model inference. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -356,7 +357,7 @@ OH_AI_API OH_AI_Status OH_AI_RunStep(OH_AI_ModelHandle model, const OH_AI_Kernel * * @param model Pointer to the model object. * @param learning_rate Learning rate. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -385,7 +386,7 @@ OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetWeights(OH_AI_ModelHandle model) * * @param model Pointer to the model object. 
* @param new_weights Weight tensors to update. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -405,7 +406,7 @@ OH_AI_API bool OH_AI_ModelGetTrainMode(OH_AI_ModelHandle model); * * @param model Pointer to the model object. * @param train Whether the training mode is used. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -419,7 +420,7 @@ OH_AI_API OH_AI_Status OH_AI_ModelSetTrainMode(OH_AI_ModelHandle model, bool tra * the virtual batch is disabled. * @param lr Learning rate. The default value is -1.0f. * @param momentum Momentum. The default value is -1.0f. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -437,7 +438,7 @@ OH_AI_API OH_AI_Status OH_AI_ModelSetupVirtualBatch(OH_AI_ModelHandle model, int * @param output_tensor_name Output tensor of the exported model. This parameter is left blank by default, * which indicates full export. * @param num Number of output tensors. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ @@ -457,11 +458,11 @@ OH_AI_API OH_AI_Status OH_AI_ExportModel(OH_AI_ModelHandle model, OH_AI_ModelTyp * @param output_tensor_name Output tensor of the exported model. 
This parameter is left blank by default, * which indicates full export. * @param num Number of output tensors. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ -OH_AI_API OH_AI_Status OH_AI_ExportModelBuffer(OH_AI_ModelHandle model, OH_AI_ModelType model_type, char **model_data, +OH_AI_API OH_AI_Status OH_AI_ExportModelBuffer(OH_AI_ModelHandle model, OH_AI_ModelType model_type, void *model_data, size_t *data_size, OH_AI_QuantizationType quantization_type, bool export_inference_only, char **output_tensor_name, size_t num); @@ -476,7 +477,7 @@ OH_AI_API OH_AI_Status OH_AI_ExportModelBuffer(OH_AI_ModelHandle model, OH_AI_Mo * @param enable_fp16 Whether to save floating-point weights in float16 format. * @param changeable_weights_name Name of the weight tensor with a variable shape. * @param num Number of weight tensors with a variable shape. - * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_Status::OH_AI_STATUS_SUCCESS** + * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS** * indicates that the operation is successful. * @since 11 */ diff --git a/en/native_sdk/ai/mindspore/status.h b/en/native_sdk/ai/mindspore/status.h index 152e2072dc1fc92a771fe92c89e6ff2d4457eb8a..c4972354f223795879324ce0d765dddf916db8b8 100644 --- a/en/native_sdk/ai/mindspore/status.h +++ b/en/native_sdk/ai/mindspore/status.h @@ -29,7 +29,7 @@ * * @brief Provides the status codes of MindSpore Lite. 
 * - * File to include: \ + * @include mindspore/status.h * @library libmindspore_lite_ndk.so * @since 9 */ diff --git a/en/native_sdk/ai/mindspore/tensor.h b/en/native_sdk/ai/mindspore/tensor.h index fc1aecbdb4e493278d5e4fea0db0309a602c11b1..4fba713e4e75564f8f8919c4178dd95a871df73b 100644 --- a/en/native_sdk/ai/mindspore/tensor.h +++ b/en/native_sdk/ai/mindspore/tensor.h @@ -29,7 +29,7 @@ * * @brief Provides APIs for creating and modifying tensor information. * - * File to include: \ + * @include mindspore/tensor.h * @library libmindspore_lite_ndk.so * @since 9 */ @@ -38,6 +38,7 @@ #define MINDSPORE_INCLUDE_C_API_TENSOE_C_H #include +#include "mindspore/status.h" #include "mindspore/types.h" #include "mindspore/data_type.h" #include "mindspore/format.h" diff --git a/en/native_sdk/ai/mindspore/types.h b/en/native_sdk/ai/mindspore/types.h index 0ea789c374467d52c9e55c3b43fe2da02cedecb3..4e4e96e1fba53f2e8a010c7228e4d0d77297c4af 100644 --- a/en/native_sdk/ai/mindspore/types.h +++ b/en/native_sdk/ai/mindspore/types.h @@ -29,7 +29,7 @@ * * @brief Provides the model file types and device types supported by MindSpore Lite. * - * File to include: + * @include mindspore/types.h * @library libmindspore_lite_ndk.so * @since 9 */ @@ -164,7 +164,7 @@ typedef enum OH_AI_PerformanceMode { * * @since 10 */ - OH_AI_PERFORMANCE_EXTREME = 4, + OH_AI_PERFORMANCE_EXTREME = 4 } OH_AI_PerformanceMode; /** @@ -192,7 +192,7 @@ typedef enum OH_AI_Priority { * * @since 10 */ - OH_AI_PRIORITY_HIGH = 3, + OH_AI_PRIORITY_HIGH = 3 } OH_AI_Priority; /**