1 Star 0 Fork 4K

Feng Lin/interface_sdk-js

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
文件
克隆/下载
@ohos.ai.mindSporeLite.d.ts 25.62 KB
一键复制 编辑 原始数据 按行查看 历史
钱丹 提交于 2024-05-27 15:56 +08:00 . modify dts
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995
/*
* Copyright (c) 2023 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @file
* @kit MindSporeLiteKit
*/
import { Callback } from './@ohos.base';
/**
* @namespace mindSporeLite
* @syscap SystemCapability.AI.MindSporeLite
* @stagemodelonly
* @since 10
*/
declare namespace mindSporeLite {
/**
 * Creates a Model instance from a model file path (promise flavor).
 * @param { string } model - path of the model file to be loaded
 * @param { Context } [context] - context indicates model context information (optional)
 * @returns { Promise<Model> } the promise returned by the function.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFile(
model: string,
context?: Context): Promise<Model>;
/**
 * Creates a Model instance from a model file path (callback flavor).
 * @param { string } model - path of the model file to be loaded
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFile(
model: string, callback: Callback<Model>): void;
/**
 * Creates a Model instance from a model file path with an explicit context (callback flavor).
 * @param { string } model - path of the model file to be loaded
 * @param { Context } context - context indicates model context information
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFile(
model: string,
context: Context, callback: Callback<Model>): void;
/**
 * Creates a Model instance from an in-memory model buffer (promise flavor).
 * @param { ArrayBuffer } model - buffer containing the model to be loaded
 * @param { Context } [context] - context indicates model context information (optional)
 * @returns { Promise<Model> } the promise returned by the function.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromBuffer(
model: ArrayBuffer,
context?: Context): Promise<Model>;
/**
 * Creates a Model instance from an in-memory model buffer (callback flavor).
 * @param { ArrayBuffer } model - buffer containing the model to be loaded
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromBuffer(
model: ArrayBuffer, callback: Callback<Model>): void;
/**
 * Creates a Model instance from an in-memory model buffer with an explicit context (callback flavor).
 * @param { ArrayBuffer } model - buffer containing the model to be loaded
 * @param { Context } context - context indicates model context information
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromBuffer(
model: ArrayBuffer,
context: Context, callback: Callback<Model>): void;
/**
 * Creates a Model instance from an open model file descriptor (promise flavor).
 * @param { number } model - file descriptor of the model file to be loaded
 * @param { Context } [context] - context indicates model context information (optional)
 * @returns { Promise<Model> } the promise returned by the function.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFd(
model: number,
context?: Context): Promise<Model>;
/**
 * Creates a Model instance from an open model file descriptor (callback flavor).
 * @param { number } model - file descriptor of the model file to be loaded
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFd(
model: number, callback: Callback<Model>): void;
/**
 * Creates a Model instance from an open model file descriptor with an explicit context (callback flavor).
 * @param { number } model - file descriptor of the model file to be loaded
 * @param { Context } context - context indicates model context information
 * @param { Callback<Model> } callback - the callback invoked with the loaded model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
function loadModelFromFd(
model: number,
context: Context, callback: Callback<Model>): void;
/**
 * Loads a trainable model from a model file path.
 * @param { string } model - model file path
 * @param { TrainCfg } [trainCfg] - model train configuration (optional)
 * @param { Context } [context] - model build context (optional)
 * @returns { Promise<Model> } the promise of the built model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
function loadTrainModelFromFile(
model: string,
trainCfg?: TrainCfg,
context?: Context): Promise<Model>;
/**
 * Loads a trainable model from an in-memory model buffer.
 * @param { ArrayBuffer } model - buffer containing the model
 * @param { TrainCfg } [trainCfg] - model train configuration (optional)
 * @param { Context } [context] - model build context (optional)
 * @returns { Promise<Model> } the promise of the built model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
function loadTrainModelFromBuffer(
model: ArrayBuffer,
trainCfg?: TrainCfg,
context?: Context): Promise<Model>;
/**
 * Loads a trainable model from an open model file descriptor.
 * @param { number } model - file descriptor of the model file
 * @param { TrainCfg } [trainCfg] - model train configuration (optional)
 * @param { Context } [context] - model build context (optional)
 * @returns { Promise<Model> } the promise of the built model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
function loadTrainModelFromFd(
model: number,
trainCfg?: TrainCfg,
context?: Context): Promise<Model>;
/**
 * Provides model management functions, including getting inputs, prediction and resizing.
 * @typedef Model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
interface Model {
/**
 * The learning rate of the training model.
 * @type {?number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
learningRate?: number;
/**
 * The running mode of the model (training vs. inference).
 * @type {?boolean}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
trainMode?: boolean;
/**
 * Gets the model's input tensors.
 * @returns { MSTensor[] } the MSTensor array of the inputs.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
getInputs(): MSTensor[];
/**
 * Runs model inference (callback flavor).
 * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
 * @param { Callback<MSTensor[]> } callback - the callback invoked with the output MSTensor array.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
predict(inputs: MSTensor[], callback: Callback<MSTensor[]>): void;
/**
 * Runs model inference (promise flavor).
 * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
 * @returns { Promise<MSTensor[]> } the promise returned by the function.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
predict(inputs: MSTensor[]): Promise<MSTensor[]>;
/**
 * Resizes the model inputs to new shapes.
 * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
 * @param { Array<Array<number>> } dims - indicates the target new shape array
 * @returns { boolean } whether the resize operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
resize(inputs: MSTensor[], dims: Array<Array<number>>): boolean;
/**
 * Trains the model by one step.
 * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
 * @returns { boolean } whether the runStep operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
runStep(inputs: MSTensor[]): boolean;
/**
 * Obtains all weights of the model.
 * @returns { MSTensor[] } the weight tensors of the model
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
getWeights(): MSTensor[];
/**
 * Updates weights of the model.
 * @param { MSTensor[] } weights - indicates the MSTensor array of the new weights
 * @returns { boolean } whether the update-weights operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
updateWeights(weights: MSTensor[]): boolean;
/**
 * Sets up training with virtual batches.
 * @param { number } virtualBatchMultiplier - virtual batch multiplier; use any number < 1 to disable
 * @param { number } lr - learning rate to use for virtual batch, -1 for internal configuration
 * @param { number } momentum - batch norm momentum to use for virtual batch, -1 for internal configuration
 * @returns { boolean } whether the operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
setupVirtualBatch(virtualBatchMultiplier: number, lr: number, momentum: number): boolean;
/**
 * Exports the train model to a file.
 * @param { string } modelFile - model file path.
 * @param { QuantizationType } [quantizationType] - the quantization type, default NO_QUANT.
 * @param { boolean } [exportInferenceOnly] - whether to export an inference-only model, default true.
 * @param { string[] } [outputTensorName] - names of the output tensors of the exported inference model.
 * @returns { boolean } - whether the export operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
exportModel(
modelFile: string,
quantizationType?: QuantizationType,
exportInferenceOnly?: boolean,
outputTensorName?: string[]): boolean;
/**
 * Exports the model's weights, which can be used in micro only. Only valid for Lite Train.
 * @param { string } weightFile - weight file path
 * @param { boolean } [isInference] - whether to export weights from the inference model; only `true` is currently supported, default true
 * @param { boolean } [enableFp16] - whether float weights are saved in float16 format, default false
 * @param { string[] } [changeableWeightsName] - names of the changeable weights
 * @returns { boolean } whether the operation is successful
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
exportWeightsCollaborateWithMicro(
weightFile: string,
isInference?: boolean,
enableFp16?: boolean,
changeableWeightsName?: string[]): boolean;
}
/**
 * Enum for quantization type used when exporting a model.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
export enum QuantizationType {
/**
 * No quantization.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
NO_QUANT = 0,
/**
 * Weight quantization.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
WEIGHT_QUANT = 1,
/**
 * Full quantization.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
FULL_QUANT = 2,
}
/**
 * Enum for train optimization level.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
export enum OptimizationLevel {
/**
 * Do not change the network precision.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
O0 = 0,
/**
 * Cast the network to float16, keep batch norm and loss in float32.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
O2 = 2,
/**
 * Cast the network to float16, including batch norm.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
O3 = 3,
/**
 * Choose the optimization level based on the device.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
AUTO = 4,
}
/**
 * Provides the train configuration.
 * @typedef TrainCfg
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
interface TrainCfg {
/**
 * Array of loss names.
 * @type {?string[]}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
lossName?: string[];
/**
 * Train optimization level.
 * @type {?OptimizationLevel}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
optimizationLevel?: OptimizationLevel;
}
/**
 * Provides the device configurations for model building.
 * @typedef Context
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
interface Context {
/**
 * The target device list.
 * @type {?string[]}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
target?: string[];
/**
 * The CPU device information.
 * @type {?CpuDevice}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
cpu?: CpuDevice;
/**
 * The NNRT device information.
 * @type {?NNRTDevice}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
nnrt?: NNRTDevice;
}
/**
 * Provides the CPU device info.
 * @typedef CpuDevice
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
interface CpuDevice {
/**
 * The number of runtime threads.
 * @type {?number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
threadNum?: number;
/**
 * The thread affinity mode.
 * @type {?ThreadAffinityMode}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
threadAffinityMode?: ThreadAffinityMode;
/**
 * The list of CPU cores that threads are bound to.
 * @type {?number[]}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
threadAffinityCoreList?: number[];
/**
 * The precision mode.
 * @type {?string}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
precisionMode?: string;
}
/**
 * Enum for NNRT device performance mode.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
export enum PerformanceMode {
/**
 * No performance mode preference.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PERFORMANCE_NONE = 0,
/**
 * Low power consumption mode.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PERFORMANCE_LOW = 1,
/**
 * Medium performance mode.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PERFORMANCE_MEDIUM = 2,
/**
 * High performance mode.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PERFORMANCE_HIGH = 3,
/**
 * Ultimate performance mode.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PERFORMANCE_EXTREME = 4,
}
/**
 * Enum for NNRT device scheduling priority.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
export enum Priority {
/**
 * No priority preference.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PRIORITY_NONE = 0,
/**
 * Low priority.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PRIORITY_LOW = 1,
/**
 * Medium priority.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PRIORITY_MEDIUM = 2,
/**
 * High priority.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
PRIORITY_HIGH = 3,
}
/**
 * Provides the extension information of an NNRT device.
 * @typedef Extension
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
interface Extension {
/**
 * Extension name.
 * @type {string}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
name: string;
/**
 * Extension value as an array buffer.
 * @type {ArrayBuffer}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
value: ArrayBuffer;
}
/**
 * Enum for NNRT device type.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
export enum NNRTDeviceType {
/**
 * Devices that are not CPU, GPU, or dedicated accelerator.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
NNRTDEVICE_OTHERS = 0,
/**
 * CPU device.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
NNRTDEVICE_CPU = 1,
/**
 * GPU device.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
NNRTDEVICE_GPU = 2,
/**
 * Dedicated hardware accelerator.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
NNRTDEVICE_ACCELERATOR = 3,
}
/**
 * Provides the NNRT device description.
 * @typedef NNRTDeviceDescription
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
interface NNRTDeviceDescription {
/**
 * Gets the device id.
 * @returns { bigint } the number of the device id
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
deviceID(): bigint;
/**
 * Gets the device type.
 * @returns { NNRTDeviceType } the device type
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
deviceType(): NNRTDeviceType;
/**
 * Gets the device name.
 * @returns { string } the device name
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
deviceName(): string;
}
/**
 * Obtains all device descriptions in NNRT.
 * @returns { NNRTDeviceDescription[] } the array of NNRTDeviceDescription
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
function getAllNNRTDeviceDescriptions(): NNRTDeviceDescription[];
/**
 * Provides the NNRT device info.
 * @typedef NNRTDevice
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
interface NNRTDevice {
/**
 * NNRT device id.
 * @type {?bigint}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
deviceID?: bigint;
/**
 * NNRT device performance mode.
 * @type {?PerformanceMode}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
performanceMode?: PerformanceMode;
/**
 * NNRT device priority.
 * @type {?Priority}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
priority?: Priority;
/**
 * NNRT device extension array.
 * @type {?Extension[]}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 12
 */
extensions?: Extension[];
}
/**
 * Enum for the CPU thread affinity mode.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
export enum ThreadAffinityMode {
/**
 * No affinity: threads are not bound to specific cores.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NO_AFFINITIES = 0,
/**
 * Threads are bound to big cores first.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
BIG_CORES_FIRST = 1,
/**
 * Threads are bound to little cores first.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
LITTLE_CORES_FIRST = 2,
}
/**
 * Provides the MSTensor definition.
 * @typedef MSTensor
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
interface MSTensor {
/**
 * The name of the tensor.
 * @type {string}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
name: string;
/**
 * The shape of the tensor.
 * @type {number[]}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
shape: number[];
/**
 * The number of elements in the tensor.
 * @type {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
elementNum: number;
/**
 * The data size of the tensor, in bytes.
 * @type {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
dataSize: number;
/**
 * The data type of the tensor.
 * @type {DataType}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
dtype: DataType;
/**
 * The layout format of the tensor.
 * @type {Format}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
format: Format;
/**
 * Gets the MSTensor data.
 * @returns { ArrayBuffer } the data of the tensor
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
getData(): ArrayBuffer;
/**
 * Sets the MSTensor data.
 * @param { ArrayBuffer } inputArray - indicates the buffer of the tensor
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
setData(inputArray: ArrayBuffer): void;
}
/**
 * Enum for the MSTensor data type.
 * NOTE(review): the numeric values are non-contiguous (32..35, 37..40, 42..44) —
 * presumably they mirror MindSpore's internal type-id numbering; do not renumber.
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
export enum DataType {
/**
 * Data type is unknown.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
TYPE_UNKNOWN = 0,
/**
 * Data type is int8.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_INT8 = 32,
/**
 * Data type is int16.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_INT16 = 33,
/**
 * Data type is int32.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_INT32 = 34,
/**
 * Data type is int64.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_INT64 = 35,
/**
 * Data type is uint8.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_UINT8 = 37,
/**
 * Data type is uint16.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_UINT16 = 38,
/**
 * Data type is uint32.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_UINT32 = 39,
/**
 * Data type is uint64.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_UINT64 = 40,
/**
 * Data type is float16.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_FLOAT16 = 42,
/**
 * Data type is float32.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_FLOAT32 = 43,
/**
 * Data type is float64.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NUMBER_TYPE_FLOAT64 = 44,
}
/**
 * Enum for the MSTensor layout format (N = batch, C = channel, H = height, W = width, K = kernel).
 * @enum {number}
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
export enum Format {
/**
 * Data format is the default format.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
DEFAULT_FORMAT = -1,
/**
 * Data format is NCHW.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NCHW = 0,
/**
 * Data format is NHWC.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NHWC = 1,
/**
 * Data format is NHWC4.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
NHWC4 = 2,
/**
 * Data format is HWKC.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
HWKC = 3,
/**
 * Data format is HWCK.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
HWCK = 4,
/**
 * Data format is KCHW.
 * @syscap SystemCapability.AI.MindSporeLite
 * @stagemodelonly
 * @since 10
 */
KCHW = 5,
}
}
export default mindSporeLite;
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
1
https://gitee.com/lffl8796/interface_sdk-js.git
git@gitee.com:lffl8796/interface_sdk-js.git
lffl8796
interface_sdk-js
interface_sdk-js
master

搜索帮助