Program Listing for File types.h

Return to documentation for file (include/runtime/include/api/types.h)

#ifndef MINDSPORE_INCLUDE_API_TYPES_H
#define MINDSPORE_INCLUDE_API_TYPES_H

#include <cstddef>
#include <string>
#include <vector>
#include <memory>
#include <functional>
#include "include/api/data_type.h"
#include "include/api/dual_abi_helper.h"
#include "include/api/format.h"
#include "include/api/visible.h"

namespace mindspore {
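// Model file formats that the runtime can load.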
enum class ModelType : uint32_t {
  kMindIR = 0,
  kAIR = 1,
  kOM = 2,
  kONNX = 3,
  kMindIR_Lite = 4,
  kUnknownType = 0xFFFFFFFF
};

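// Quantization scheme of a model: none, weight-only, or full quantization.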
enum class QuantizationType : uint32_t {
  kNoQuant = 0,
  kWeightQuant = 1,
  kFullQuant = 2,
  kUnknownQuantType = 0xFFFFFFFF
};

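// Optimization level hint; kAuto lets the framework choose the level.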
enum class OptimizationLevel : uint32_t {
  kO0 = 0,
  kO2 = 2,
  kO3 = 3,
  kAuto = 4,
  kOptimizationType = 0xFFFFFFFF
};

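// Quantization parameters attached to a tensor: the scale/zero-point mapping
// between quantized integers and real values, plus the value range.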
struct QuantParam {
  int bit_num;
  double scale;
  int32_t zero_point;
  double min;
  double max;
};

class Allocator;
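// Tensor abstraction used for model inputs and outputs; its state is held in a shared Impl.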
class MS_API MSTensor {
 public:
  class Impl;
  static inline MSTensor *CreateTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                       const void *data, size_t data_len) noexcept;

  static inline MSTensor *CreateRefTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                          const void *data, size_t data_len, bool own_data = true) noexcept;

  static inline MSTensor CreateDeviceTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape,
                                            void *data, size_t data_len) noexcept;

  static inline MSTensor *CreateTensorFromFile(const std::string &file, DataType type = DataType::kNumberTypeUInt8,
                                               const std::vector<int64_t> &shape = {}) noexcept;

  static inline MSTensor *StringsToTensor(const std::string &name, const std::vector<std::string> &str);

  static inline std::vector<std::string> TensorToStrings(const MSTensor &tensor);

  static void DestroyTensorPtr(MSTensor *tensor) noexcept;

  MSTensor();
  explicit MSTensor(const std::shared_ptr<Impl> &impl);
  // If `data` was allocated by the caller (e.g. with malloc), the caller must free it
  // after constructing the MSTensor; otherwise the memory leaks.
  inline MSTensor(const std::string &name, DataType type, const std::vector<int64_t> &shape, const void *data,
                  size_t data_len);
  explicit MSTensor(std::nullptr_t);
  ~MSTensor();

  inline std::string Name() const;

  enum DataType DataType() const;

  const std::vector<int64_t> &Shape() const;

  int64_t ElementNum() const;

  std::shared_ptr<const void> Data() const;

  void *MutableData();

  size_t DataSize() const;

  bool IsConst() const;

  bool IsDevice() const;

  MSTensor *Clone() const;

  bool operator==(std::nullptr_t) const;

  bool operator!=(std::nullptr_t) const;

  bool operator==(const MSTensor &tensor) const;

  bool operator!=(const MSTensor &tensor) const;

  void SetShape(const std::vector<int64_t> &shape);

  void SetDataType(enum DataType data_type);

  inline void SetTensorName(const std::string &name);

  void SetAllocator(std::shared_ptr<Allocator> allocator);

  std::shared_ptr<Allocator> allocator() const;

  void SetFormat(mindspore::Format format);

  mindspore::Format format() const;

  void SetData(void *data, bool own_data = true);

  void SetDeviceData(void *data);

  void *GetDeviceData();

  std::vector<QuantParam> QuantParams() const;

  void SetQuantParams(std::vector<QuantParam> quant_params);

  const std::shared_ptr<Impl> impl() const { return impl_; }

 private:
  // api without std::string
  static MSTensor *CreateTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
                                const void *data, size_t data_len) noexcept;
  static MSTensor *CreateRefTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape,
                                   const void *data, size_t data_len, bool own_data) noexcept;
  static MSTensor CreateDeviceTensor(const std::vector<char> &name, enum DataType type,
                                     const std::vector<int64_t> &shape, void *data, size_t data_len) noexcept;
  static MSTensor *CreateTensorFromFile(const std::vector<char> &file, enum DataType type,
                                        const std::vector<int64_t> &shape) noexcept;
  static MSTensor *CharStringsToTensor(const std::vector<char> &name, const std::vector<std::vector<char>> &str);
  static std::vector<std::vector<char>> TensorToStringChars(const MSTensor &tensor);

  MSTensor(const std::vector<char> &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
           size_t data_len);
  std::vector<char> CharName() const;
  void SetTensorName(const std::vector<char> &name);

  friend class ModelImpl;
  std::shared_ptr<Impl> impl_;
};

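// Owned, resizable block of bytes.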
class MS_API Buffer {
 public:
  Buffer();
  Buffer(const void *data, size_t data_len);
  ~Buffer();

  const void *Data() const;
  void *MutableData();
  size_t DataSize() const;

  bool ResizeData(size_t data_len);
  bool SetData(const void *data, size_t data_len);

  Buffer Clone() const;

 private:
  class Impl;
  std::shared_ptr<Impl> impl_;
};

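// Inline dual-ABI wrappers: the std::string-based public overloads convert their arguments
// with StringToChar/VectorStringToChar and forward to the private char-vector overloads,
// keeping the exported ABI independent of the std::string implementation (see dual_abi_helper.h).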
MSTensor *MSTensor::CreateTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                 const void *data, size_t data_len) noexcept {
  return CreateTensor(StringToChar(name), type, shape, data, data_len);
}

MSTensor *MSTensor::CreateRefTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                    const void *data, size_t data_len, bool own_data) noexcept {
  return CreateRefTensor(StringToChar(name), type, shape, data, data_len, own_data);
}

MSTensor MSTensor::CreateDeviceTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape,
                                      void *data, size_t data_len) noexcept {
  return CreateDeviceTensor(StringToChar(name), type, shape, data, data_len);
}

MSTensor *MSTensor::CreateTensorFromFile(const std::string &file, enum DataType type,
                                         const std::vector<int64_t> &shape) noexcept {
  return CreateTensorFromFile(StringToChar(file), type, shape);
}

MSTensor *MSTensor::StringsToTensor(const std::string &name, const std::vector<std::string> &str) {
  return CharStringsToTensor(StringToChar(name), VectorStringToChar(str));
}

std::vector<std::string> MSTensor::TensorToStrings(const MSTensor &tensor) {
  return VectorCharToString(TensorToStringChars(tensor));
}

MSTensor::MSTensor(const std::string &name, enum DataType type, const std::vector<int64_t> &shape, const void *data,
                   size_t data_len)
    : MSTensor(StringToChar(name), type, shape, data, data_len) {}

std::string MSTensor::Name() const { return CharToString(CharName()); }

void MSTensor::SetTensorName(const std::string &name) { SetTensorName(StringToChar(name)); }

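// Key material (at most max_key_len bytes) used when loading encrypted model files,
// together with a decryption mode string such as kDecModeAesGcm below.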
using Key = struct MS_API Key {
  const size_t max_key_len = 32;
  size_t len = 0;
  unsigned char key[32] = {0};
  Key() : len(0) {}
  explicit Key(const char *dec_key, size_t key_len);
};

constexpr char kDecModeAesGcm[] = "AES-GCM";

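// Information about the node being executed, passed to kernel callbacks:
// node name, node type, and execution time.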
struct MSCallBackParam {
  std::string node_name;
  std::string node_type;
  double execute_time;
};

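// Callback invoked per node during execution, receiving the node's input tensors,
// output tensors, and its MSCallBackParam.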
using MSKernelCallBack =
  std::function<bool(const std::vector<MSTensor> & /* inputs */, const std::vector<MSTensor> & /* outputs */,
                     const MSCallBackParam &opInfo)>;

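// Runtime version string, exposed through the same dual-ABI pattern.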
MS_API std::vector<char> CharVersion();
inline std::string Version() { return CharToString(CharVersion()); }

}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_TYPES_H
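
A minimal usage sketch of the MSTensor factory and accessor APIs declared above (not part of the header; the tensor name, shape, and data values are illustrative):

#include "include/api/types.h"

#include <iostream>
#include <vector>

int main() {
  std::vector<float> data = {1.0f, 2.0f, 3.0f, 4.0f};
  // CreateTensor returns a heap-allocated tensor that must be released with DestroyTensorPtr.
  mindspore::MSTensor *tensor = mindspore::MSTensor::CreateTensor(
      "x", mindspore::DataType::kNumberTypeFloat32, {2, 2}, data.data(), data.size() * sizeof(float));
  if (tensor == nullptr) {
    std::cerr << "CreateTensor failed" << std::endl;
    return 1;
  }
  std::cout << tensor->Name() << ": " << tensor->ElementNum() << " elements, "
            << tensor->DataSize() << " bytes" << std::endl;
  mindspore::MSTensor::DestroyTensorPtr(tensor);
  return 0;
}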