:gitee_url: https://gitee.com/mindspore/docs

.. _program_listing_file_include_serialization.h:

Program Listing for File serialization.h
========================================

|exhale_lsh| :ref:`Return to documentation for file <file_include_serialization.h>` (``include/serialization.h``)

.. |exhale_lsh| unicode:: U+021B0 .. UPWARDS ARROW WITH TIP LEFTWARDS

.. code-block:: cpp

   #ifndef MINDSPORE_INCLUDE_API_SERIALIZATION_H
   #define MINDSPORE_INCLUDE_API_SERIALIZATION_H

   #include <string>
   #include <vector>
   #include <map>
   #include <memory>
   #include "include/api/status.h"
   #include "include/api/types.h"
   #include "include/api/model.h"
   #include "include/api/graph.h"
   #include "include/api/dual_abi_helper.h"

   namespace mindspore {

   class Serialization {
    public:
     inline static Status Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph,
                               const Key &dec_key = {}, const std::string &dec_mode = kDecModeAesGcm);
     inline static Status Load(const std::string &file, ModelType model_type, Graph *graph, const Key &dec_key = {},
                               const std::string &dec_mode = kDecModeAesGcm);
     inline static Status Load(const std::vector<std::string> &files, ModelType model_type,
                               std::vector<Graph> *graphs, const Key &dec_key = {},
                               const std::string &dec_mode = kDecModeAesGcm);
     static Status SetParameters(const std::map<std::string, Buffer> &parameters, Model *model);
     static Status ExportModel(const Model &model, ModelType model_type, Buffer *model_data);
     inline static Status ExportModel(const Model &model, ModelType model_type, const std::string &model_file,
                                      QuantizationType quantization_type = kNoQuant,
                                      bool export_inference_only = true,
                                      std::vector<std::string> output_tensor_name = {});

    private:
     static Status Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph,
                        const Key &dec_key, const std::vector<char> &dec_mode);
     static Status Load(const std::vector<char> &file, ModelType model_type, Graph *graph);
     static Status Load(const std::vector<char> &file, ModelType model_type, Graph *graph, const Key &dec_key,
                        const std::vector<char> &dec_mode);
     static Status Load(const std::vector<std::vector<char>> &files,
                        ModelType model_type, std::vector<Graph> *graphs, const Key &dec_key,
                        const std::vector<char> &dec_mode);
     static Status ExportModel(const Model &model, ModelType model_type, const std::vector<char> &model_file,
                               QuantizationType quantization_type, bool export_inference_only,
                               const std::vector<std::vector<char>> &output_tensor_name);
   };

   Status Serialization::Load(const void *model_data, size_t data_size, ModelType model_type, Graph *graph,
                              const Key &dec_key, const std::string &dec_mode) {
     return Load(model_data, data_size, model_type, graph, dec_key, StringToChar(dec_mode));
   }

   Status Serialization::Load(const std::string &file, ModelType model_type, Graph *graph, const Key &dec_key,
                              const std::string &dec_mode) {
     return Load(StringToChar(file), model_type, graph, dec_key, StringToChar(dec_mode));
   }

   Status Serialization::Load(const std::vector<std::string> &files, ModelType model_type,
                              std::vector<Graph> *graphs, const Key &dec_key, const std::string &dec_mode) {
     return Load(VectorStringToChar(files), model_type, graphs, dec_key, StringToChar(dec_mode));
   }

   Status Serialization::ExportModel(const Model &model, ModelType model_type, const std::string &model_file,
                                     QuantizationType quantization_type, bool export_inference_only,
                                     std::vector<std::string> output_tensor_name) {
     return ExportModel(model, model_type, StringToChar(model_file), quantization_type, export_inference_only,
                        VectorStringToChar(output_tensor_name));
   }
   }  // namespace mindspore
   #endif  // MINDSPORE_INCLUDE_API_SERIALIZATION_H