diff --git a/research/cv/ibnnet/README_CN.md b/research/cv/ibnnet/README_CN.md
index 307fd6725e8896e203057f1f97adcd786f87add4..ae8768d7ed61fabe11c7a043488fa04233ba7592 100644
--- a/research/cv/ibnnet/README_CN.md
+++ b/research/cv/ibnnet/README_CN.md
@@ -15,13 +15,14 @@
         - [分布式训练](#分布式训练)
     - [评估过程](#评估过程)
         - [评估](#评估)
+    - [导出mindir模型](#导出mindir模型)
+    - [推理过程](#推理过程)
+        - [用法](#用法)
+        - [结果](#结果)
 - [模型描述](#模型描述)
     - [性能](#性能)
+        - [训练性能](#训练性能)
         - [评估性能](#评估性能)
-        - [推理性能](#推理性能)
-    - [使用方法](#使用方法)
-        - [推理](#推理)
-        - [迁移学习](#迁移学习)
 - [随机情况说明](#随机情况说明)
 - [ModelZoo主页](#ModelZoo主页)

@@ -50,8 +51,8 @@
 - 框架
     - [MindSpore](https://www.mindspore.cn/install)
 - 如需查看详情,请参见如下资源:
-    - [MindSpore教程](https://www.mindspore.cn/tutorials/zh-CN/master/index.html)
-    - [MindSpore Python API](https://www.mindspore.cn/docs/api/zh-CN/master/index.html)
+    - [MindSpore教程](https://www.mindspore.cn/tutorial/training/zh-CN/master/index.html)
+    - [MindSpore Python API](https://www.mindspore.cn/doc/api_python/zh-CN/master/index.html)

 # 快速入门

@@ -75,25 +76,35 @@ sh scripts/run_eval.sh
 ```path
 └── IBNNet
     ├── README.md // IBNNet相关描述
-    ├── scripts
-        ├── run_distribute_train.sh // 用于分布式训练的shell脚本
+    ├── ascend310_infer //310推理
+        ├── inc
+            ├── utils.h
+        ├── src
+            ├── main.cc
+            ├── utils.cc
+        ├── build.sh
+        └── CMakeLists.txt
+    ├── scripts
+        ├── run_310_infer.sh // 用于310推理的shell脚本
+        ├── run_distribute_train.sh // 用于分布式训练的shell脚本
         ├── run_distribute_train_gpu.sh // 用于GPU分布式训练的shell脚本
-        ├── run_standalone_train.sh // 用于单机训练的shell脚本
-        ├── run_standalone_train.sh // 用于GPU单机训练的shell脚本
-        ├── run_eval.sh // 用于评估的shell脚本
-        └── run_eval.sh // 用于GPU评估的shell脚本
+        ├── run_standalone_train.sh // 用于单机训练的shell脚本
+        ├── run_standalone_train_gpu.sh // 用于GPU单机训练的shell脚本
+        ├── run_eval.sh // 用于评估的shell脚本
+        └── run_eval_gpu.sh // 用于GPU评估的shell脚本
     ├── src
-        ├── loss.py //损失函数
+        ├── loss.py //损失函数
         ├── lr_generator.py //生成学习率
         ├── config.py // 参数配置
        ├── dataset.py // 创建数据集
-        ├── resnet_ibn.py // IBNNet架构
+        ├── resnet_ibn.py // IBNNet架构
     ├── utils
         ├── pth2ckpt.py //转换pth文件为ckpt文件
     ├── export.py
     ├── eval.py // 测试脚本
     ├── train.py // 训练脚本
-
+    ├── preprocess.py // 310推理数据预处理
+    ├── postprocess.py // 310推理数据后处理
 ```
@@ -191,11 +202,36 @@ sh scripts/run_eval_gpu.sh path/evalset path/ckpt
 ============== Accuracy:{'top_5_accuracy': 0.93684, 'top_1_accuracy': 0.7743} ==============
 ```

+## 导出mindir模型
+
+```python
+python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
+```
+
+参数`ckpt_file` 是必需的,`FILE_FORMAT` 必须在 ["AIR", "MINDIR"]中进行选择。
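+
+下面给出一个导出示例(其中的检查点文件名与输出文件名仅为示意,请替换为实际训练得到的ckpt路径):
+
+```bash
+python export.py --ckpt_file ./ibnnet_ascend.ckpt --file_name ibnnet --file_format MINDIR
+```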
+
+# 推理过程
+
+## 用法
+
+在执行推理之前,需要通过export.py导出mindir文件。
+
+```bash
+# Ascend310 推理
+bash run_310_infer.sh [MINDIR_PATH] [DATASET_PATH]
+```
+
+`MINDIR_PATH` 为mindir文件路径,`DATASET_PATH` 表示数据集路径。
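+
+例如(以下mindir与数据集路径仅为示意,DEVICE_ID 可选,默认为0):
+
+```bash
+bash run_310_infer.sh /path/to/ibnnet.mindir /path/to/imagenet/val 0
+```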
+
+## 结果
+
+推理结果保存在当前路径,可在acc.log中看到最终精度结果。
+
 # 模型描述

 ## 性能

-### 评估性能
+### 训练性能

 | 参数 | IBN-Net |
 | ------------- | ----------------------------------------------- |
@@ -215,7 +251,7 @@
 | 微调检查点 | 293M (.ckpt file) |
 | 脚本 | [脚本路径](https://gitee.com/mindspore/models/tree/master/research/cv/ibnnet) |

-### 推理性能
+### 评估性能

 | 参数 | IBN-Net |
 | ------------- | ------------------ |
@@ -227,55 +263,6 @@ sh scripts/run_eval_gpu.sh path/evalset path/ckpt
 | 输出 | 概率 |
 | 准确性 | 1卡:77.45%; 8卡:77.45% |

-## 使用方法
-
-### 推理
-
-如果您需要使用已训练模型在GPU、Ascend 910、Ascend 310等多个硬件平台上进行推理,可参考[此处](https://www.mindspore.cn/docs/programming_guide/zh-CN/master/multi_platform_inference.html)。操作示例如下:
-
-```python
-# 加载未知数据集进行推理
-dataset = dataset.create_dataset(cfg.data_path, 1, False)
-
-# 定义模型
-net = resnet50_ibn_a(num_classes=1000, pretrained=False)
-param_dict = load_checkpoint(args.ckpt_url)
-load_param_into_net(net, param_dict)
-print('Load Pretrained parameters done!')
-
-criterion = SoftmaxCrossEntropyExpand(sparse=True)
-
-step = train_dataset.get_dataset_size()
-lr = lr_generator(args.lr, train_epoch, steps_per_epoch=step)
-optimizer = nn.SGD(params=net.trainable_params(), learning_rate=lr,
-momentum=args.momentum, weight_decay=args.weight_decay)
-
-# 模型变形
-model = Model(net, loss_fn=criterion, optimizer=optimizer, metrics={"Accuracy": Accuracy()})
-
-time_cb = TimeMonitor(data_size=train_dataset.get_dataset_size())
-loss_cb = LossMonitor()
-
-# 设置并应用检查点参数
-config_ck = CheckpointConfig(save_checkpoint_steps=step, keep_checkpoint_max=5)
-ckpoint_cb = ModelCheckpoint(prefix="ResNet50_" + str(device_id), config=config_ck, directory='/cache/train_output/device_' + str(device_id))
-
-cb = [ckpoint_cb, time_cb, loss_cb, eval_cb]
-model.train(train_epoch, train_dataset, callbacks=cb)
-
-# 加载预训练模型
-param_dict = load_checkpoint(cfg.checkpoint_path)
-load_param_into_net(net, param_dict)
-
-# 对未知数据集进行预测
-acc = model.eval(eval_dataset)
-print("accuracy: ", acc)
-```
-
-### 迁移学习
-
-待补充
-
 # 随机情况说明

 在dataset.py中,我们设置了"create_dataset_ImageNet"函数内的种子。
@@ -283,3 +270,4 @@ print("accuracy: ", acc)
 # ModelZoo主页

 请浏览官网[主页](https://gitee.com/mindspore/models)。
+
diff --git a/research/cv/ibnnet/ascend310_infer/CMakeLists.txt b/research/cv/ibnnet/ascend310_infer/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ee3c85447340e0449ff2b70ed24f60a17e07b2b6
--- /dev/null
+++ b/research/cv/ibnnet/ascend310_infer/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 3.14.1)
+project(Ascend310Infer)
+add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
+set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
+option(MINDSPORE_PATH "mindspore install path" "")
+include_directories(${MINDSPORE_PATH})
+include_directories(${MINDSPORE_PATH}/include)
+include_directories(${PROJECT_SRC_ROOT})
+find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
+file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
+
+add_executable(main src/main.cc src/utils.cc)
+target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
diff --git a/research/cv/ibnnet/ascend310_infer/build.sh b/research/cv/ibnnet/ascend310_infer/build.sh
new file mode 100644
index 0000000000000000000000000000000000000000..285514e19f2a1878a7bf8f0eed3c99fbc73868c4
--- /dev/null
+++ b/research/cv/ibnnet/ascend310_infer/build.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+if [ -d out ]; then
+    rm -rf out
+fi
+
+mkdir out
+cd out || exit
+
+if [ -f "Makefile" ]; then
+    make clean
+fi
+
+cmake .. \
+    -DMINDSPORE_PATH="`pip3.7 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+make
diff --git a/research/cv/ibnnet/ascend310_infer/inc/utils.h b/research/cv/ibnnet/ascend310_infer/inc/utils.h
new file mode 100644
index 0000000000000000000000000000000000000000..f8ae1e5b473d869b77af8d725a280d7c7665527c
--- /dev/null
+++ b/research/cv/ibnnet/ascend310_infer/inc/utils.h
@@ -0,0 +1,35 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_INFERENCE_UTILS_H_
+#define MINDSPORE_INFERENCE_UTILS_H_
+
+#include <sys/stat.h>
+#include <dirent.h>
+#include <vector>
+#include <string>
+#include <memory>
+#include "include/api/types.h"
+
+std::vector<std::string> GetAllFiles(std::string_view dirName);
+DIR *OpenDir(std::string_view dirName);
+std::string RealPath(std::string_view path);
+mindspore::MSTensor ReadFileToTensor(const std::string &file);
+int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs);
+std::vector<std::string> GetAllFiles(std::string dir_name);
+std::vector<std::vector<std::string>> GetAllInputData(std::string dir_name);
+
+#endif
diff --git a/research/cv/ibnnet/ascend310_infer/src/main.cc b/research/cv/ibnnet/ascend310_infer/src/main.cc
new file mode 100644
index 0000000000000000000000000000000000000000..1b1f2a91f01ef99b8dc63ef5d201fa3219e5d141
--- /dev/null
+++ b/research/cv/ibnnet/ascend310_infer/src/main.cc
@@ -0,0 +1,152 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <sys/time.h>
+#include <gflags/gflags.h>
+#include <dirent.h>
+#include <iostream>
+#include <string>
+#include <algorithm>
+#include <iosfwd>
+#include <vector>
+#include <fstream>
+#include <sstream>
+
+#include "include/api/model.h"
+#include "include/api/context.h"
+#include "include/api/types.h"
+#include "include/api/serialization.h"
+#include "include/dataset/vision_ascend.h"
+#include "include/dataset/execute.h"
+#include "include/dataset/transforms.h"
+#include "include/dataset/vision.h"
+#include "inc/utils.h"
+
+using mindspore::Context;
+using mindspore::Serialization;
+using mindspore::Model;
+using mindspore::Status;
+using mindspore::ModelType;
+using mindspore::GraphCell;
+using mindspore::kSuccess;
+using mindspore::MSTensor;
+using mindspore::dataset::Execute;
+using mindspore::dataset::vision::Decode;
+using mindspore::dataset::vision::Resize;
+using mindspore::dataset::vision::CenterCrop;
+using mindspore::dataset::vision::Normalize;
+using mindspore::dataset::vision::HWC2CHW;
+
+
+DEFINE_string(mindir_path, "", "mindir path");
+DEFINE_string(dataset_name, "imagenet2012", "['cifar10', 'imagenet2012']");
+DEFINE_string(input0_path, ".", "input0 path");
+DEFINE_int32(device_id, 0, "device id");
+
+int load_model(Model *model, std::vector<MSTensor> *model_inputs, std::string mindir_path, int device_id) {
+  if (RealPath(mindir_path).empty()) {
+    std::cout << "Invalid mindir" << std::endl;
+    return 1;
+  }
+
+  auto context = std::make_shared<Context>();
+  auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>();
+  ascend310->SetDeviceID(device_id);
+  context->MutableDeviceInfo().push_back(ascend310);
+  mindspore::Graph graph;
+  Serialization::Load(mindir_path, ModelType::kMindIR, &graph);
+
+  Status ret = model->Build(GraphCell(graph), context);
+  if (ret != kSuccess) {
+    std::cout << "ERROR: Build failed." << std::endl;
+    return 1;
+  }
+
+  *model_inputs = model->GetInputs();
+  if (model_inputs->empty()) {
+    std::cout << "Invalid model, inputs is empty." << std::endl;
+    return 1;
+  }
+  return 0;
+}
+
+int main(int argc, char **argv) {
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+
+  Model model;
+  std::vector<MSTensor> model_inputs;
+  load_model(&model, &model_inputs, FLAGS_mindir_path, FLAGS_device_id);
+
+  std::map<double, double> costTime_map;
+  struct timeval start = {0};
+  struct timeval end = {0};
+  double startTimeMs;
+  double endTimeMs;
+
+  auto input0_files = GetAllInputData(FLAGS_input0_path);
+  if (input0_files.empty()) {
+    std::cout << "ERROR: no input data." << std::endl;
+    return 1;
+  }
+  size_t size = input0_files.size();
+  for (size_t i = 0; i < size; ++i) {
+    for (size_t j = 0; j < input0_files[i].size(); ++j) {
+      std::vector<MSTensor> inputs;
+      std::vector<MSTensor> outputs;
+      std::cout << "Start predict input files:" << input0_files[i][j] <<std::endl;
+      auto decode = Decode();
+      auto resize = Resize({256, 256});
+      auto centercrop = CenterCrop({224, 224});
+      auto normalize = Normalize({123.675, 116.28, 103.53}, {58.395, 57.12, 57.375});
+      auto hwc2chw = HWC2CHW();
+
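+      // Compose the ops above into a single host-side preprocessing pipeline
+      // (decode -> resize -> center crop -> normalize -> HWC2CHW) applied to each image.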
+      Execute SingleOp({decode, resize, centercrop, normalize, hwc2chw});
+      auto imgDvpp = std::make_shared<MSTensor>();
+      SingleOp(ReadFileToTensor(input0_files[i][j]), imgDvpp.get());
+      inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(),
+                          imgDvpp->Data().get(), imgDvpp->DataSize());
+      gettimeofday(&start, nullptr);
+      Status ret = model.Predict(inputs, &outputs);
+      gettimeofday(&end, nullptr);
+      if (ret != kSuccess) {
+        std::cout << "Predict " << input0_files[i][j] << " failed." << std::endl;
+        return 1;
+      }
+      startTimeMs = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000;
+      endTimeMs = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000;
+      costTime_map.insert(std::pair<double, double>(startTimeMs, endTimeMs));
+      WriteResult(input0_files[i][j], outputs);
+    }
+  }
+  double average = 0.0;
+  int inferCount = 0;
+
+  for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
+    double diff = 0.0;
+    diff = iter->second - iter->first;
+    average += diff;
+    inferCount++;
+  }
+  average = average / inferCount;
+  std::stringstream timeCost;
+  timeCost << "NN inference cost average time: "<< average << " ms of infer_count " << inferCount << std::endl;
+  std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << inferCount << std::endl;
+  std::string fileName = "./time_Result" + std::string("/test_perform_static.txt");
+  std::ofstream fileStream(fileName.c_str(), std::ios::trunc);
+  fileStream << timeCost.str();
+  fileStream.close();
+  costTime_map.clear();
+  return 0;
+}
diff --git a/research/cv/ibnnet/ascend310_infer/src/utils.cc b/research/cv/ibnnet/ascend310_infer/src/utils.cc
new file mode 100644
index 0000000000000000000000000000000000000000..d71f388b83d23c2813d8bfc883dbcf2e7e0e4ef0
--- /dev/null
+++ b/research/cv/ibnnet/ascend310_infer/src/utils.cc
@@ -0,0 +1,185 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <algorithm>
+#include <iostream>
+#include "inc/utils.h"
+
+using mindspore::MSTensor;
+using mindspore::DataType;
+
+
+std::vector<std::vector<std::string>> GetAllInputData(std::string dir_name) {
+  std::vector<std::vector<std::string>> ret;
+
+  DIR *dir = OpenDir(dir_name);
+  if (dir == nullptr) {
+    return {};
+  }
+  struct dirent *filename;
+  /* read all the files in the dir ~ */
+  std::vector<std::string> sub_dirs;
+  while ((filename = readdir(dir)) != nullptr) {
+    std::string d_name = std::string(filename->d_name);
+    // get rid of "." and ".."
+    if (d_name == "." || d_name == ".." || d_name.empty()) {
+      continue;
+    }
+    std::string dir_path = RealPath(std::string(dir_name) + "/" + filename->d_name);
+    struct stat s;
+    lstat(dir_path.c_str(), &s);
+    if (!S_ISDIR(s.st_mode)) {
+      continue;
+    }
+
+    sub_dirs.emplace_back(dir_path);
+  }
+  std::sort(sub_dirs.begin(), sub_dirs.end());
+
+  (void)std::transform(sub_dirs.begin(), sub_dirs.end(), std::back_inserter(ret),
+                       [](const std::string &d) { return GetAllFiles(d); });
+
+  return ret;
+}
+
+
+std::vector<std::string> GetAllFiles(std::string dir_name) {
+  struct dirent *filename;
+  DIR *dir = OpenDir(dir_name);
+  if (dir == nullptr) {
+    return {};
+  }
+
+  std::vector<std::string> res;
+  while ((filename = readdir(dir)) != nullptr) {
+    std::string d_name = std::string(filename->d_name);
+    if (d_name == "." || d_name == ".." || d_name.size() <= 3) {
+      continue;
+    }
+    res.emplace_back(std::string(dir_name) + "/" + filename->d_name);
+  }
+  std::sort(res.begin(), res.end());
+
+  return res;
+}
+
+
+std::vector<std::string> GetAllFiles(std::string_view dirName) {
+  struct dirent *filename;
+  DIR *dir = OpenDir(dirName);
+  if (dir == nullptr) {
+    return {};
+  }
+  std::vector<std::string> res;
+  while ((filename = readdir(dir)) != nullptr) {
+    std::string dName = std::string(filename->d_name);
+    if (dName == "." || dName == ".." || filename->d_type != DT_REG) {
+      continue;
+    }
+    res.emplace_back(std::string(dirName) + "/" + filename->d_name);
+  }
+  std::sort(res.begin(), res.end());
+  for (auto &f : res) {
+    std::cout << "image file: " << f << std::endl;
+  }
+  return res;
+}
+
+
+int WriteResult(const std::string& imageFile, const std::vector<MSTensor> &outputs) {
+  std::string homePath = "./result_Files";
+  for (size_t i = 0; i < outputs.size(); ++i) {
+    size_t outputSize;
+    std::shared_ptr<const void> netOutput;
+    netOutput = outputs[i].Data();
+    outputSize = outputs[i].DataSize();
+    int pos = imageFile.rfind('/');
+    std::string fileName(imageFile, pos + 1);
+    fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin");
+    std::string outFileName = homePath + "/" + fileName;
+    FILE *outputFile = fopen(outFileName.c_str(), "wb");
+    fwrite(netOutput.get(), outputSize, sizeof(char), outputFile);
+    fclose(outputFile);
+    outputFile = nullptr;
+  }
+  return 0;
+}
+
+mindspore::MSTensor ReadFileToTensor(const std::string &file) {
+  if (file.empty()) {
+    std::cout << "Pointer file is nullptr" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  std::ifstream ifs(file);
+  if (!ifs.good()) {
+    std::cout << "File: " << file << " is not exist" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  if (!ifs.is_open()) {
+    std::cout << "File: " << file << "open failed" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  ifs.seekg(0, std::ios::end);
+  size_t size = ifs.tellg();
+  mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size);
+
+  ifs.seekg(0, std::ios::beg);
+  ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
+  ifs.close();
+
+  return buffer;
+}
+
+
+DIR *OpenDir(std::string_view dirName) {
+  if (dirName.empty()) {
+    std::cout << " dirName is null ! " << std::endl;
+    return nullptr;
+  }
+  std::string realPath = RealPath(dirName);
+  struct stat s;
+  lstat(realPath.c_str(), &s);
+  if (!S_ISDIR(s.st_mode)) {
+    std::cout << "dirName is not a valid directory !" << std::endl;
+    return nullptr;
+  }
+  DIR *dir;
+  dir = opendir(realPath.c_str());
+  if (dir == nullptr) {
+    std::cout << "Can not open dir " << dirName << std::endl;
+    return nullptr;
+  }
+  std::cout << "Successfully opened the dir " << dirName << std::endl;
+  return dir;
+}
+
+std::string RealPath(std::string_view path) {
+  char realPathMem[PATH_MAX] = {0};
+  char *realPathRet = nullptr;
+  realPathRet = realpath(path.data(), realPathMem);
+  if (realPathRet == nullptr) {
+    std::cout << "File: " << path << " is not exist.";
+    return "";
+  }
+
+  std::string realPath(realPathMem);
+  std::cout << path << " realpath is: " << realPath << std::endl;
+  return realPath;
+}
diff --git a/research/cv/ibnnet/eval.py b/research/cv/ibnnet/eval.py
index 69733a27852552a7d62d46ecec3c65cf21fc47f4..b619806b7d056a6024c386926c037084deecba43 100644
--- a/research/cv/ibnnet/eval.py
+++ b/research/cv/ibnnet/eval.py
@@ -59,7 +59,7 @@ if __name__ == "__main__":
     step = 60
     target = args.device_target
     context.set_context(mode=context.GRAPH_MODE, device_target=target, save_graphs=False)
-    context.set_context(device_id=args.device_id)
+    context.set_context(device_id=args.device_id, enable_auto_mixed_precision=True)
     lr = lr_generator(cfg.lr, train_epoch, steps_per_epoch=step)

     net = resnet50_ibn_a(num_classes=cfg.class_num)
diff --git a/research/cv/ibnnet/postprocess.py b/research/cv/ibnnet/postprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..4144a3ee71e41825744a5651d01e04983513e355
--- /dev/null
+++ b/research/cv/ibnnet/postprocess.py
@@ -0,0 +1,48 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""postprocess for 310 inference"""
+import os
+import argparse
+import json
+import numpy as np
+from mindspore.nn import Top1CategoricalAccuracy, Top5CategoricalAccuracy
+parser = argparse.ArgumentParser(description="postprocess")
+label_path = "./preprocess_Result/cifar10_label_ids.npy"
+parser.add_argument("--result_dir", type=str, default="./result_Files", help="result files path.")
+parser.add_argument('--dataset_name', type=str, default="imagenet2012")
+parser.add_argument("--label_dir", type=str, default=label_path, help="image file path.")
+args = parser.parse_args()
+
+def calcul_acc(lab, preds):
+    return sum(1 for x, y in zip(lab, preds) if x == y) / len(lab)
+if __name__ == '__main__':
+    batch_size = 1
+    top1_acc = Top1CategoricalAccuracy()
+    rst_path = args.result_dir
+    label_list = []
+    pred_list = []
+    file_list = os.listdir(rst_path)
+    top5_acc = Top5CategoricalAccuracy()
+    with open('./preprocess_Result/imagenet_label.json', "r") as label:
+        labels = json.load(label)
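+    # Each result file is named <image_name>_0.bin; strip the suffix to recover the
+    # image name and look up its ground-truth label in imagenet_label.json.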
+    for f in file_list:
+        label = f.split("_0.bin")[0] + ".JPEG"
+        label_list.append(labels[label])
+        pred = np.fromfile(os.path.join(rst_path, f), np.float32)
+        pred = pred.reshape(batch_size, int(pred.shape[0] / batch_size))
+        top1_acc.update(pred, [labels[label],])
+        top5_acc.update(pred, [labels[label],])
+    print("Top1 acc: ", top1_acc.eval())
+    print("Top5 acc: ", top5_acc.eval())
diff --git a/research/cv/ibnnet/preprocess.py b/research/cv/ibnnet/preprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..da3a7f8f5ebf3b7645f493246bb5181bbe5990dd
--- /dev/null
+++ b/research/cv/ibnnet/preprocess.py
@@ -0,0 +1,48 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""preprocess"""
+import os
+import argparse
+import json
+parser = argparse.ArgumentParser('preprocess')
+parser.add_argument('--dataset_name', type=str, choices=["cifar10", "imagenet2012"], default="imagenet2012")
+parser.add_argument('--data_path', type=str, default='', help='eval data dir')
+parser.add_argument('--result_path', type=str, default='./preprocess_Result/', help='result path')
+def create_label(result_path, dir_path):
+    """create label json for imagenet"""
+    print("[WARNING] Create imagenet label. Currently only use for Imagenet2012!")
+    dirs = os.listdir(dir_path)
+    file_list = []
+    for file in dirs:
+        file_list.append(file)
+    file_list = sorted(file_list)
+
+    total = 0
+    img_label = {}
+    for i, file_dir in enumerate(file_list):
+        files = os.listdir(os.path.join(dir_path, file_dir))
+        for f in files:
+            img_label[f] = i
+        total += len(files)
+
+    json_file = os.path.join(result_path, "imagenet_label.json")
+    with open(json_file, "w+") as label:
+        json.dump(img_label, label)
+
+    print("[INFO] Completed! Total {} data.".format(total))
+
+args = parser.parse_args()
+if __name__ == "__main__":
+    create_label('./preprocess_Result/', args.data_path)
diff --git a/research/cv/ibnnet/scripts/run_310_infer.sh b/research/cv/ibnnet/scripts/run_310_infer.sh
new file mode 100644
index 0000000000000000000000000000000000000000..acf73c1fcd99ef81036e3fbbcce00e75536e1412
--- /dev/null
+++ b/research/cv/ibnnet/scripts/run_310_infer.sh
@@ -0,0 +1,124 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+if [[ $# -lt 2 || $# -gt 3 ]]; then
+    echo "Usage: bash run_310_infer.sh [MINDIR_PATH] [DATASET_PATH] [DEVICE_ID]
+    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
+exit 1
+fi
+
+get_real_path(){
+    if [ "${1:0:1}" == "/" ]; then
+        echo "$1"
+    else
+        echo "$(realpath -m $PWD/$1)"
+    fi
+}
+model=$(get_real_path $1)
+dataset_name='imagenet2012'
+dataset_path=$(get_real_path $2)
+need_preprocess='y'
+device_id=0
+if [ $# == 3 ]; then
+    device_id=$3
+fi
+
+echo "mindir name: "$model
+echo "dataset name: "$dataset_name
+echo "dataset path: "$dataset_path
+echo "need preprocess: "$need_preprocess
+echo "device id: "$device_id
+
+export ASCEND_HOME=/usr/local/Ascend/
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+export ASCEND_HOME=/usr/local/Ascend
+export PATH=$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/toolkit/bin:$PATH
+export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:/usr/local/Ascend/toolkit/lib64:$LD_LIBRARY_PATH
+export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages
+export PATH=/usr/local/python375/bin:$PATH
+export NPU_HOST_LIB=/usr/local/Ascend/acllib/lib64/stub
+export ASCEND_OPP_PATH=/usr/local/Ascend/opp
+export ASCEND_AICPU_PATH=/usr/local/Ascend
+export LD_LIBRARY_PATH=/usr/local/lib64/:$LD_LIBRARY_PATH
+function preprocess_data()
+{
+    if [ -d preprocess_Result ]; then
+        rm -rf ./preprocess_Result
+    fi
+    mkdir preprocess_Result
+    python3.7 ../preprocess.py --dataset_name=$dataset_name --data_path=$dataset_path
+}
+
+function compile_app()
+{
+    cd ../ascend310_infer/ || exit
+    bash build.sh &> build.log
+}
+
+function infer()
+{
+    cd - || exit
+    if [ -d result_Files ]; then
+        rm -rf ./result_Files
+    fi
+    if [ -d time_Result ]; then
+        rm -rf ./time_Result
+    fi
+    mkdir result_Files
+    mkdir time_Result
+
+    ../ascend310_infer/out/main --mindir_path=$model --dataset_name=$dataset_name --input0_path=$dataset_path --device_id=$device_id &> infer.log
+
+}
+
+function cal_acc()
+{
+    python3.7 ../postprocess.py --dataset_name=$dataset_name &> acc.log
+}
+
+if [ $need_preprocess == "y" ]; then
+    preprocess_data
+    if [ $? -ne 0 ]; then
+        echo "preprocess dataset failed"
+        exit 1
+    fi
+fi
+compile_app
+if [ $? -ne 0 ]; then
+    echo "compile app code failed"
+    exit 1
+fi
+infer
+if [ $? -ne 0 ]; then
+    echo "execute inference failed"
+    exit 1
+fi
+cal_acc
+if [ $? -ne 0 ]; then
+    echo "calculate accuracy failed"
+    exit 1
+fi
diff --git a/research/cv/ibnnet/train.py b/research/cv/ibnnet/train.py
index 15abb4796eed9a248dc000a8efd437a33527fc6b..a3eb7ac9fba9b89b8268921b798c1d40c147bdee 100644
--- a/research/cv/ibnnet/train.py
+++ b/research/cv/ibnnet/train.py
@@ -20,8 +20,7 @@ import os

 import mindspore.nn as nn
 from mindspore import context
-from mindspore.context import ParallelMode
-from mindspore.train.model import Model
+from mindspore.train.model import Model, ParallelMode
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor, Callback
 from mindspore.nn.metrics import Accuracy
@@ -97,7 +96,8 @@ if __name__ == "__main__":
     if args.device_num > 1:
         if target == 'Ascend':
             device_id = int(os.getenv('DEVICE_ID'))
-            context.set_context(device_id=device_id)
+            context.set_context(device_id=device_id,
+                                enable_auto_mixed_precision=True)
             context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True,
                                               auto_parallel_search_mode="recursive_programming")