diff --git a/research/cv/DeepID/README.md b/research/cv/DeepID/README.md
index faea081b4591e8d41717d8b2e444880bf3ef7724..8647d1a34dcc67c3250f04874377b836c5cb0256 100644
--- a/research/cv/DeepID/README.md
+++ b/research/cv/DeepID/README.md
@@ -54,7 +54,7 @@ python crop.py
 
 After cropping images, change **src_folder** in split.py and split data into two set, One is for train and one is for valid.
 
-```python
+```bash
 python split.py
 ```
 
@@ -75,11 +75,13 @@
 ```shell
 .
 └─ DeepID
+  ├─ ascend310_infer                           // Ascend 310 inference directory
   ├─ README.md                                 // Descriptions about DeepID
   ├─ scripts
     ├─ run_standalone_train_ascend.sh          // Train standalone
    ├─ run_distribute_train_ascend.sh           // Train distribute
    └─ run_eval_ascend.sh                       // Evaluation
+   └─ run_infer_310.sh                         // Ascend 310 inference
  ├─src
    ├─ dataset.py                               // Prepare dataset
    ├─ loss.py                                  // Loss function
@@ -92,11 +94,13 @@
  ├─ eval.py                                    // Evaluation script
  ├─ train.py                                   // Train script
  ├─ export.py                                  // Export mindir script
+ ├─ preprocess.py                              // Convert images and labels to bin
+ ├─ postprocess.py                             // Calculate accuracy
 ```
 
 ## Script parameters
 
-```shell
+```bash
 'data_url':'./data/'            # Dataset path
 'epochs':200                    # Total epochs
 'lr':1e-4                       # Learning rate
@@ -132,6 +136,25 @@ sh run_distribute_train.sh [DEVICE_NUM] [DISTRIBUTE] [RANK_TABLE_FILE]
 
 sh eval_ascend.sh [DEVICE_NUM] [DEVICE_ID]
 ```
 
+## [Ascend 310 inference](#contents)
+
+### Export MindIR
+
+```bash
+python export.py
+```
+
+### Infer on Ascend 310
+
+```bash
+cd scripts
+bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
+```
+
+- `MINDIR_PATH` Path of the exported MINDIR file
+- `DATA_PATH` Path of the dataset
+- `DEVICE_ID` Optional, default is 0
+
 # [Result](#Contents)
 
 The evaluation results will be saved in the sample path in a log file named "log_eval.txt". You can find results similar to the following in the log.
@@ -173,6 +196,16 @@ Valid dataset accuracy: 0.9683
 | batch_size          | 512                         |
 | outputs             | Accuracy = 96.83%           |
 
+| Parameters          | Ascend 310                  |
+| ------------------- | --------------------------- |
+| Model Version       | DeepID                      |
+| Resource            | Ascend 310                  |
+| Uploaded Date       | 11/30/2021 (month/day/year) |
+| MindSpore Version   | 1.3.1                       |
+| Dataset             | Youtube Face                |
+| batch_size          | 1                           |
+| outputs             | Accuracy = 96.83%           |
+
 # [ModelZoo Homepage](#Contents)
 
 Please check the official [homepage](https://gitee.com/mindspore/models).
\ No newline at end of file
diff --git a/research/cv/DeepID/ascend310_infer/inc/utils.h b/research/cv/DeepID/ascend310_infer/inc/utils.h
new file mode 100644
index 0000000000000000000000000000000000000000..0b400632f51ee34707a5becc00f7f5ba05899b3a
--- /dev/null
+++ b/research/cv/DeepID/ascend310_infer/inc/utils.h
@@ -0,0 +1,33 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef MINDSPORE_INFERENCE_UTILS_H_ +#define MINDSPORE_INFERENCE_UTILS_H_ + +#include <sys/stat.h> +#include <dirent.h> +#include <vector> +#include <string> +#include <memory> +#include "include/api/types.h" + +DIR *OpenDir(std::string_view dirName); +std::string RealPath(std::string_view path); +mindspore::MSTensor ReadFileToTensor(const std::string &file); +int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs); +std::vector<std::string> GetAllFiles(std::string dir_name); + +#endif diff --git a/research/cv/DeepID/ascend310_infer/src/CMakeLists.txt b/research/cv/DeepID/ascend310_infer/src/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..14e676821a4936c03e98b9299b3b5f5e4496a8ea --- /dev/null +++ b/research/cv/DeepID/ascend310_infer/src/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.14.1) +project(MindSporeCxxTestcase[CXX]) +add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined") +set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/) +option(MINDSPORE_PATH "mindspore install path" "") +include_directories(${MINDSPORE_PATH}) +include_directories(${MINDSPORE_PATH}/include) +include_directories(${PROJECT_SRC_ROOT}/../) +find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib) +file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*) +find_package(gflags REQUIRED) +add_executable(main main.cc utils.cc) +target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags) diff --git a/research/cv/DeepID/ascend310_infer/src/build.sh b/research/cv/DeepID/ascend310_infer/src/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..f1945fe52b0924fe9826042de2f1b7c7f02ebe98 --- /dev/null +++ b/research/cv/DeepID/ascend310_infer/src/build.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# Copyright 2021 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake . -DMINDSPORE_PATH="`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`" +make diff --git a/research/cv/DeepID/ascend310_infer/src/main.cc b/research/cv/DeepID/ascend310_infer/src/main.cc new file mode 100644 index 0000000000000000000000000000000000000000..a5ac8bc96188f6c91c0a0daf52889d888800d4e7 --- /dev/null +++ b/research/cv/DeepID/ascend310_infer/src/main.cc @@ -0,0 +1,139 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include <sys/time.h> +#include <gflags/gflags.h> +#include <dirent.h> +#include <iostream> +#include <string> +#include <algorithm> +#include <iosfwd> +#include <vector> +#include <fstream> +#include <sstream> + +#include "../inc/utils.h" +#include "include/dataset/execute.h" +#include "include/dataset/transforms.h" +#include "include/dataset/vision.h" +#include "include/dataset/vision_ascend.h" +#include "include/api/types.h" +#include "include/api/model.h" +#include "include/api/serialization.h" +#include "include/api/context.h" +using mindspore::dataset::vision::Decode; +using mindspore::dataset::vision::Resize; +using mindspore::dataset::vision::CenterCrop; +using mindspore::dataset::vision::Normalize; +using mindspore::dataset::vision::HWC2CHW; +using mindspore::dataset::TensorTransform; +using mindspore::Context; +using mindspore::Serialization; +using mindspore::Model; +using mindspore::Status; +using mindspore::ModelType; +using mindspore::GraphCell; +using mindspore::kSuccess; +using mindspore::MSTensor; +using mindspore::dataset::Execute; + +DEFINE_string(mindir_path, "", "mindir path"); +DEFINE_string(img_path, ".", "img path"); +DEFINE_int32(device_id, 0, "device id"); + +int main(int argc, char **argv) { + gflags::ParseCommandLineFlags(&argc, &argv, true); + if (RealPath(FLAGS_mindir_path).empty()) { + std::cout << "Invalid mindir" << std::endl; + return 1; + } + if (RealPath(FLAGS_img_path).empty()) { + std::cout << "Invalid img" << std::endl; + return 1; + } + + auto context = std::make_shared<Context>(); + auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>(); + ascend310->SetDeviceID(FLAGS_device_id); + context->MutableDeviceInfo().push_back(ascend310); + mindspore::Graph graph; + Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph); + Model model; + Status ret = model.Build(GraphCell(graph), context); + if (ret != kSuccess) { + std::cout << "ERROR: Build failed." << std::endl; + return 1; + } + + std::vector<MSTensor> modelInputs = model.GetInputs(); + auto all_img_files = GetAllFiles(FLAGS_img_path); + if (all_img_files.empty()) { + std::cout << "ERROR: no input img." << std::endl; + return 1; + } + + std::map<double, double> costTime_map; + size_t size = all_img_files.size(); + + for (size_t i = 0; i < size; ++i) { + struct timeval start = {0}; + struct timeval end = {0}; + double startTimeMs; + double endTimeMs; + + std::vector<MSTensor> inputs; + std::vector<MSTensor> outputs; + std::cout << "Start predict input files:" << all_img_files[i] <<std::endl; + + auto image = ReadFileToTensor(all_img_files[i]); + + inputs.emplace_back(modelInputs[0].Name(), modelInputs[0].DataType(), modelInputs[0].Shape(), + image.Data().get(), image.DataSize()); + + gettimeofday(&start, nullptr); + ret = model.Predict(inputs, &outputs); + gettimeofday(&end, nullptr); + if (ret != kSuccess) { + std::cout << "Predict " << all_img_files[i] << " failed." << std::endl; + return 1; + } + std::cout << "Predict Successful!" << std::endl; + startTimeMs = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000; + endTimeMs = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000; + costTime_map.insert(std::pair<double, double>(startTimeMs, endTimeMs)); + int rst = WriteResult(all_img_files[i], outputs); + if (rst != 0) { + std::cout << "write result failed." 
<< std::endl; + return rst; + } + } + double average = 0.0; + int inferCount = 0; + + for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) { + average += iter->second - iter->first; + inferCount++; + } + average = average / inferCount; + std::stringstream timeCost; + timeCost << "NN inference cost average time: "<< average << " ms of infer_count " << inferCount << std::endl; + std::cout << "NN inference cost average time: "<< average << "ms of infer_count " << inferCount << std::endl; + std::string fileName = "./time_Result" + std::string("/test_perform_static.txt"); + std::ofstream fileStream(fileName.c_str(), std::ios::trunc); + fileStream << timeCost.str(); + fileStream.close(); + costTime_map.clear(); + return 0; +} diff --git a/research/cv/DeepID/ascend310_infer/src/utils.cc b/research/cv/DeepID/ascend310_infer/src/utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..d545acd312fbd7dc3ae2da3a7d29aea3c6db86e3 --- /dev/null +++ b/research/cv/DeepID/ascend310_infer/src/utils.cc @@ -0,0 +1,197 @@ +/** + * Copyright 2021 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <fstream> +#include <algorithm> +#include <iostream> +#include "inc/utils.h" + +using mindspore::MSTensor; +using mindspore::DataType; + +std::vector<std::vector<std::string>> GetAllInputData(std::string dir_name) { + std::vector<std::vector<std::string>> ret; + + DIR *dir = OpenDir(dir_name); + if (dir == nullptr) { + return {}; + } + struct dirent *filename; + /* read all the files in the dir ~ */ + std::vector<std::string> sub_dirs; + while ((filename = readdir(dir)) != nullptr) { + std::string d_name = std::string(filename->d_name); + // get rid of "." and ".." + if (d_name == "." || d_name == ".." || d_name.empty()) { + continue; + } + std::string dir_path = RealPath(std::string(dir_name) + "/" + filename->d_name); + struct stat s; + lstat(dir_path.c_str(), &s); + if (!S_ISDIR(s.st_mode)) { + continue; + } + + sub_dirs.emplace_back(dir_path); + } + std::sort(sub_dirs.begin(), sub_dirs.end()); + + (void)std::transform(sub_dirs.begin(), sub_dirs.end(), std::back_inserter(ret), + [](const std::string &d) { return GetAllFiles(d); }); + + return ret; +} + + +std::vector<std::string> GetAllFiles(std::string dir_name) { + struct dirent *filename; + DIR *dir = OpenDir(dir_name); + if (dir == nullptr) { + return {}; + } + + std::vector<std::string> res; + while ((filename = readdir(dir)) != nullptr) { + std::string d_name = std::string(filename->d_name); + if (d_name == "." || d_name == ".." 
|| d_name.size() <= 3) { + continue; + } + res.emplace_back(std::string(dir_name) + "/" + filename->d_name); + } + std::sort(res.begin(), res.end()); + + return res; +} + + +std::vector<std::string> GetAllFiles(std::string_view dirName) { + struct dirent *filename; + DIR *dir = OpenDir(dirName); + if (dir == nullptr) { + return {}; + } + std::vector<std::string> res; + while ((filename = readdir(dir)) != nullptr) { + std::string dName = std::string(filename->d_name); + if (dName == "." || dName == ".." || filename->d_type != DT_REG) { + continue; + } + res.emplace_back(std::string(dirName) + "/" + filename->d_name); + } + std::sort(res.begin(), res.end()); + for (auto &f : res) { + std::cout << "image file: " << f << std::endl; + } + return res; +} + + +int WriteResult(const std::string& imageFile, const std::vector<MSTensor> &outputs) { + std::string homePath = "./result_Files"; + const int INVALID_POINTER = -1; + const int ERROR = -2; + for (size_t i = 0; i < outputs.size(); ++i) { + size_t outputSize; + std::shared_ptr<const void> netOutput; + netOutput = outputs[i].Data(); + outputSize = outputs[i].DataSize(); + int pos = imageFile.rfind('/'); + std::string fileName(imageFile, pos + 1); + fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin"); + std::string outFileName = homePath + "/" + fileName; + FILE *outputFile = fopen(outFileName.c_str(), "wb"); + if (outputFile == nullptr) { + std::cout << "open result file " << outFileName << " failed" << std::endl; + return INVALID_POINTER; + } + size_t size = fwrite(netOutput.get(), sizeof(char), outputSize, outputFile); + if (size != outputSize) { + fclose(outputFile); + outputFile = nullptr; + std::cout << "write result file " << outFileName << " failed, write size[" << size << + "] is smaller than output size[" << outputSize << "], maybe the disk is full." << std::endl; + return ERROR; + } + fclose(outputFile); + outputFile = nullptr; + } + return 0; +} + +mindspore::MSTensor ReadFileToTensor(const std::string &file) { + if (file.empty()) { + std::cout << "Pointer file is nullptr" << std::endl; + return mindspore::MSTensor(); + } + + std::ifstream ifs(file); + if (!ifs.good()) { + std::cout << "File: " << file << " is not exist" << std::endl; + return mindspore::MSTensor(); + } + + if (!ifs.is_open()) { + std::cout << "File: " << file << "open failed" << std::endl; + return mindspore::MSTensor(); + } + + ifs.seekg(0, std::ios::end); + size_t size = ifs.tellg(); + mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size); + + ifs.seekg(0, std::ios::beg); + ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size); + ifs.close(); + + return buffer; +} + + +DIR *OpenDir(std::string_view dirName) { + if (dirName.empty()) { + std::cout << " dirName is null ! " << std::endl; + return nullptr; + } + std::string realPath = RealPath(dirName); + struct stat s; + lstat(realPath.c_str(), &s); + if (!S_ISDIR(s.st_mode)) { + std::cout << "dirName is not a valid directory !" 
<< std::endl;
+    return nullptr;
+  }
+  DIR *dir;
+  dir = opendir(realPath.c_str());
+  if (dir == nullptr) {
+    std::cout << "Can not open dir " << dirName << std::endl;
+    return nullptr;
+  }
+  std::cout << "Successfully opened the dir " << dirName << std::endl;
+  return dir;
+}
+
+std::string RealPath(std::string_view path) {
+  char realPathMem[PATH_MAX] = {0};
+  char *realPathRet = nullptr;
+  realPathRet = realpath(path.data(), realPathMem);
+  if (realPathRet == nullptr) {
+    std::cout << "File: " << path << " is not exist.";
+    return "";
+  }
+
+  std::string realPath(realPathMem);
+  std::cout << path << " realpath is: " << realPath << std::endl;
+  return realPath;
+}
diff --git a/research/cv/DeepID/postprocess.py b/research/cv/DeepID/postprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..68c20f8e3ff7d99433832b8e53a7efe085e90b2e
--- /dev/null
+++ b/research/cv/DeepID/postprocess.py
@@ -0,0 +1,51 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""post process for 310 inference"""
+import os
+import argparse
+import numpy as np
+
+
+parser = argparse.ArgumentParser(description='PostProcess args')
+parser.add_argument('--result_path', type=str, required=True, help='Path of the inference result bin files')
+parser.add_argument('--ori_path', type=str, required=True, help='Path of the original label bin files')
+
+args_opt = parser.parse_args()
+
+
+if __name__ == '__main__':
+    result_path = args_opt.result_path
+    ori_path = args_opt.ori_path
+    count = 0
+
+    result_file = os.listdir(result_path)
+    ori_path_file = os.listdir(ori_path)
+
+    assert len(result_file) == len(ori_path_file)
+
+    total_num = len(result_file)
+
+    for i in range(total_num):
+        ori_label_name = os.path.join(ori_path, 'sop_' + str(i) + '.bin')
+        result_label_name = os.path.join(result_path, 'sop_' + str(i) + '_0.bin')
+        ori_label = np.fromfile(ori_label_name, np.int64)
+        result_label = np.argmax(np.fromfile(result_label_name, np.float32), axis=0)
+        print("Start processing", ori_label_name)
+        if ori_label == result_label:
+            count += 1
+
+    acc = 100 * count / total_num
+    print("=" * 20, "Calculate accuracy finished", "=" * 20)
+    print("Accuracy is", round(acc, 2), "%")
diff --git a/research/cv/DeepID/preprocess.py b/research/cv/DeepID/preprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9795af56b432bc6c7e17e1504d52e6442136218
--- /dev/null
+++ b/research/cv/DeepID/preprocess.py
@@ -0,0 +1,56 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""Preprocess for DeepID"""
+import os
+import time
+import argparse
+
+from src.dataset import dataloader
+
+parser = argparse.ArgumentParser(description='DeepID_preprocess')
+
+parser.add_argument('--data_url', type=str, default='data/', help='Dataset path')
+parser.add_argument('--save_url', type=str, default='../bin_data', help='Output path of the converted bin files')
+parser.add_argument('--batch_size', type=int, default=1, help='Batch Size')
+parser.add_argument('--mode', type=str, default='valid', help='dataset mode')
+
+if __name__ == '__main__':
+    args_opt = parser.parse_args()
+
+    valid_dataset, valid_dataset_length = dataloader(args_opt.data_url, epoch=1,
+                                                     mode=args_opt.mode, batch_size=args_opt.batch_size)
+
+    valid_dataset_iter = valid_dataset.create_dict_iterator()
+    print('Valid dataset length:', valid_dataset_length)
+
+    img_path = os.path.join(args_opt.save_url, "img_data")
+    label_path = os.path.join(args_opt.save_url, "label")
+    if not os.path.exists(img_path):
+        os.makedirs(img_path)
+    if not os.path.exists(label_path):
+        os.makedirs(label_path)
+
+    for idx, data in enumerate(valid_dataset_iter):
+        step_begin_time = time.time()
+        img_valid = data['image']
+        label_valid = data['label']
+        file_name = "sop_" + str(idx) + ".bin"
+        img_file_path = os.path.join(img_path, file_name)
+        label_file_path = os.path.join(label_path, file_name)
+        img_valid.asnumpy().tofile(img_file_path)
+        label_valid.asnumpy().tofile(label_file_path)
+        print('Finish processing img', idx, "saving as", file_name)
+
+    print("=" * 20, "export bin files finished", "=" * 20)
diff --git a/research/cv/DeepID/scripts/run_infer_310.sh b/research/cv/DeepID/scripts/run_infer_310.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f00242954bca67bc7b4e63fe47bd61d3be08e30e
--- /dev/null
+++ b/research/cv/DeepID/scripts/run_infer_310.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+if [[ $# -lt 2 || $# -gt 3 ]]; then
+    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
+    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
+exit 1
+fi
+
+get_real_path(){
+    if [ "${1:0:1}" == "/" ]; then
+        echo "$1"
+    else
+        echo "$(realpath -m $PWD/$1)"
+    fi
+}
+model=$(get_real_path $1)
+data_path=$(get_real_path $2)
+
+scripts_path=$PWD
+
+device_id=0
+if [ $# == 3 ]; then
+    device_id=$3
+fi
+
+echo "mindir name: "$model
+echo "dataset path: "$data_path
+echo "device id: "$device_id
+
+export ASCEND_HOME=/usr/local/Ascend/
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+
+
+function preprocess_data()
+{
+    cd ../ || exit
+    if [ -d bin_data ]; then
+        rm -rf ./bin_data
+    fi
+    mkdir bin_data
+    cd $scripts_path
+    python3.7 ../preprocess.py --data_url=$data_path &> preprocess.log
+}
+
+function compile_app()
+{
+    cd ../ascend310_infer/src/ || exit
+    if [ -f "Makefile" ]; then
+        make clean
+    fi
+    bash build.sh &> build.log
+
+}
+
+function infer()
+{
+    cd - || exit
+    if [ -d result_Files ]; then
+        rm -rf ./result_Files
+    fi
+    if [ -d time_Result ]; then
+        rm -rf ./time_Result
+    fi
+    mkdir result_Files
+    mkdir time_Result
+
+    ../ascend310_infer/src/main --mindir_path=$model --img_path="../bin_data/img_data" --device_id=$device_id &> infer.log
+}
+
+function postprocess()
+{
+    cd $scripts_path
+    python3.7 ../postprocess.py --result_path='./result_Files' --ori_path='../bin_data/label' &> postprocess.log
+}
+
+preprocess_data
+if [ $? -ne 0 ]; then
+    echo "preprocess data failed"
+    exit 1
+fi
+compile_app
+if [ $? -ne 0 ]; then
+    echo "compile app code failed"
+    exit 1
+fi
+infer
+if [ $? -ne 0 ]; then
+    echo "execute inference failed"
+    exit 1
+fi
+postprocess
+if [ $? -ne 0 ]; then
+    echo "execute postprocess failed"
+    exit 1
+fi
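+
+# Optional convenience step added on top of the original flow (a small sketch, not required for
+# correctness): postprocess.py above prints an "Accuracy is ..." summary, which the redirection in
+# postprocess() captures in postprocess.log inside this scripts directory, so surface it here.
+echo "postprocess finished, accuracy result:"
+grep "Accuracy" postprocess.log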