Commit dc57dde9 authored by liurishen

add pagenet 310 infer

parent 5f109fae
@@ -88,6 +88,7 @@ The PAGE-Net network consists of three parts: a CNN module for feature extraction, a pyramid attention…
│ ├── run_distribute_train_gpu.sh # multi-device training script (GPU)
│ ├── run_distribute_train.sh # multi-device training script (Ascend)
│ ├── run_eval.sh # evaluation script (Ascend & GPU)
│ ├── run_infer_310.sh # Ascend 310 inference script
├── src
│ ├── model_utils
│ │ ├── config.py
@@ -99,10 +100,13 @@ The PAGE-Net network consists of three parts: a CNN module for feature extraction, a pyramid attention…
│ ├── train_loss.py # loss definition
│ ├── MyTrainOneStep.py # training wrapper cell definition
│ ├── vgg.py # VGG definition
├── ascend310_infer # Ascend 310 inference
├── train.py # training script
├── eval.py # evaluation script
├── export.py # model export script
├── requirements.txt # requirements
├── preprocess.py # preprocessing for 310 inference
├── postprocess.py # postprocessing for 310 inference
```
### Script Parameters
@@ -158,6 +162,19 @@ bash scripts/eval.sh [DEVICE_ID] [CONFIG_PATH] # run evaluation
python export.py --config_path=[CONFIG_PATH] # export MINDIR; the model file path is ckpt_file in the config
```
```text
After the model has been exported successfully, run the following steps for Ascend 310 inference.
First, modify the default_config_ascend.yaml file:
1. Set test_img_path to the path of the inference dataset's original images
2. Set test_gt_path to the path of the inference dataset's MASK images
3. Set batchsize to 1
These edits are sketched below.
```
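A minimal sketch of the relevant default_config_ascend.yaml entries (the key names follow the options referenced above; the paths are placeholders, and every other entry in the file stays unchanged):

```text
# default_config_ascend.yaml (excerpt; replace the paths with your own)
test_img_path: /path/to/test/images/
test_gt_path: /path/to/test/masks/
batchsize: 1
```

With the config updated, run the inference command: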
```bash
bash run_infer_310.sh [MINDIR_PATH] [CONFIG_PATH] [DEVICE_ID]
```
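run_infer_310.sh chains four steps, each logged to its own file: preprocess.py dumps the test images and masks as .bin files into preprocess_Result/ and preprocess_Mask_Result/ (preprocess.log), build.sh compiles the 310 executable (build.log), out/main runs inference and writes one .bin result per image into result_Files/ plus timing into time_Result/test_perform_static.txt (infer.log), and postprocess.py computes the final F-score (postprocess.log).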
## Model Description
### Evaluation Performance
@@ -232,4 +249,4 @@ python export.py --config_path=[CONFIG_PATH] # export MINDIR; the model file path…
## ModelZoo Homepage
Please visit the official [homepage](https://gitee.com/mindspore/models)
cmake_minimum_required(VERSION 3.14.1)
project(Ascend310Infer)
add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
option(MINDSPORE_PATH "mindspore install path" "")
include_directories(${MINDSPORE_PATH})
include_directories(${MINDSPORE_PATH}/include)
include_directories(${PROJECT_SRC_ROOT})
find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
find_package(gflags REQUIRED)
add_executable(main src/main.cc src/utils.cc)
target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
#!/bin/bash
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
if [ -d out ]; then
    rm -rf out
fi
mkdir out
cd out || exit

if [ -f "Makefile" ]; then
    make clean
fi

cmake .. \
    -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
make
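compile_app in run_infer_310.sh invokes this script, but it can also be run by hand from ascend310_infer/; a minimal sketch, assuming mindspore-ascend is installed in the active Python environment so the pip lookup above resolves:

```bash
cd ascend310_infer
bash build.sh    # configures cmake with the detected MINDSPORE_PATH and builds out/main
```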
/**
* Copyright 2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INFERENCE_UTILS_H_
#define MINDSPORE_INFERENCE_UTILS_H_
#include <sys/stat.h>
#include <dirent.h>
#include <vector>
#include <string>
#include <memory>
#include "include/api/types.h"
std::vector<std::string> GetAllFiles(std::string_view dirName);
DIR *OpenDir(std::string_view dirName);
std::string RealPath(std::string_view path);
mindspore::MSTensor ReadFileToTensor(const std::string &file);
int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs);
#endif
/**
* Copyright 2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <dirent.h>
#include <gflags/gflags.h>
#include <sys/time.h>

#include <algorithm>
#include <fstream>
#include <iosfwd>
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include "inc/utils.h"
#include "include/api/context.h"
#include "include/api/model.h"
#include "include/api/serialization.h"
#include "include/api/types.h"
#include "include/dataset/execute.h"
#include "include/dataset/vision.h"
using mindspore::Context;
using mindspore::GraphCell;
using mindspore::kSuccess;
using mindspore::Model;
using mindspore::ModelType;
using mindspore::MSTensor;
using mindspore::Serialization;
using mindspore::Status;
using mindspore::dataset::Execute;
DEFINE_string(mindir_path, "", "mindir path");
DEFINE_string(input_path, ".", "input path");
DEFINE_int32(device_id, 0, "device id");
int main(int argc, char **argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  if (RealPath(FLAGS_mindir_path).empty()) {
    std::cout << "Invalid mindir" << std::endl;
    return 1;
  }

  // Build the MindIR model on the requested Ascend 310 device.
  auto context = std::make_shared<Context>();
  auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>();
  ascend310->SetDeviceID(FLAGS_device_id);
  context->MutableDeviceInfo().push_back(ascend310);
  mindspore::Graph graph;
  Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph);
  Model model;
  Status ret = model.Build(GraphCell(graph), context);
  if (ret != kSuccess) {
    std::cout << "ERROR: Build failed." << std::endl;
    return 1;
  }

  std::vector<MSTensor> model_inputs = model.GetInputs();
  if (model_inputs.empty()) {
    std::cout << "Invalid model, inputs is empty." << std::endl;
    return 1;
  }

  auto input_files = GetAllFiles(FLAGS_input_path);
  if (input_files.empty()) {
    std::cout << "ERROR: input data empty." << std::endl;
    return 1;
  }

  // Per-sample start time -> end time, used to compute the average latency.
  std::map<double, double> costTime_map;
  size_t size = input_files.size();
  for (size_t i = 0; i < size; ++i) {
    struct timeval start = {0};
    struct timeval end = {0};
    double startTimeMs;
    double endTimeMs;
    std::vector<MSTensor> inputs;
    std::vector<MSTensor> outputs;
    std::cout << "Start predict input files:" << input_files[i] << std::endl;

    // Each preprocessed .bin file becomes the model's single input tensor.
    auto input0 = ReadFileToTensor(input_files[i]);
    inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(),
                        model_inputs[0].Shape(), input0.Data().get(),
                        input0.DataSize());

    gettimeofday(&start, nullptr);
    ret = model.Predict(inputs, &outputs);
    gettimeofday(&end, nullptr);
    if (ret != kSuccess) {
      std::cout << "Predict " << input_files[i] << " failed." << std::endl;
      return 1;
    }
    startTimeMs = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000;
    endTimeMs = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000;
    costTime_map.insert(std::pair<double, double>(startTimeMs, endTimeMs));
    WriteResult(input_files[i], outputs);
  }

  // Average inference latency over all processed files.
  double average = 0.0;
  int inferCount = 0;
  for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
    double diff = iter->second - iter->first;
    average += diff;
    inferCount++;
  }
  average = average / inferCount;
  std::stringstream timeCost;
  timeCost << "NN inference cost average time: " << average
           << " ms of infer_count " << inferCount << std::endl;
  std::cout << "NN inference cost average time: " << average
            << " ms of infer_count " << inferCount << std::endl;
  std::string fileName =
      "./time_Result" + std::string("/test_perform_static.txt");
  std::ofstream fileStream(fileName.c_str(), std::ios::trunc);
  fileStream << timeCost.str();
  fileStream.close();
  costTime_map.clear();
  return 0;
}
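The binary is normally driven by run_infer_310.sh, but the three gflags defined above can also be passed directly; a sketch with placeholder paths:

```bash
./ascend310_infer/out/main \
    --mindir_path=/path/to/pagenet.mindir \
    --input_path=./preprocess_Result \
    --device_id=0
```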
/**
* Copyright 2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "inc/utils.h"
#include <algorithm>
#include <fstream>
#include <iostream>
using mindspore::DataType;
using mindspore::MSTensor;
std::vector<std::string> GetAllFiles(std::string_view dirName) {
  struct dirent *filename;
  DIR *dir = OpenDir(dirName);
  if (dir == nullptr) {
    return {};
  }
  std::vector<std::string> res;
  while ((filename = readdir(dir)) != nullptr) {
    std::string dName = std::string(filename->d_name);
    if (dName == "." || dName == ".." || filename->d_type != DT_REG) {
      continue;
    }
    res.emplace_back(std::string(dirName) + "/" + filename->d_name);
  }
  std::sort(res.begin(), res.end());
  for (auto &f : res) {
    std::cout << "image file: " << f << std::endl;
  }
  return res;
}

int WriteResult(const std::string &imageFile,
                const std::vector<MSTensor> &outputs) {
  std::string homePath = "./result_Files";
  // PAGE-Net returns multiple saliency maps; only the final one (index 9) is saved.
  size_t outputSize;
  std::shared_ptr<const void> netOutput;
  netOutput = outputs[9].Data();
  outputSize = outputs[9].DataSize();
  int pos = imageFile.rfind('/');
  std::string fileName(imageFile, pos + 1);
  fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), ".bin");
  std::string outFileName = homePath + "/" + fileName;
  FILE *outputFile = fopen(outFileName.c_str(), "wb");
  fwrite(netOutput.get(), sizeof(char), outputSize, outputFile);
  fclose(outputFile);
  outputFile = nullptr;
  return 0;
}

mindspore::MSTensor ReadFileToTensor(const std::string &file) {
  if (file.empty()) {
    std::cout << "Pointer file is nullptr" << std::endl;
    return mindspore::MSTensor();
  }
  std::ifstream ifs(file, std::ios::binary);
  if (!ifs.good()) {
    std::cout << "File: " << file << " does not exist" << std::endl;
    return mindspore::MSTensor();
  }
  if (!ifs.is_open()) {
    std::cout << "File: " << file << " open failed" << std::endl;
    return mindspore::MSTensor();
  }
  // Read the whole file into a 1-D uint8 tensor.
  ifs.seekg(0, std::ios::end);
  size_t size = ifs.tellg();
  mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8,
                             {static_cast<int64_t>(size)}, nullptr, size);
  ifs.seekg(0, std::ios::beg);
  ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
  ifs.close();
  return buffer;
}

DIR *OpenDir(std::string_view dirName) {
  if (dirName.empty()) {
    std::cout << " dirName is null ! " << std::endl;
    return nullptr;
  }
  std::string realPath = RealPath(dirName);
  struct stat s;
  lstat(realPath.c_str(), &s);
  if (!S_ISDIR(s.st_mode)) {
    std::cout << "dirName is not a valid directory !" << std::endl;
    return nullptr;
  }
  DIR *dir;
  dir = opendir(realPath.c_str());
  if (dir == nullptr) {
    std::cout << "Can not open dir " << dirName << std::endl;
    return nullptr;
  }
  std::cout << "Successfully opened the dir " << dirName << std::endl;
  return dir;
}

std::string RealPath(std::string_view path) {
  char realPathMem[PATH_MAX] = {0};
  char *realPathRet = nullptr;
  realPathRet = realpath(path.data(), realPathMem);
  if (realPathRet == nullptr) {
    std::cout << "File: " << path << " does not exist." << std::endl;
    return "";
  }
  std::string realPath(realPathMem);
  std::cout << path << " realpath is: " << realPath << std::endl;
  return realPath;
}
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""post process for 310 inference"""
import os
import argparse
import numpy as np
from PIL import Image
import mindspore.nn as nn
def parse(arg=None):
"""Define configuration of postprocess"""
parser = argparse.ArgumentParser()
parser.add_argument('--bin_path', type=str, default='./result_Files/')
parser.add_argument('--mask_path', type=str, default='./preprocess_Mask_Result/')
parser.add_argument('--output_dir', type=str, default='./postprocess_Result/')
return parser.parse_args(arg)
def load_bin_file(bin_file, shape=None, dtype="float32"):
"""Load data from bin file"""
data = np.fromfile(bin_file, dtype=dtype)
if shape:
data = np.reshape(data, shape)
return data
def save_bin_to_image(data, out_name):
"""Save bin file to image arrays"""
pic = Image.fromarray(data)
pic = pic.convert('RGB')
pic.save(out_name)
print("Successfully save image in " + out_name)
def scan_dir(bin_path):
"""Scan directory"""
out = os.listdir(bin_path)
return out
def sigmoid(z):
"""sigmoid"""
return 1/(1 + np.exp(-z))
def postprocess(args):
"""Post process bin file"""
file_list = scan_dir(args.bin_path)
loss = nn.Loss()
F_score = nn.F1()
loss.clear()
total_test_step = 0
test_data_size = len(file_list)
for file_path in file_list:
data = load_bin_file(args.bin_path + file_path, shape=(224, 224), dtype="float32")
targets1 = load_bin_file(args.mask_path + file_path, shape=(224, 224), dtype="float32")
pre_mask = data
targets1 = targets1.astype(int)
pre_mask = pre_mask.flatten()
targets1 = targets1.flatten()
pre_mask1 = pre_mask.tolist()
F_pre = np.array([[1 - i, i] for i in pre_mask1])
F_score.update(F_pre, targets1)
total_test_step = total_test_step + 1
if total_test_step % 100 == 0:
print("evaling:{}/{}".format(total_test_step, test_data_size))
F_score_result = F_score.eval()
print("F-score: ", (F_score_result[0] + F_score_result[1]) / 2)
print("---------------eval finish------------")
if __name__ == "__main__":
argms = parse()
postprocess(argms)
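Because the script only reads the dumped .bin files, the F-score can be recomputed without rerunning inference; a sketch matching the directories run_infer_310.sh creates:

```bash
python postprocess.py --bin_path='./result_Files/' --mask_path='./preprocess_Mask_Result/'
```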
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""pre process for 310 inference"""
import os
from src.mind_dataloader_final import get_test_loader
from src.model_utils.config import config
def preprocess():
test_loader = get_test_loader(config.test_img_path, config.test_gt_path, batchsize=1, testsize=config.train_size)
data_iterator = test_loader.create_tuple_iterator()
total_test_step = 0
test_data_size = test_loader.get_dataset_size()
image_root = config.test_img_path
Names = []
for data in os.listdir(image_root):
name = data.split(".")[0]
Names.append(name)
Names = sorted(Names)
for imgs, targets in data_iterator:
targets1 = targets.asnumpy()
targets1 = targets1.astype(int)
file_name = Names[total_test_step]
data_name = os.path.join("./preprocess_Result/", file_name + ".bin")
mask_name = os.path.join("./preprocess_Mask_Result/", file_name + ".bin")
imgs.asnumpy().tofile(data_name)
targets.asnumpy().tofile(mask_name)
total_test_step = total_test_step + 1
if total_test_step % 100 == 0:
print("preprocess:{}/{}".format(total_test_step, test_data_size))
if __name__ == "__main__":
preprocess()
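The script takes its dataset paths from the config, so a standalone run only needs --config_path; the output directories must exist beforehand (run_infer_310.sh creates them for you). A sketch:

```bash
mkdir -p preprocess_Result preprocess_Mask_Result
python preprocess.py --config_path=default_config_ascend.yaml
```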
#!/bin/bash
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
if [[ $# -lt 2 || $# -gt 3 ]]; then
    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [CONFIG_PATH] [DEVICE_ID]
    DEVICE_ID is optional; it can be set by the environment variable device_id, otherwise the value is zero"
    exit 1
fi

get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}
model=$(get_real_path $1)
config_path=$(get_real_path $2)
device_id=0
if [ $# == 3 ]; then
    device_id=$3
fi

echo "mindir name: "$model
echo "device id: "$device_id
echo "config_path: "$config_path

export ASCEND_HOME=/usr/local/Ascend/
if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
    export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
else
    export ASCEND_HOME=/usr/local/Ascend/latest/
    export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
fi
function preprocess_data()
{
    if [ -d preprocess_Result ]; then
        rm -rf ./preprocess_Result
    fi
    if [ -d preprocess_Mask_Result ]; then
        rm -rf ./preprocess_Mask_Result
    fi
    mkdir preprocess_Result
    mkdir preprocess_Mask_Result
    python ../preprocess.py --config_path=$config_path &> preprocess.log
}

function compile_app()
{
    cd ../ascend310_infer/ || exit
    bash build.sh &> build.log
}

function infer()
{
    cd - || exit
    if [ -d result_Files ]; then
        rm -rf ./result_Files
    fi
    if [ -d time_Result ]; then
        rm -rf ./time_Result
    fi
    mkdir result_Files
    mkdir time_Result
    ../ascend310_infer/out/main --mindir_path=$model --input_path=./preprocess_Result --device_id=$device_id &> infer.log
}

function post_process()
{
    if [ -d postprocess_Result ]; then
        rm -rf ./postprocess_Result
    fi
    mkdir postprocess_Result
    python ../postprocess.py --bin_path='./result_Files/' --mask_path='./preprocess_Mask_Result/' --output_dir='./postprocess_Result/' &> postprocess.log
}
preprocess_data
if [ $? -ne 0 ]; then
    echo "preprocess dataset failed"
    exit 1
fi
compile_app
if [ $? -ne 0 ]; then
    echo "compile app code failed"
    exit 1
fi
infer
if [ $? -ne 0 ]; then
    echo "execute inference failed"
    exit 1
fi
post_process
if [ $? -ne 0 ]; then
    echo "execute post_process failed"
    exit 1
fi
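An end-to-end run from the scripts/ directory might look like the following (placeholder paths; DEVICE_ID may be omitted and then defaults to 0):

```bash
bash run_infer_310.sh ../pagenet.mindir ../default_config_ascend.yaml 0
```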