Commit 3f5dba2c authored by Hunter

RefineDet 310: DVPP inference path deleted and formatting corrected

parent b538d4ee
@@ -389,10 +389,9 @@ python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [
 ```shell
 # Ascend310 inference
-bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
+bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
 ```
-- `DVPP` is required and must be chosen from ["DVPP", "CPU"] (case-insensitive).
 - `DEVICE_ID` is optional; the default value is 0.
 ### Results
-aipp_op {
-aipp_mode : static
-input_format : YUV420SP_U8
-related_input_rank : 0
-csc_switch : true
-rbuv_swap_switch : false
-matrix_r0c0 : 256
-matrix_r0c1 : 0
-matrix_r0c2 : 359
-matrix_r1c0 : 256
-matrix_r1c1 : -88
-matrix_r1c2 : -183
-matrix_r2c0 : 256
-matrix_r2c1 : 454
-matrix_r2c2 : 0
-input_bias_0 : 0
-input_bias_1 : 128
-input_bias_2 : 128
-mean_chn_0 : 124
-mean_chn_1 : 117
-mean_chn_2 : 104
-var_reci_chn_0 : 0.0171247538316637
-var_reci_chn_1 : 0.0175070028011204
-var_reci_chn_2 : 0.0174291938997821
-}
\ No newline at end of file
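For readers comparing the two preprocessing paths: the mean/variance fields of this deleted AIPP config describe a per-channel affine normalization, and the `var_reci_chn_*` values are simply reciprocals of the per-channel standard deviations commonly used for ImageNet-style normalization. This is an observation from the numbers above, not something stated elsewhere in the commit; the exact CPU-side `Normalize` constants sit outside the hunks shown here.

```latex
% Normalization implied by the deleted aipp.cfg, and the reciprocal check:
\[
\text{out}_c = (\text{in}_c - \text{mean\_chn}_c)\cdot\text{var\_reci\_chn}_c,
\qquad
\tfrac{1}{0.0171247538316637}\approx 58.395,\quad
\tfrac{1}{0.0175070028011204}\approx 57.120,\quad
\tfrac{1}{0.0174291938997821}\approx 57.375 .
\]
```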
@@ -25,5 +25,5 @@ if [ -f "Makefile" ]; then
 fi
 cmake .. \
-    -DMINDSPORE_PATH="`pip3 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+    -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
 make
@@ -43,7 +43,6 @@ using mindspore::kSuccess;
 using mindspore::MSTensor;
 using mindspore::dataset::Execute;
 using mindspore::dataset::TensorTransform;
-using mindspore::dataset::vision::DvppDecodeResizeJpeg;
 using mindspore::dataset::vision::Resize;
 using mindspore::dataset::vision::HWC2CHW;
 using mindspore::dataset::vision::Normalize;
@@ -52,8 +51,6 @@ using mindspore::dataset::vision::Decode;
 DEFINE_string(mindir_path, "", "mindir path");
 DEFINE_string(dataset_path, ".", "dataset path");
 DEFINE_int32(device_id, 0, "device id");
-DEFINE_string(aipp_path, "./aipp.cfg", "aipp path");
-DEFINE_string(cpu_dvpp, "DVPP", "cpu or dvpp process");
 DEFINE_int32(image_height, 320, "image height");
 DEFINE_int32(image_width, 320, "image width");
@@ -71,14 +68,6 @@ int main(int argc, char **argv) {
   context->MutableDeviceInfo().push_back(ascend310);
   mindspore::Graph graph;
   Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph);
-  if (FLAGS_cpu_dvpp == "DVPP") {
-    if (RealPath(FLAGS_aipp_path).empty()) {
-      std::cout << "Invalid aipp path" << std::endl;
-      return 1;
-    } else {
-      ascend310->SetInsertOpConfigPath(FLAGS_aipp_path);
-    }
-  }
   Model model;
   Status ret = model.Build(GraphCell(graph), context);
@@ -104,14 +93,6 @@ int main(int argc, char **argv) {
     std::vector<MSTensor> inputs;
     std::vector<MSTensor> outputs;
     std::cout << "Start predict input files:" << all_files[i] << std::endl;
-    if (FLAGS_cpu_dvpp == "DVPP") {
-      auto resizeShape = {static_cast<uint32_t>(FLAGS_image_height), static_cast<uint32_t>(FLAGS_image_width)};
-      Execute resize_op(std::shared_ptr<DvppDecodeResizeJpeg>(new DvppDecodeResizeJpeg(resizeShape)));
-      auto imgDvpp = std::make_shared<MSTensor>();
-      resize_op(ReadFileToTensor(all_files[i]), imgDvpp.get());
-      inputs.emplace_back(imgDvpp->Name(), imgDvpp->DataType(), imgDvpp->Shape(),
-                          imgDvpp->Data().get(), imgDvpp->DataSize());
-    } else {
     std::shared_ptr<TensorTransform> decode(new Decode());
     std::shared_ptr<TensorTransform> hwc2chw(new HWC2CHW());
     std::shared_ptr<TensorTransform> normalize(
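With the DVPP branch removed, every image is now decoded and preprocessed on the host through the transforms whose construction begins in the context lines above. Below is a minimal sketch of how that pipeline composes with `Execute`, reusing the file's own `ReadFileToTensor` helper and the using-declarations at the top of main.cc; the normalization constants are assumed ImageNet statistics, since the actual arguments to `Normalize` fall outside the lines shown in this hunk.

```cpp
// Sketch of the CPU preprocessing path (assumptions noted above, not the verbatim file):
// JPEG decode -> resize to image_height x image_width -> per-channel normalize -> HWC to CHW.
std::shared_ptr<TensorTransform> decode(new Decode());
std::shared_ptr<TensorTransform> resize(
    new Resize({FLAGS_image_height, FLAGS_image_width}));   // 320x320 by default
std::shared_ptr<TensorTransform> normalize(
    new Normalize({123.675, 116.28, 103.53},                // assumed channel means
                  {58.395, 57.120, 57.375}));               // assumed channel stds
std::shared_ptr<TensorTransform> hwc2chw(new HWC2CHW());

Execute preprocess({decode, resize, normalize, hwc2chw});
MSTensor img;
preprocess(ReadFileToTensor(all_files[i]), &img);           // img then backs inputs.emplace_back(...)
```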
@@ -129,7 +110,6 @@ int main(int argc, char **argv) {
     }
     inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(),
                         img.Data().get(), img.DataSize());
-    }
     gettimeofday(&start, nullptr);
     ret = model.Predict(inputs, &outputs);
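The timing and error handling around this `Predict` call are outside the lines shown in the hunk. A hedged sketch of how one call is typically closed out; the `end` variable and the millisecond arithmetic below are illustrative, not taken from the file:

```cpp
// Illustrative epilogue for one Predict call (assumed shape, not the verbatim file).
struct timeval end = {};
gettimeofday(&end, nullptr);
if (ret != kSuccess) {
  std::cout << "Predict " << all_files[i] << " failed." << std::endl;
  return 1;
}
double start_ms = start.tv_sec * 1000.0 + start.tv_usec / 1000.0;
double end_ms = end.tv_sec * 1000.0 + end.tv_usec / 1000.0;
std::cout << "Predict " << all_files[i] << " cost " << (end_ms - start_ms) << " ms" << std::endl;
```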
@@ -14,9 +14,8 @@
 # limitations under the License.
 # ============================================================================
-if [[ $# -lt 3 || $# -gt 4 ]]; then
-    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
-    DVPP is mandatory, and must choose from [DVPP|CPU], it's case-insensitive
+if [[ $# -lt 2 || $# -gt 3 ]]; then
+    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
     DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
     exit 1
 fi
@@ -30,29 +29,29 @@ get_real_path(){
 }
 model=$(get_real_path $1)
 data_path=$(get_real_path $2)
-DVPP=${3^^}
 device_id=0
-if [ $# == 4 ]; then
-    device_id=$4
+if [ $# == 3 ]; then
+    device_id=$3
 fi
 echo "mindir name: "$model
 echo "dataset path: "$data_path
-echo "image process mode: "$DVPP
+echo "image process mode: CPU"
 echo "device id: "$device_id
 export ASCEND_HOME=/usr/local/Ascend/
 if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
-    export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
-    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
     export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
-    export PYTHONPATH=${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
     export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
 else
-    export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
-    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
-    export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_HOME=/usr/local/Ascend/latest/
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
     export ASCEND_OPP_PATH=$ASCEND_HOME/opp
 fi
@@ -73,19 +72,12 @@ function infer()
     fi
     mkdir result_Files
     mkdir time_Result
-    if [ "$DVPP" == "DVPP" ];then
-        ./ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id --cpu_dvpp=$DVPP --aipp_path=./ascend310_infer/aipp.cfg --image_height=320 --image_width=320 &> infer.log
-    elif [ "$DVPP" == "CPU" ]; then
-        ./ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --cpu_dvpp=$DVPP --device_id=$device_id --image_height=320 --image_width=320 &> infer.log
-    else
-        echo "image process mode must be in [DVPP|CPU]"
-        exit 1
-    fi
+    ./ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id --image_height=320 --image_width=320 &> infer.log
 }

 function cal_acc()
 {
-    python3 ./postprocess.py --result_path=./result_Files --img_path=$data_path --drop &> acc.log
+    python ./postprocess.py --result_path=./result_Files --img_path=$data_path --drop &> acc.log
 }

 compile_app