diff --git a/research/cv/Auto-DeepLab/README.md b/research/cv/Auto-DeepLab/README.md
index 324651f41c1565c6f48f977c04a57749696d6e62..80956de1a8ce552f8ac35601c243f55eba48cf1e 100644
--- a/research/cv/Auto-DeepLab/README.md
+++ b/research/cv/Auto-DeepLab/README.md
@@ -22,10 +22,12 @@
             - [Ascend](#ascend-1)
     - [Evaluation](#evaluation)
     - [Export](#export)
+    - [Inference](#inference)
 - [Model Description](#model-description)
     - [Performance](#performance)
         - [Training Accuracy](#training-accuracy)
         - [Distributed Training Performance](#distributed-training-performance)
+        - [Inference Performance on Ascend310](#inference-performance-on-ascend310)
 - [ModelZoo Homepage](#modelzoo-homepage)
 
 # [Auto-DeepLab Description](#contents)
@@ -350,6 +352,15 @@ bash scripts/run_eval.sh [DATASET_PATH] [CKPT_FILE] [OUTPUT_PATH]
 python export.py --filter_multiplier=20 --parallel=False --ckpt_name=[CKPT_NAME]
 ```
 
+## [Inference](#contents)
+
+- Inference on an Ascend 310 device
+
+```bash
+cd /PATH/TO/Auto-DeepLab/scripts
+bash run_infer_310.sh /PATH/TO/MINDIR/Auto-DeepLab-s.mindir /PATH/TO/DATASET/cityscapes/ 0
+```
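+
+The three arguments are the exported MindIR model, the Cityscapes dataset root and the Ascend 310 device id. The script builds the C++ inference program under `ascend310_infer`, runs it over the validation images, and writes the raw prediction binaries to `scripts/result_Files`, timing statistics to `scripts/time_Result`, and the mIOU result (plus rendered segmentation maps) to `scripts/acc.log`, `scripts/output` and `scripts/output_img`.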
+
 # [Model Description](#contents)
 
 ## [Performance](#contents)
@@ -375,7 +386,7 @@ be 16 or larger. Simply, we set batch size = 16 and Epoch 1300, 2700, 4000 corre
 | Resource                   | Ascend 910 * 8; CPU 2.60GHz, 192cores; Memory 755G          |
 | uploaded Date              | 11/11/2021 (month/day/year)                                 |
 | MindSpore Version          | 1.3.0                                                       |
-| Dataset                    | Cityscapes                                                  |
+| Dataset                    | Cityscapes (cropped 769*769)                                |
 | Training Parameters        | epoch=(1300, 2700, 4000), batch_size = 16, lr=0.05, bn_momentum=0.995                   |
 | Optimizer                  | Momentum                                                    |
 | Loss Function              | Cross Entropy with Online Hard Example Mining               |
@@ -384,6 +395,16 @@ be 16 or larger. Simply, we set batch size = 16 and Epoch 1300, 2700, 4000 corre
 | Total time                 | (42, 82, 125) hour (8pcs)                                   |
 | Checkpoint                 | 85.37m (.ckpt file)                                         |
 
+### Inference Performance on Ascend310
+
+| Parameters                 | Auto-DeepLab                         |
+| -------------------------- | ------------------------------------ |
+| Resource                   | Ascend 310 * 1                       |
+| uploaded Date              | 12/06/2021 (month/day/year)          |
+| MindSpore Version          | 1.3.0                                |
+| Dataset                    | Cityscapes (full image 1024*2048)    |
+| Speed                      | 1677.48 ms/img                       |
+
 # [ModelZoo Homepage](#contents)
 
 Please check the official [homepage](https://gitee.com/mindspore/models).
diff --git a/research/cv/Auto-DeepLab/ascend310_infer/CMakeLists.txt b/research/cv/Auto-DeepLab/ascend310_infer/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..435823554c506455be6098283942611ae974f4bf
--- /dev/null
+++ b/research/cv/Auto-DeepLab/ascend310_infer/CMakeLists.txt
@@ -0,0 +1,15 @@
+cmake_minimum_required(VERSION 3.14.1)
+project(Ascend310Infer)
+add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
+set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
+option(MINDSPORE_PATH "mindspore install path" "")
+include_directories(${MINDSPORE_PATH})
+include_directories(${MINDSPORE_PATH}/include)
+include_directories(${PROJECT_SRC_ROOT})
+find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
+find_package(gflags REQUIRED)
+file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
+
+add_executable(main src/main.cc src/utils.cc)
+target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
diff --git a/research/cv/Auto-DeepLab/ascend310_infer/build.sh b/research/cv/Auto-DeepLab/ascend310_infer/build.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3a0b610858ddc0faafc0fa12571a9ce93405f164
--- /dev/null
+++ b/research/cv/Auto-DeepLab/ascend310_infer/build.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+export ASCEND_HOME=/usr/local/Ascend/
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+
+
+if [ ! -d out ]; then
+  mkdir out
+fi
+cd out || exit
+cmake .. \
+    -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+make
diff --git a/research/cv/Auto-DeepLab/ascend310_infer/inc/utils.h b/research/cv/Auto-DeepLab/ascend310_infer/inc/utils.h
new file mode 100644
index 0000000000000000000000000000000000000000..abeb8fcbf11a042e6fefafa5868166d975e44dfb
--- /dev/null
+++ b/research/cv/Auto-DeepLab/ascend310_infer/inc/utils.h
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_INFERENCE_UTILS_H_
+#define MINDSPORE_INFERENCE_UTILS_H_
+
+#include <sys/stat.h>
+#include <dirent.h>
+#include <vector>
+#include <string>
+#include <memory>
+#include "include/api/types.h"
+
+std::vector<std::string> GetAllFiles(std::string_view dirName);
+DIR *OpenDir(std::string_view dirName);
+std::string RealPath(std::string_view path);
+mindspore::MSTensor ReadFileToTensor(const std::string &file);
+int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs);
+#endif
diff --git a/research/cv/Auto-DeepLab/ascend310_infer/src/main.cc b/research/cv/Auto-DeepLab/ascend310_infer/src/main.cc
new file mode 100644
index 0000000000000000000000000000000000000000..f8aa4b4352a8d5833e0aaa3285575faa42780642
--- /dev/null
+++ b/research/cv/Auto-DeepLab/ascend310_infer/src/main.cc
@@ -0,0 +1,177 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/time.h>
+#include <gflags/gflags.h>
+#include <dirent.h>
+#include <iostream>
+#include <string>
+#include <algorithm>
+#include <iosfwd>
+#include <vector>
+#include <fstream>
+#include <sstream>
+#include <map>
+#include <memory>
+
+#include "../inc/utils.h"
+#include "include/dataset/execute.h"
+#include "include/dataset/transforms.h"
+#include "include/dataset/vision.h"
+#include "include/dataset/vision_ascend.h"
+#include "include/api/types.h"
+#include "include/api/model.h"
+#include "include/api/serialization.h"
+#include "include/api/context.h"
+
+using mindspore::Serialization;
+using mindspore::Model;
+using mindspore::Context;
+using mindspore::Status;
+using mindspore::ModelType;
+using mindspore::Graph;
+using mindspore::GraphCell;
+using mindspore::kSuccess;
+using mindspore::MSTensor;
+using mindspore::DataType;
+using mindspore::dataset::Execute;
+using mindspore::dataset::TensorTransform;
+using mindspore::dataset::vision::Decode;
+using mindspore::dataset::vision::Resize;
+using mindspore::dataset::vision::Rescale;
+using mindspore::dataset::vision::Normalize;
+using mindspore::dataset::vision::HWC2CHW;
+using mindspore::dataset::vision::HorizontalFlip;
+using mindspore::dataset::vision::SwapRedBlue;
+using mindspore::dataset::transforms::TypeCast;
+
+DEFINE_string(model_path, "/PATH/TO/Auto-DeepLab-s.mindir", "model path");
+DEFINE_string(dataset_path, "/PATH/TO/Cityscapes/leftImg8bit/val", "dataset path");
+DEFINE_int32(device_id, 0, "device id");
+DEFINE_string(precision_mode, "allow_fp32_to_fp16", "precision mode");
+DEFINE_string(op_select_impl_mode, "", "op select impl mode");
+DEFINE_string(device_target, "Ascend310", "device target");
+
+int main(int argc, char **argv) {
+    gflags::ParseCommandLineFlags(&argc, &argv, true);
+    if (RealPath(FLAGS_model_path).empty()) {
+        std::cout << "Invalid model" << std::endl;
+        return 1;
+    }
+
+    auto context = std::make_shared<Context>();
+    auto ascend310_info = std::make_shared<mindspore::Ascend310DeviceInfo>();
+    ascend310_info->SetDeviceID(FLAGS_device_id);
+    context->MutableDeviceInfo().push_back(ascend310_info);
+
+    Graph graph;
+    Status ret = Serialization::Load(FLAGS_model_path, ModelType::kMindIR, &graph);
+    if (ret != kSuccess) {
+        std::cout << "Load model failed." << std::endl;
+        return 1;
+    }
+
+    Model model;
+    ret = model.Build(GraphCell(graph), context);
+    if (ret != kSuccess) {
+        std::cout << "ERROR: Build failed." << std::endl;
+        return 1;
+    }
+
+    std::vector<MSTensor> modelInputs = model.GetInputs();
+
+    auto all_files = GetAllFiles(FLAGS_dataset_path);
+    if (all_files.empty()) {
+        std::cout << "ERROR: no input data." << std::endl;
+        return 1;
+    }
+
+    auto decode = Decode();
+    auto normalize = Normalize({123.675, 116.28, 103.53}, {58.395, 57.12, 57.375});
+    auto hwc2chw = HWC2CHW();
+    auto swapredblue = SwapRedBlue();
+    auto flip = HorizontalFlip();
+    auto typeCast = TypeCast(DataType::kNumberTypeFloat32);
+
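+    // Preprocessing pipelines: decode the PNG and swap channels, then normalize and convert HWC->CHW;
+    // the second pipeline also flips the image horizontally for test-time flip augmentation.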
+    mindspore::dataset::Execute transformDecode({decode, swapredblue});
+    mindspore::dataset::Execute transform({normalize, hwc2chw});
+    mindspore::dataset::Execute transformFlip({normalize, flip, hwc2chw});
+    mindspore::dataset::Execute transformCast(typeCast);
+
+    std::map<double, double> costTime_map;
+
+    size_t size = all_files.size();
+    for (size_t i = 0; i < size; ++i) {
+        struct timeval start;
+        struct timeval end;
+        double startTime_ms;
+        double endTime_ms;
+        std::vector<MSTensor> inputs;
+        std::vector<MSTensor> flippedInputs;
+        std::vector<MSTensor> outputs;
+        std::vector<MSTensor> flippedOutputs;
+
+        std::cout << "Start predict input files:" << all_files[i] << std::endl;
+        mindspore::MSTensor image = ReadFileToTensor(all_files[i]);
+        mindspore::MSTensor flippedImage;
+
+        ret = transformDecode(image, &image);
+        if (ret != kSuccess) {
+            std::cout << "ERROR: Decode failed." << std::endl;
+            return 1;
+        }
+        std::vector<int64_t> shape = image.Shape();
+        transformFlip(image, &flippedImage);
+        transform(image, &image);
+
+        inputs.emplace_back(modelInputs[0].Name(), modelInputs[0].DataType(), modelInputs[0].Shape(),
+                            image.Data().get(), image.DataSize());
+        flippedInputs.emplace_back(modelInputs[0].Name(), modelInputs[0].DataType(), modelInputs[0].Shape(),
+                                   flippedImage.Data().get(), flippedImage.DataSize());
+
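+        // Run both forward passes (original and horizontally flipped input) and time them together.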
+        gettimeofday(&start, NULL);
+        model.Predict(inputs, &outputs);
+        model.Predict(flippedInputs, &flippedOutputs);
+        gettimeofday(&end, NULL);
+
+        startTime_ms = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000;
+        endTime_ms = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000;
+        costTime_map.insert(std::pair<double, double>(startTime_ms, endTime_ms));
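+        // Dump both logit tensors as .bin files; postprocess.py flips the flipped logits back and sums the two.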
+        std::string flippedName = all_files[i];
+        flippedName.replace(flippedName.find('.'), flippedName.size() - flippedName.find('.'), "_flip.png");
+        WriteResult(all_files[i], outputs);
+        WriteResult(flippedName, flippedOutputs);
+    }
+    double average = 0.0;
+    int infer_cnt = 0;
+
+    for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
+        double diff = 0.0;
+        diff = iter->second - iter->first;
+        average += diff;
+        infer_cnt++;
+    }
+
+    average = average / infer_cnt;
+
+    std::stringstream timeCost;
+    timeCost << "NN inference cost average time: " << average << " ms of infer_count " << infer_cnt << std::endl;
+    std::cout << "NN inference cost average time: " << average << "ms of infer_count " << infer_cnt << std::endl;
+    std::string file_name = "./time_Result" + std::string("/test_perform_static.txt");
+    std::ofstream file_stream(file_name.c_str(), std::ios::trunc);
+    file_stream << timeCost.str();
+    file_stream.close();
+    costTime_map.clear();
+    return 0;
+}
diff --git a/research/cv/Auto-DeepLab/ascend310_infer/src/utils.cc b/research/cv/Auto-DeepLab/ascend310_infer/src/utils.cc
new file mode 100644
index 0000000000000000000000000000000000000000..ed7208ba7373986bb0c3e43ee380b3084449c9c5
--- /dev/null
+++ b/research/cv/Auto-DeepLab/ascend310_infer/src/utils.cc
@@ -0,0 +1,145 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inc/utils.h"
+
+#include <fstream>
+#include <algorithm>
+#include <iostream>
+#include <climits>
+#include <cstdio>
+#include <cstdlib>
+
+using mindspore::MSTensor;
+using mindspore::DataType;
+
+
+std::vector<std::string> GetAllFiles(std::string_view dirName) {
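+    // Collect regular files directly under dirName and in its immediate subdirectories
+    // (Cityscapes validation images are grouped into one folder per city).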
+    struct dirent *filename;
+    DIR *dir = OpenDir(dirName);
+    if (dir == nullptr) {
+        return {};
+    }
+    std::vector<std::string> dirs;
+    std::vector<std::string> files;
+    while ((filename = readdir(dir)) != nullptr) {
+        std::string dName = std::string(filename->d_name);
+        if (dName == "." || dName == "..") {
+            continue;
+        } else if (filename->d_type == DT_DIR) {
+            dirs.emplace_back(std::string(dirName) + "/" + filename->d_name);
+        } else if (filename->d_type == DT_REG) {
+            files.emplace_back(std::string(dirName) + "/" + filename->d_name);
+        } else {
+            continue;
+        }
+    }
+
+    for (auto d : dirs) {
+        dir = OpenDir(d);
+        while ((filename = readdir(dir)) != nullptr) {
+            std::string dName = std::string(filename->d_name);
+            if (dName == "." || dName == ".." || filename->d_type != DT_REG) {
+                continue;
+            }
+            files.emplace_back(std::string(d) + "/" + filename->d_name);
+        }
+    }
+    std::sort(files.begin(), files.end());
+    for (auto &f : files) {
+        std::cout << "image file: " << f << std::endl;
+    }
+    return files;
+}
+
+int WriteResult(const std::string& imageFile, const std::vector<MSTensor> &outputs) {
+    std::string homePath = "./result_Files";
+    for (size_t i = 0; i < outputs.size(); ++i) {
+        size_t outputSize;
+        std::shared_ptr<const void> netOutput = outputs[i].Data();
+        outputSize = outputs[i].DataSize();
+        int pos = imageFile.rfind('/');
+        std::string fileName(imageFile, pos + 1);
+        fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin");
+        std::string outFileName = homePath + "/" + fileName;
+        FILE *outputFile = fopen(outFileName.c_str(), "wb");
+        fwrite(netOutput.get(), outputSize, sizeof(char), outputFile);
+        fclose(outputFile);
+        outputFile = nullptr;
+    }
+    return 0;
+}
+
+mindspore::MSTensor ReadFileToTensor(const std::string &file) {
+    if (file.empty()) {
+        std::cout << "Pointer file is nullptr" << std::endl;
+        return mindspore::MSTensor();
+    }
+
+    std::ifstream ifs(file);
+    if (!ifs.good()) {
+        std::cout << "File: " << file << " is not exist" << std::endl;
+        return mindspore::MSTensor();
+    }
+
+    if (!ifs.is_open()) {
+        std::cout << "File: " << file << "open failed" << std::endl;
+        return mindspore::MSTensor();
+    }
+
+    ifs.seekg(0, std::ios::end);
+    size_t size = ifs.tellg();
+    mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)},
+    nullptr, size);
+
+    ifs.seekg(0, std::ios::beg);
+    ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
+    ifs.close();
+
+    return buffer;
+}
+
+DIR *OpenDir(std::string_view dirName) {
+    if (dirName.empty()) {
+        std::cout << " dirName is null ! " << std::endl;
+        return nullptr;
+    }
+    std::string realPath = RealPath(dirName);
+    struct stat s;
+    lstat(realPath.c_str(), &s);
+    if (!S_ISDIR(s.st_mode)) {
+        std::cout << "dirName is not a valid directory !" << std::endl;
+        return nullptr;
+    }
+    DIR *dir = opendir(realPath.c_str());
+    if (dir == nullptr) {
+        std::cout << "Can not open dir " << dirName << std::endl;
+        return nullptr;
+    }
+    std::cout << "Successfully opened the dir " << dirName << std::endl;
+    return dir;
+}
+
+std::string RealPath(std::string_view path) {
+    char realPathMem[PATH_MAX] = {0};
+    char *realPathRet = nullptr;
+    realPathRet = realpath(path.data(), realPathMem);
+    if (realPathRet == nullptr) {
+        std::cout << "File: " << path << " is not exist.";
+        return "";
+    }
+
+    std::string realPath(realPathMem);
+    std::cout << path << " realpath is: " << realPath << std::endl;
+    return realPath;
+}
diff --git a/research/cv/Auto-DeepLab/eval.py b/research/cv/Auto-DeepLab/eval.py
index 4faa9fb34baf48da83080588753b54a16d270bcc..597a77be3d342ce2f08b301c58bf1281354ca113 100644
--- a/research/cv/Auto-DeepLab/eval.py
+++ b/research/cv/Auto-DeepLab/eval.py
@@ -142,5 +142,5 @@ def evaluate():
     return 0
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     evaluate()
diff --git a/research/cv/Auto-DeepLab/export.py b/research/cv/Auto-DeepLab/export.py
index aa0127440cba5b3837da2c809c0ace9fa307bbf0..cea380224cedf2209f2183eec28ed1fe0eb5c674 100644
--- a/research/cv/Auto-DeepLab/export.py
+++ b/research/cv/Auto-DeepLab/export.py
@@ -23,7 +23,7 @@ from src.utils.utils import BuildEvalNetwork
 
 context.set_context(mode=context.GRAPH_MODE, save_graphs=False)
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     args = obtain_autodeeplab_args()
     args.total_iters = 0
 
diff --git a/research/cv/Auto-DeepLab/postprocess.py b/research/cv/Auto-DeepLab/postprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..18e559d014db29138b4a757f61fd1f6fd6722f0e
--- /dev/null
+++ b/research/cv/Auto-DeepLab/postprocess.py
@@ -0,0 +1,137 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""Evaluate mIOU and Pixel accuracy"""
+import os
+import argparse
+import ast
+
+import cv2
+from PIL import Image
+import numpy as np
+
+from src.utils.utils import fast_hist
+from build_mindrecord import encode_segmap
+
+
+def decode_segmap(pred):
+    """decode_segmap"""
+    mask = np.uint8(pred)
+
+    num_classes = 19
+    valid_classes = [7, 8, 11, 12, 13, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 31, 32, 33]
+    rank_classes = range(num_classes)
+
+    class_map = dict(zip(rank_classes, valid_classes))
+
+    for _rank in rank_classes:
+        mask[mask == _rank] = class_map[_rank]
+
+    return mask
+
+def get_color(npimg):
+    """get_color"""
+    cityspallete = [
+        128, 64, 128,
+        244, 35, 232,
+        70, 70, 70,
+        102, 102, 156,
+        190, 153, 153,
+        153, 153, 153,
+        250, 170, 30,
+        220, 220, 0,
+        107, 142, 35,
+        152, 251, 152,
+        0, 130, 180,
+        220, 20, 60,
+        255, 0, 0,
+        0, 0, 142,
+        0, 0, 70,
+        0, 60, 100,
+        0, 80, 100,
+        0, 0, 230,
+        119, 11, 32,
+    ]
+    img = Image.fromarray(npimg.astype('uint8'), "P")
+    img.putpalette(cityspallete)
+    out_img = np.array(img.convert('RGB'))
+    return out_img
+
+def infer(args):
+    """infer"""
+    images_base = os.path.join(args.dataset_path, 'leftImg8bit/val')
+    annotations_base = os.path.join(args.dataset_path, 'gtFine/val')
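+    # Confusion matrix accumulated over the whole validation set; mIOU is derived from it at the end.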
+    hist = np.zeros((args.num_classes, args.num_classes))
+    for root, _, files in os.walk(images_base):
+        for filename in files:
+            if filename.endswith('.png'):
+                print("start infer ", filename)
+                file_name = filename.split('.')[0]
+
+                prob_file = os.path.join(args.result_path, file_name + "_0.bin")
+                flipped_prob_file = os.path.join(args.result_path, file_name + "_flip_0.bin")
+                prob = np.fromfile(prob_file, dtype=np.float32)
+
+                prob = prob.reshape(1, 19, 1024, 2048)
+                flipped_prob = np.fromfile(flipped_prob_file, dtype=np.float32).reshape(1, 19, 1024, 2048)
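+                # Test-time flip augmentation: flip the flipped logits back along the width axis and add them.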
+                pred = (prob + flipped_prob[:, :, :, ::-1])
+
+                pred = pred.argmax(1).astype(np.uint8)
+                folder_name = root.split(os.sep)[-1]
+
+                if args.cal_acc:
+                    gtFine_name = filename.replace('leftImg8bit', 'gtFine_labelIds')
+                    label_file = os.path.join(annotations_base, folder_name, gtFine_name)
+                    label = np.array(cv2.imread(label_file, cv2.IMREAD_GRAYSCALE), np.uint8)
+                    label = encode_segmap(label, 255)
+                    hist = hist + fast_hist(pred.copy().flatten(), label.flatten(), args.num_classes)
+
+                if args.save_img:
+                    # labelIds image
+                    predImg_name = filename.replace('leftImg8bit', 'predImg_labelIds')
+                    predImg_root = os.path.join(args.output_path, folder_name)
+                    predImg_file = os.path.join(predImg_root, predImg_name)
+                    if not os.path.isdir(predImg_root):
+                        os.makedirs(predImg_root)
+                    decode_pred = decode_segmap(pred.copy().squeeze(0))
+                    cv2.imwrite(predImg_file, decode_pred, [cv2.IMWRITE_PNG_COMPRESSION])
+
+                    # colorful segmentation image
+                    colorImg_name = filename.replace('leftImg8bit', 'predImg_colorful')
+                    colorImg_root = args.output_path
+                    colorImg_root = os.path.join(colorImg_root.replace('output', 'output_img'), folder_name)
+                    colorImg_file = os.path.join(colorImg_root, colorImg_name)
+                    if not os.path.isdir(colorImg_root):
+                        os.makedirs(colorImg_root)
+                    color_pred = get_color(pred.copy().squeeze(0))
+                    color_pred = cv2.cvtColor(np.asarray(color_pred), cv2.COLOR_RGB2BGR)
+                    cv2.imwrite(colorImg_file, color_pred, [cv2.IMWRITE_PNG_COMPRESSION])
+
+    if args.cal_acc:
+        miou = np.diag(hist) / (hist.sum(0) + hist.sum(1) - np.diag(hist) + 1e-10)
+        miou = round(np.nanmean(miou) * 100, 2)
+        print("mIOU = ", miou, "%")
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Auto-DeepLab Inference post-process")
+    parser.add_argument("--dataset_path", type=str, default="", help="dataset path for evaluation")
+    parser.add_argument("--num_classes", type=int, default=19)
+    parser.add_argument("--device_id", type=int, default=0, help="Device id, default: 0.")
+    parser.add_argument("--result_path", type=str, default="", help="Prob bin file path.")
+    parser.add_argument("--output_path", type=str, default="", help="Output path.")
+    parser.add_argument("--save_img", type=ast.literal_eval, default=True, help="Whether save pics after inference.")
+    parser.add_argument("--cal_acc", type=ast.literal_eval, default=True, help="Calculate mIOU or not.")
+    Args = parser.parse_args()
+    infer(Args)
diff --git a/research/cv/Auto-DeepLab/scripts/run_infer_310.sh b/research/cv/Auto-DeepLab/scripts/run_infer_310.sh
new file mode 100644
index 0000000000000000000000000000000000000000..404879ac59bb4cfe62e2123ab47ce49c3ca7da6f
--- /dev/null
+++ b/research/cv/Auto-DeepLab/scripts/run_infer_310.sh
@@ -0,0 +1,110 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+if [ $# -lt 2 ] || [ $# -gt 3 ]; then
+    echo "Usage: bash run_infer_310.sh [MODEL_PATH] [DATA_PATH] [DEVICE_ID]
+    DEVICE_ID is optional, default value is 0"
+    exit 1
+fi
+
+get_real_path(){
+  if [ "${1:0:1}" == "/" ]; then
+    echo "$1"
+  else
+    echo "$(realpath -m $PWD/$1)"
+  fi
+}
+
+MODEL=$(get_real_path $1)
+DATA_PATH=$(get_real_path $2)
+DEVICE_ID=${3:-0}
+
+echo "$MODEL"
+echo "$DATA_PATH"
+echo "$DEVICE_ID"
+
+export ASCEND_HOME=/usr/local/Ascend/
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+
+function compile_app()
+{
+    cd ../ascend310_infer || exit
+    if [ -f "Makefile" ]; then
+        make clean
+    fi
+    bash build.sh &> build.log
+
+    if [ $? -ne 0 ]; then
+        echo "compile app code failed"
+        exit 1
+    fi
+    cd - || exit
+}
+
+function infer()
+{
+    if [ -d result_Files ]; then
+        rm -rf ./result_Files
+    fi
+    if [ -d time_Result ]; then
+        rm -rf ./time_Result
+    fi
+    mkdir result_Files
+    mkdir time_Result
+    img_path=$DATA_PATH/leftImg8bit/val
+    ../ascend310_infer/out/main --model_path="$MODEL" --dataset_path="$img_path" --device_id=$DEVICE_ID &> infer.log
+
+    if [ $? -ne 0 ]; then
+        echo "execute inference failed"
+        exit 1
+    fi
+}
+
+function cal_acc()
+{
+    if [ -d output ]; then
+        rm -rf ./output
+    fi
+    if [ -d output_img ]; then
+        rm -rf ./output_img
+    fi
+    mkdir output
+    mkdir output_img
+    gt_path=$DATA_PATH
+    RESULT_FILES=$(realpath -m "./result_Files")
+    OUTPUT_PATH=$(realpath -m "./output")
+    python ../postprocess.py --dataset_path="$gt_path" --result_path="${RESULT_FILES}" --output_path="${OUTPUT_PATH}" &> acc.log
+    if [ $? -ne 0 ]; then
+        echo "calculate accuracy failed"
+        exit 1
+    fi
+
+}
+
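+# Build the C++ inference app, run it over the validation set, then evaluate accuracy with postprocess.py.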
+compile_app
+infer
+cal_acc