postprocess.py
    # Copyright 2020-2021 Huawei Technologies Co., Ltd
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    # http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    # ============================================================================
    """post process for 310 inference"""
    import os
    import numpy as np
    from pycocotools.coco import COCO
    
    from src.util import coco_eval, bbox2result_1image, results2json
    from src.model_utils.config import config
    from src.model_utils.moxing_adapter import moxing_wrapper
    
    
    dst_width = 1280
    dst_height = 768
    
    def modelarts_pre_process():
        """No extra preparation is needed before evaluation on ModelArts."""
        pass
    
    @moxing_wrapper(pre_process=modelarts_pre_process)
    def get_eval_result(anno_path, result_path):
        """ get evaluation result of faster rcnn"""
        max_num = 128
        result_path = result_path
    
        outputs = []
    
        dataset_coco = COCO(anno_path)
        img_ids = dataset_coco.getImgIds()
    
        for img_id in img_ids:
            # COCO image file names are zero-padded to 12 digits.
            file_id = str(img_id).zfill(12)

            # Each image yields three fixed-size outputs: boxes with scores, class labels,
            # and a mask marking which rows of the padded output buffer are valid.
            bbox_result_file = os.path.join(result_path, file_id + "_0.bin")
            label_result_file = os.path.join(result_path, file_id + "_1.bin")
            mask_result_file = os.path.join(result_path, file_id + "_2.bin")
    
            all_bbox = np.fromfile(bbox_result_file, dtype=np.float16).reshape(80000, 5)
            all_label = np.fromfile(label_result_file, dtype=np.int32).reshape(80000, 1)
            all_mask = np.fromfile(mask_result_file, dtype=np.bool_).reshape(80000, 1)
    
            all_bbox_squee = np.squeeze(all_bbox)
            all_label_squee = np.squeeze(all_label)
            all_mask_squee = np.squeeze(all_mask)
    
            # Keep only the detections marked valid by the mask.
            all_bboxes_tmp_mask = all_bbox_squee[all_mask_squee, :]
            all_labels_tmp_mask = all_label_squee[all_mask_squee]
    
            # If more than max_num detections remain, keep the highest-scoring ones
            # (the last column of each box row is the confidence score).
            if all_bboxes_tmp_mask.shape[0] > max_num:
                inds = np.argsort(-all_bboxes_tmp_mask[:, -1])
                inds = inds[:max_num]
                all_bboxes_tmp_mask = all_bboxes_tmp_mask[inds]
                all_labels_tmp_mask = all_labels_tmp_mask[inds]
    
            outputs_tmp = bbox2result_1image(all_bboxes_tmp_mask, all_labels_tmp_mask, config.num_classes)
            outputs.append(outputs_tmp)
    
        # Convert per-image results to COCO json format and run the bbox evaluation.
        eval_types = ["bbox"]
        result_files = results2json(dataset_coco, outputs, "./results.pkl")
        coco_eval(config, result_files, eval_types, dataset_coco, single_result=False)
    
    if __name__ == '__main__':
        get_eval_result(config.anno_path, config.result_path)
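
For context, the per-image filtering step above can be sketched in isolation on synthetic arrays. The helper name `filter_detections` and all demo values below are illustrative assumptions for this note, not part of the repository; the buffer size (80000 rows) and the score-in-last-column convention follow the code above.

    import numpy as np

    def filter_detections(bboxes, labels, valid_mask, max_num=128):
        """Keep valid detections and, at most, the max_num highest-scoring ones."""
        bboxes = bboxes[valid_mask, :]
        labels = labels[valid_mask]
        if bboxes.shape[0] > max_num:
            order = np.argsort(-bboxes[:, -1])[:max_num]  # last column holds the score
            bboxes, labels = bboxes[order], labels[order]
        return bboxes, labels

    rng = np.random.default_rng(0)
    demo_bboxes = rng.random((80000, 5)).astype(np.float16)
    demo_labels = rng.integers(0, 80, size=80000, dtype=np.int32)
    demo_mask = rng.random(80000) < 0.01   # roughly 800 rows marked valid
    boxes, labels = filter_detections(demo_bboxes, demo_labels, demo_mask)
    print(boxes.shape, labels.shape)       # at most (128, 5) and (128,)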