diff --git a/research/gnn/dgcn/ascend310_infer/CMakeLists.txt b/research/gnn/dgcn/ascend310_infer/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..435823554c506455be6098283942611ae974f4bf
--- /dev/null
+++ b/research/gnn/dgcn/ascend310_infer/CMakeLists.txt
@@ -0,0 +1,15 @@
+cmake_minimum_required(VERSION 3.14.1)
+project(Ascend310Infer)
+add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
+set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
+option(MINDSPORE_PATH "mindspore install path" "")
+include_directories(${MINDSPORE_PATH})
+include_directories(${MINDSPORE_PATH}/include)
+include_directories(${PROJECT_SRC_ROOT})
+find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
+find_package(gflags REQUIRED)
+file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
+
+add_executable(main src/main.cc src/utils.cc)
+target_link_libraries(main ${MS_LIB} ${MD_LIB} gflags)
diff --git a/research/gnn/dgcn/ascend310_infer/build.sh b/research/gnn/dgcn/ascend310_infer/build.sh
new file mode 100644
index 0000000000000000000000000000000000000000..285514e19f2a1878a7bf8f0eed3c99fbc73868c4
--- /dev/null
+++ b/research/gnn/dgcn/ascend310_infer/build.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+if [ -d out ]; then
+    rm -rf out
+fi
+
+mkdir out
+cd out || exit
+
+if [ -f "Makefile" ]; then
+    make clean
+fi
+
+cmake .. \
+    -DMINDSPORE_PATH="`pip3.7 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+make
diff --git a/research/gnn/dgcn/ascend310_infer/inc/utils.h b/research/gnn/dgcn/ascend310_infer/inc/utils.h
new file mode 100644
index 0000000000000000000000000000000000000000..efebe03a8c1179f5a1f9d5f7ee07e0352a9937c6
--- /dev/null
+++ b/research/gnn/dgcn/ascend310_infer/inc/utils.h
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_INFERENCE_UTILS_H_
+#define MINDSPORE_INFERENCE_UTILS_H_
+
+#include <sys/stat.h>
+#include <dirent.h>
+#include <vector>
+#include <string>
+#include <memory>
+#include "include/api/types.h"
+
+std::vector<std::string> GetAllFiles(std::string_view dirName);
+DIR *OpenDir(std::string_view dirName);
+std::string RealPath(std::string_view path);
+mindspore::MSTensor ReadFileToTensor(const std::string &file);
+int WriteResult(const std::string& imageFile, const std::vector<mindspore::MSTensor> &outputs);
+#endif
diff --git a/research/gnn/dgcn/ascend310_infer/src/main.cc b/research/gnn/dgcn/ascend310_infer/src/main.cc
new file mode 100644
index 0000000000000000000000000000000000000000..966bc0d4dd1c826af7017b028c688f449ca64dfa
--- /dev/null
+++ b/research/gnn/dgcn/ascend310_infer/src/main.cc
@@ -0,0 +1,147 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <sys/time.h>
+#include <gflags/gflags.h>
+#include <dirent.h>
+#include <iostream>
+#include <string>
+#include <algorithm>
+#include <iosfwd>
+#include <vector>
+#include <fstream>
+#include <sstream>
+
+#include "include/api/model.h"
+#include "include/api/context.h"
+#include "include/api/types.h"
+#include "include/api/serialization.h"
+#include "include/dataset/execute.h"
+#include "include/dataset/vision.h"
+#include "inc/utils.h"
+
+using mindspore::Context;
+using mindspore::Serialization;
+using mindspore::Model;
+using mindspore::Status;
+using mindspore::MSTensor;
+using mindspore::dataset::Execute;
+using mindspore::ModelType;
+using mindspore::GraphCell;
+using mindspore::kSuccess;
+
+DEFINE_string(mindir_path, "", "mindir path");
+DEFINE_string(input0_path, ".", "input0 path");
+DEFINE_string(input1_path, ".", "input1 path");
+DEFINE_string(input2_path, ".", "input2 path");
+DEFINE_int32(device_id, 0, "device id");
+
+int main(int argc, char **argv) {
+  gflags::ParseCommandLineFlags(&argc, &argv, true);
+  if (RealPath(FLAGS_mindir_path).empty()) {
+    std::cout << "Invalid mindir" << std::endl;
+    return 1;
+  }
+
+  auto context = std::make_shared<Context>();
+  auto ascend310 = std::make_shared<mindspore::Ascend310DeviceInfo>();
+  ascend310->SetDeviceID(FLAGS_device_id);
+  context->MutableDeviceInfo().push_back(ascend310);
+  mindspore::Graph graph;
+  std::cout << "Start load mindir" << std::endl;
+  Serialization::Load(FLAGS_mindir_path, ModelType::kMindIR, &graph);
+  Model model;
+  std::cout << "Start build graph" << std::endl;
+  Status ret = model.Build(GraphCell(graph), context);
+  if (ret != kSuccess) {
+    std::cout << "ERROR: Build failed." << std::endl;
+    return 1;
+  }
+  std::cout << "Start get inputs" << std::endl;
+  std::vector<MSTensor> model_inputs = model.GetInputs();
+  if (model_inputs.empty()) {
+    std::cout << "Invalid model, inputs is empty." << std::endl;
+    return 1;
+  }
+
+  auto input0_files = GetAllFiles(FLAGS_input0_path);
+  auto input1_files = GetAllFiles(FLAGS_input1_path);
+  auto input2_files = GetAllFiles(FLAGS_input2_path);
+  std::cout << "size is : " << input0_files.size() << ", " << input1_files.size() << ", "
+            << input2_files.size() << std::endl;
+  if (input0_files.empty() || input1_files.empty() || input2_files.empty()) {
+    std::cout << "ERROR: input data empty." << std::endl;
+    return 1;
+  }
+
+  std::map<double, double> costTime_map;
+  size_t size = input0_files.size();
+
+  for (size_t i = 0; i < size; ++i) {
+    struct timeval start = {0};
+    struct timeval end = {0};
+    double startTimeMs;
+    double endTimeMs;
+    std::vector<MSTensor> inputs;
+    std::vector<MSTensor> outputs;
+    std::cout << "Start predict input files:" << input0_files[i] << std::endl;
+
+    auto input0 = ReadFileToTensor(input0_files[i]);
+    auto input1 = ReadFileToTensor(input1_files[i]);
+    auto input2 = ReadFileToTensor(input2_files[i]);
+
+    inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(),
+                        input0.Data().get(), input0.DataSize());
+    inputs.emplace_back(model_inputs[1].Name(), model_inputs[1].DataType(), model_inputs[1].Shape(),
+                        input1.Data().get(), input1.DataSize());
+    inputs.emplace_back(model_inputs[2].Name(), model_inputs[2].DataType(), model_inputs[2].Shape(),
+                        input2.Data().get(), input2.DataSize());
+
+    gettimeofday(&start, nullptr);
+    ret = model.Predict(inputs, &outputs);
+    gettimeofday(&end, nullptr);
+    if (ret != kSuccess) {
+      std::cout << "Predict " << input0_files[i] << " failed." << std::endl;
+      return 1;
+    }
+    startTimeMs = (1.0 * start.tv_sec * 1000000 + start.tv_usec) / 1000;
+    endTimeMs = (1.0 * end.tv_sec * 1000000 + end.tv_usec) / 1000;
+    costTime_map.insert(std::pair<double, double>(startTimeMs, endTimeMs));
+    int ret_ = WriteResult(input0_files[i], outputs);
+    if (ret_ != kSuccess) {
+      std::cout << "write result failed." << std::endl;
+      return 1;
+    }
+  }
+  double average = 0.0;
+  int inferCount = 0;
+
+  for (auto iter = costTime_map.begin(); iter != costTime_map.end(); iter++) {
+    double diff = 0.0;
+    diff = iter->second - iter->first;
+    average += diff;
+    inferCount++;
+  }
+  average = average / inferCount;
+  std::stringstream timeCost;
+  timeCost << "NN inference cost average time: " << average << " ms of infer_count " << inferCount << std::endl;
+  std::cout << "NN inference cost average time: " << average << "ms of infer_count " << inferCount << std::endl;
+  std::string fileName = "./time_Result" + std::string("/test_perform_static.txt");
+  std::ofstream fileStream(fileName.c_str(), std::ios::trunc);
+  fileStream << timeCost.str();
+  fileStream.close();
+  costTime_map.clear();
+  return 0;
+}
diff --git a/research/gnn/dgcn/ascend310_infer/src/utils.cc b/research/gnn/dgcn/ascend310_infer/src/utils.cc
new file mode 100644
index 0000000000000000000000000000000000000000..6192ba084f7fe17414c6876f752c80739d98fbef
--- /dev/null
+++ b/research/gnn/dgcn/ascend310_infer/src/utils.cc
@@ -0,0 +1,141 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <algorithm>
+#include <iostream>
+#include "inc/utils.h"
+
+using mindspore::MSTensor;
+using mindspore::DataType;
+
+std::vector<std::string> GetAllFiles(std::string_view dirName) {
+  struct dirent *filename;
+  DIR *dir = OpenDir(dirName);
+  if (dir == nullptr) {
+    return {};
+  }
+  std::vector<std::string> res;
+  while ((filename = readdir(dir)) != nullptr) {
+    std::string dName = std::string(filename->d_name);
+    if (dName == "." || dName == ".." || filename->d_type != DT_REG) {
+      continue;
+    }
+    res.emplace_back(std::string(dirName) + "/" + filename->d_name);
+  }
+  std::sort(res.begin(), res.end());
+  for (auto &f : res) {
+    std::cout << "image file: " << f << std::endl;
+  }
+  return res;
+}
+
+int WriteResult(const std::string& imageFile, const std::vector<MSTensor> &outputs) {
+  std::string homePath = "./result_Files";
+  for (size_t i = 0; i < outputs.size(); ++i) {
+    size_t outputSize;
+    std::shared_ptr<const void> netOutput;
+    netOutput = outputs[i].Data();
+    outputSize = outputs[i].DataSize();
+    int pos = imageFile.rfind('/');
+    std::string fileName(imageFile, pos + 1);
+    fileName.replace(fileName.find('.'), fileName.size() - fileName.find('.'), '_' + std::to_string(i) + ".bin");
+    std::string outFileName = homePath + "/" + fileName;
+    FILE * outputFile = fopen(outFileName.c_str(), "wb");
+    if (outputFile == nullptr) {
+      std::cout << "open result file " << outFileName << " failed" << std::endl;
+      return -1;
+    }
+    size_t size = fwrite(netOutput.get(), sizeof(char), outputSize, outputFile);
+    if (size != outputSize) {
+      fclose(outputFile);
+      outputFile = nullptr;
+      std::cout << "write result file " << outFileName << " failed, write size[" << size <<
+        "] is smaller than output size[" << outputSize << "], maybe the disk is full" << std::endl;
+      return -1;
+    }
+
+    fclose(outputFile);
+    outputFile = nullptr;
+  }
+  return 0;
+}
+
+mindspore::MSTensor ReadFileToTensor(const std::string &file) {
+  if (file.empty()) {
+    std::cout << "Pointer file is nullptr" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  std::ifstream ifs(file);
+  if (!ifs.good()) {
+    std::cout << "File: " << file << " does not exist" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  if (!ifs.is_open()) {
+    std::cout << "File: " << file << " open failed" << std::endl;
+    return mindspore::MSTensor();
+  }
+
+  ifs.seekg(0, std::ios::end);
+  size_t size = ifs.tellg();
+  mindspore::MSTensor buffer(file, mindspore::DataType::kNumberTypeUInt8, {static_cast<int64_t>(size)}, nullptr, size);
+
+  ifs.seekg(0, std::ios::beg);
+  ifs.read(reinterpret_cast<char *>(buffer.MutableData()), size);
+  ifs.close();
+
+  return buffer;
+}
+
+
+DIR *OpenDir(std::string_view dirName) {
+  if (dirName.empty()) {
+    std::cout << " dirName is null ! " << std::endl;
+    return nullptr;
+  }
+  std::string realPath = RealPath(dirName);
+  struct stat s;
+  lstat(realPath.c_str(), &s);
+  if (!S_ISDIR(s.st_mode)) {
+    std::cout << "dirName is not a valid directory !" << std::endl;
+    return nullptr;
+  }
+  DIR *dir;
+  dir = opendir(realPath.c_str());
+  if (dir == nullptr) {
+    std::cout << "Can not open dir " << dirName << std::endl;
+    return nullptr;
+  }
+  std::cout << "Successfully opened the dir " << dirName << std::endl;
+  return dir;
+}
+
+std::string RealPath(std::string_view path) {
+  char realPathMem[PATH_MAX] = {0};
+  char *realPathRet = nullptr;
+  realPathRet = realpath(path.data(), realPathMem);
+
+  if (realPathRet == nullptr) {
+    std::cout << "File: " << path << " does not exist.";
+    return "";
+  }
+
+  std::string realPath(realPathMem);
+  std::cout << path << " realpath is: " << realPath << std::endl;
+  return realPath;
+}
diff --git a/research/gnn/dgcn/export.py b/research/gnn/dgcn/export.py
index 2991518ef032ae03ec8c3db475ffa40d8ebb7149..8bcc3c8a7842012518462fa300b318a8c345cd8c 100644
--- a/research/gnn/dgcn/export.py
+++ b/research/gnn/dgcn/export.py
@@ -39,21 +39,21 @@ if __name__ == "__main__":
     if args.dataset == "cora":
         input_dim = 1433
         output_dim = 7
-        diffusions = Tensor(np.zeros((2708, 2708), np.float32))
-        ppmi = Tensor(np.zeros((2708, 2708), np.float32))
-        features = Tensor(np.zeros((2708, 1433), np.float32))
+        diffusions = Tensor(np.zeros((2708, 2708), np.float16))
+        ppmi = Tensor(np.zeros((2708, 2708), np.float16))
+        features = Tensor(np.zeros((2708, 1433), np.float16))
     if args.dataset == "citeseer":
         input_dim = 3703
         output_dim = 6
-        diffusions = Tensor(np.zeros((3312, 3312), np.float32))
-        ppmi = Tensor(np.zeros((3312, 3312), np.float32))
-        features = Tensor(np.zeros((3312, 3703), np.float32))
+        diffusions = Tensor(np.zeros((3327, 3327), np.float16))
+        ppmi = Tensor(np.zeros((3327, 3327), np.float16))
+        features = Tensor(np.zeros((3327, 3703), np.float16))
     if args.dataset == "pubmed":
-        input_dim = 3703
+        input_dim = 500
         output_dim = 3
-        diffusions = Tensor(np.zeros((19717, 19717), np.float32))
-        ppmi = Tensor(np.zeros((19717, 19717), np.float32))
-        features = Tensor(np.zeros((19717, 500), np.float32))
+        diffusions = Tensor(np.zeros((19717, 19717), np.float16))
+        ppmi = Tensor(np.zeros((19717, 19717), np.float16))
+        features = Tensor(np.zeros((19717, 500), np.float16))
 
     dgcn_net = DGCN(input_dim=input_dim, hidden_dim=config.hidden1,
                     output_dim=output_dim, dropout=config.dropout)
@@ -61,7 +61,8 @@
     dgcn_net.add_flags_recursive(fp16=True)
     param_dict = load_checkpoint(args.ckpt_file)
     load_param_into_net(dgcn_net, param_dict)
-    export(dgcn_net, diffusions, ppmi, features, file_name=args.file_name, file_format=args.file_format)
+    input_data = [diffusions, ppmi, features]
+    export(dgcn_net, *input_data, file_name=args.file_name, file_format=args.file_format)
     print("==========================================")
     print(args.file_name + ".mindir exported successfully!")
     print("==========================================")
diff --git a/research/gnn/dgcn/postprocess.py b/research/gnn/dgcn/postprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..f60ec1d4af63c5c9167f3dfe1e6b44179893dd48
--- /dev/null
+++ b/research/gnn/dgcn/postprocess.py
@@ -0,0 +1,62 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""
+postprocess.
+"""
+import os
+import argparse
+
+import numpy as np
+
+def Accuracy(label, mask, preds):
+    """Accuracy with masking."""
+    preds = preds.astype(np.float32)
+    correct_prediction = np.equal(np.argmax(preds, axis=1), np.argmax(label, axis=1))
+    accuracy_all = correct_prediction.astype(np.float32)
+    mask = mask.astype(np.float32)
+    mask_reduce = np.mean(mask)
+    mask = mask / mask_reduce
+    accuracy_all *= mask
+    return np.mean(accuracy_all)
+
+
+def get_acc():
+    """get infer Accuracy."""
+    parser = argparse.ArgumentParser(description='postprocess')
+    parser.add_argument('--dataset_name', type=str, default='cora',
+                        choices=['cora', 'citeseer', 'pubmed'], help='dataset name')
+    parser.add_argument('--result_path', type=str, default='./result_Files', help='result Files')
+    parser.add_argument('--label_path', type=str, default='', help='y_test npy Files')
+    parser.add_argument('--mask_path', type=str, default='', help='test_mask npy Files')
+    args_opt = parser.parse_args()
+
+    label_onehot = np.load(args_opt.label_path)
+    test_mask = np.load(args_opt.mask_path)
+
+    pred = np.fromfile(os.path.join(args_opt.result_path, 'diffusions_0.bin'), np.float16)
+    if args_opt.dataset_name == 'cora':
+        pred = pred.reshape(2708, 7)
+    if args_opt.dataset_name == 'citeseer':
+        pred = pred.reshape(3327, 6)
+    if args_opt.dataset_name == 'pubmed':
+        pred = pred.reshape(19717, 3)
+
+    acc = Accuracy(label_onehot, test_mask, pred)
+    print("Test set results:", "accuracy=", "{:.5f}".format(acc))
+
+if __name__ == '__main__':
+    get_acc()
+    
\ No newline at end of file
diff --git a/research/gnn/dgcn/preprocess.py b/research/gnn/dgcn/preprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1a37c799a144295dc98569aef7d53f9150dc857
--- /dev/null
+++ b/research/gnn/dgcn/preprocess.py
@@ -0,0 +1,62 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""
+preprocess.
+"""
+import os
+import argparse
+
+import numpy as np
+from src.data_process import load_graph_data
+from src.utilities import diffusion_fun_improved_ppmi_dynamic_sparsity, diffusion_fun_sparse
+from src.config import ConfigDGCN
+
+
+def generate_bin():
+    """Generate bin files."""
+    parser = argparse.ArgumentParser(description='preprocess')
+    parser.add_argument('--data_dir', type=str, default='./data/cora/cora_mr', help='Dataset directory')
+    parser.add_argument('--test_nodes_num', type=int, default=1000, help='Nodes numbers for test')
+    parser.add_argument('--result_path', type=str, default='./preprocess_Result/', help='Result path')
+    args_opt = parser.parse_args()
+
+    adj, features, y_train, y_val, y_test, train_mask, val_mask, test_mask, labels = load_graph_data(args_opt.data_dir)
+    print(np.shape(y_train), np.shape(y_val), np.shape(y_test), np.shape(train_mask), np.shape(val_mask))
+    adj_path = os.path.join(args_opt.result_path, "00_data")
+    ppmi_path = os.path.join(args_opt.result_path, "01_data")
+    feature_path = os.path.join(args_opt.result_path, "02_data")
+    os.makedirs(adj_path)
+    os.makedirs(feature_path)
+    os.makedirs(ppmi_path)
+    config = ConfigDGCN()
+    diffusions = diffusion_fun_sparse(adj.tocsc())
+    diffusions = diffusions.toarray()
+    ppmi = diffusion_fun_improved_ppmi_dynamic_sparsity(adj, path_len=config.path_len, k=1.0)
+    ppmi = ppmi.toarray()
+    features = features.toarray()
+    diffusions = diffusions.astype(np.float16)
+    ppmi = ppmi.astype(np.float16)
+    features = features.astype(np.float16)
+
+    diffusions.tofile(os.path.join(adj_path, "diffusions.bin"))
+    ppmi.tofile(os.path.join(ppmi_path, "ppmi.bin"))
+    features.tofile(os.path.join(feature_path, "feature.bin"))
+    np.save(os.path.join(args_opt.result_path, 'label_onehot.npy'), labels)
+    np.save(os.path.join(args_opt.result_path, 'test_mask.npy'), test_mask)
+
+if __name__ == '__main__':
+    generate_bin()
+    
\ No newline at end of file
diff --git a/research/gnn/dgcn/readme_CN.md b/research/gnn/dgcn/readme_CN.md
index 27da63d8c0e433120e2d1dade4f350cdc95b2bca..f2f1e6f6824581ce4cf182ea1c70346fb72edf58 100644
--- a/research/gnn/dgcn/readme_CN.md
+++ b/research/gnn/dgcn/readme_CN.md
@@ -14,9 +14,11 @@
     - [Usage](#usage)
    - [Launch](#launch)
    - [Result](#result)
-    - [Export MindIR model](#export-mindir-model)
+    - [Inference process](#inference-process)
+        - [Export MindIR](#export-mindir)
+        - [Run inference on Ascend 310](#run-inference-on-ascend-310)
 - [Model description](#model-description)
-    - [Performance](#performance)
+    - [Performance](#performance)
 - [Description of random situation](#description-of-random-situation)
 
 <!-- /TOC -->
@@ -235,7 +237,9 @@ Convolution Layers:[(1433, 36), (36, 7)]
 Eval results: loss= 0.52596 accuracy= 0.82800 time= 13.57054
 ```
 
-## [Export MindIR model](#contents)
+## Inference process
+
+### [Export MindIR](#contents)
 
 ```shell
 python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
@@ -244,8 +248,29 @@ python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [
 Example
 
 ```text
-python export.py --ckpt_file ./checkpoint/cora/dgcn.ckpt --file_format MINDIR
-The parameter ckpt_file is required, and `FILE_FORMAT` must be chosen from ["AIR", "MINDIR"].
+python export.py --ckpt_file ./checkpoint/cora/dgcn.ckpt
+The parameter ckpt_file is required, and `EXPORT_FORMAT` must be chosen from ["AIR", "MINDIR"].
+```
+
+### Run inference on Ascend 310
+
+Before running inference, the MindIR file must be exported with the `export.py` script. The following shows an example of running inference with a MindIR model.
+
+```shell
+# Ascend 310 inference
+bash run_infer_310.sh [MINDIR_PATH] [DATASET_NAME] [NEED_PREPROCESS] [DEVICE_ID]
+```
+
+- `DATASET_NAME` is the dataset name; it must be one of ['cora', 'citeseer', 'pubmed'].
+- `NEED_PREPROCESS` indicates whether the data needs preprocessing; its value is 'y' or 'n'.
+- `DEVICE_ID` is optional; the default value is 0.
+
+### Result
+
+The inference result is saved in the current execution path of the script, and the following accuracy result can be found in acc.log.
+
+```bash
+Test set results: accuracy= 0.82800
 ```
 
 ## Model description
diff --git a/research/gnn/dgcn/script/run_eval.sh b/research/gnn/dgcn/script/run_eval.sh
index 9dc7777f2b415714e741de4a4f110556986af65e..4917fa847765ea8dfb2be403b5044290438c09b0 100644
--- a/research/gnn/dgcn/script/run_eval.sh
+++ b/research/gnn/dgcn/script/run_eval.sh
@@ -14,11 +14,14 @@
 # limitations under the License.
 # ============================================================================
 
-if [[ $# -gt 1 ]]; then
-    echo "Usage: bash ./scripts/run_eval.sh [CHECKPOINT]"
+if [[ $# -gt 2 ]]; then
+    echo "Usage: bash run_eval.sh [CHECKPOINT] [DATASET]"
     exit 1
 fi
 
+DATASET_NAME=$2
+CHECKPOINT=$1
+
 if [ ! -d "eval" ]; then
     mkdir eval
 fi
@@ -27,4 +30,4 @@ cp -r ../src ./eval
 cp -r ../data ./eval
 cp -r ../checkpoint ./eval
 cd ./eval || exit
-nohup python -u eval.py --checkpoint=$1 > eval.log 2>&1 &
\ No newline at end of file
+nohup python -u eval.py --checkpoint=$CHECKPOINT --dataset=$DATASET_NAME > eval.log 2>&1 &
\ No newline at end of file
diff --git a/research/gnn/dgcn/script/run_infer_310.sh b/research/gnn/dgcn/script/run_infer_310.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3e40b9bbaefce73e1f62f914cb8c5d3dd756502e
--- /dev/null
+++ b/research/gnn/dgcn/script/run_infer_310.sh
@@ -0,0 +1,129 @@
+#!/bin/bash
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+if [[ $# -lt 3 || $# -gt 4 ]]; then
+    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATASET_NAME] [NEED_PREPROCESS] [DEVICE_ID]
+    DATASET_NAME must be in ['cora', 'citeseer', 'pubmed'].
+    NEED_PREPROCESS means whether preprocessing is needed, its value is 'y' or 'n'.
+    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
+exit 1
+fi
+
+get_real_path(){
+    if [ "${1:0:1}" == "/" ]; then
+        echo "$1"
+    else
+        echo "$(realpath -m $PWD/$1)"
+    fi
+}
+model=$(get_real_path $1)
+if [ "$2" == "cora" ] || [ "$2" == "citeseer" ] || [ "$2" == "pubmed" ];then
+    dataset_name=$2
+else
+    echo "dataset must be in ['cora', 'citeseer', 'pubmed']"
+    exit 1
+fi
+
+
+if [ "$3" == "y" ] || [ "$3" == "n" ];then
+    need_preprocess=$3
+else
+    echo "whether preprocessing is needed or not, its value must be in [y, n]"
+    exit 1
+fi
+
+device_id=0
+if [ $# == 4 ]; then
+    device_id=$4
+fi
+
+echo "mindir name: "$model
+echo "dataset name: "$dataset_name
+echo "need preprocess: "$need_preprocess
+echo "device id: "$device_id
+
+export ASCEND_HOME=/usr/local/Ascend
+if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
+else
+    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
+    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
+    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
+    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
+fi
+
+function preprocess_data()
+{
+    if [ -d preprocess_Result ]; then
+        rm -rf ./preprocess_Result
+    fi
+    mkdir preprocess_Result
+    python3.7 ../preprocess.py --data_dir=$dataset_name --result_path=./preprocess_Result/
+}
+
+function compile_app()
+{
+    cd ../ascend310_infer || exit
+    bash build.sh &> build.log
+}
+
+function infer()
+{
+    cd - || exit
+    if [ -d result_Files ]; then
+        rm -rf ./result_Files
+    fi
+    if [ -d time_Result ]; then
+        rm -rf ./time_Result
+    fi
+    mkdir result_Files
+    mkdir time_Result
+
+    ../ascend310_infer/out/main --mindir_path=$model --input0_path=./preprocess_Result/00_data --input1_path=./preprocess_Result/01_data --input2_path=./preprocess_Result/02_data --device_id=$device_id &> infer.log
+
+}
+
+function cal_acc()
+{
+    python3.7 ../postprocess.py --result_path=./result_Files --dataset_name=$dataset_name --label_path=./preprocess_Result/label_onehot.npy --mask_path=./preprocess_Result/test_mask.npy &> acc.log
+}
+
+if [ $need_preprocess == "y" ]; then
+    preprocess_data
+    if [ $? -ne 0 ]; then
+        echo "preprocess dataset failed"
+        exit 1
+    fi
+fi
+compile_app
+if [ $? -ne 0 ]; then
+    echo "compile app code failed"
+    exit 1
+fi
+infer
+if [ $? -ne 0 ]; then
+    echo "execute inference failed"
+    exit 1
+fi
+cal_acc
+if [ $? -ne 0 ]; then
+    echo "calculate accuracy failed"
+    exit 1
+fi
diff --git a/research/gnn/dgcn/script/run_train_8p.sh b/research/gnn/dgcn/script/run_train_8p.sh
index 74ad8123142360aafc4427f716d4d2d2a8b30e7b..872e8ccc94517659ed8abc67c484e649b8fe796f 100644
--- a/research/gnn/dgcn/script/run_train_8p.sh
+++ b/research/gnn/dgcn/script/run_train_8p.sh
@@ -15,7 +15,7 @@
 # ============================================================================
 
 if [[ $# -gt 5 ]]; then
-    echo "Usage: bash ./scripts/run_train_8p.sh [RANK_TABLE] [RANK_SIZE] [DEVICE_START] [DATASET_NAME] [DISTRIBUTED]"
+    echo "Usage: bash run_train_8p.sh [RANK_TABLE] [RANK_SIZE] [DEVICE_START] [DATASET_NAME] [DISTRIBUTED]"
     exit 1
 fi
 
@@ -38,6 +38,7 @@ do
     cp -r ../src ./device$i
     cp -r ../data ./device$i
     cp ../*.py ./device$i
+    cp *.sh ./device$i
     echo "Start training for rank $RANK_ID, device $DEVICE_ID"
     cd ./device$i
     env > env.log
diff --git a/research/gnn/dgcn/train.py b/research/gnn/dgcn/train.py
index 3cc2272585a07716e3af890bb323a65226d3886f..744b26261944feb5a6fd4a5ed5286e616f1b8095 100644
--- a/research/gnn/dgcn/train.py
+++ b/research/gnn/dgcn/train.py
@@ -50,7 +50,7 @@ def run_train(learning_rate=0.01, n_epochs=200, dataset=None, dropout_rate=0.5,
               hidden_size=36):
     """run train."""
     context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)
-    if args.device_target == "Ascend":
+    if args.device_target == "Ascend" and not args.distributed:
        context.set_context(device_id=args.device_id)
     if args.distributed:
         device_id = int(os.getenv('DEVICE_ID'))