From 7eb79703ea249558ea154ecf4c4a580b0e507fd6 Mon Sep 17 00:00:00 2001
From: anzhengqi <anzhengqi@huawei.com>
Date: Mon, 1 Nov 2021 11:01:31 +0800
Subject: [PATCH] modify network wide&deep readme file

---
 official/recommend/wide_and_deep/README.md    | 24 +++++++-------
 official/recommend/wide_and_deep/README_CN.md | 32 +++++++------------
 .../src/generate_synthetic_data.py            |  2 +-
 .../wide_and_deep/src/preprocess_data.py      |  2 +-
 4 files changed, 26 insertions(+), 34 deletions(-)

diff --git a/official/recommend/wide_and_deep/README.md b/official/recommend/wide_and_deep/README.md
index 0a993a3e8..d7d406056 100644
--- a/official/recommend/wide_and_deep/README.md
+++ b/official/recommend/wide_and_deep/README.md
@@ -61,13 +61,13 @@ Currently we support host-device mode with multi-dimensional partition parallel
 1. Clone the Code
 
 ```bash
-git clone https://gitee.com/mindspore/mindspore.git
-cd mindspore/model_zoo/official/recommend/wide_and_deep
+git clone https://gitee.com/mindspore/models.git
+cd models/official/recommend/wide_and_deep
 ```
 
 2. Download the Dataset
 
-  > Please refer to [1] to obtain the download link
+  > Please refer to [1](#dataset) to obtain the download link
 
 ```bash
 mkdir -p data/origin_data && cd data/origin_data
@@ -86,13 +86,13 @@ python src/preprocess_data.py  --data_path=./data/ --dense_dim=13 --slot_dim=26
 Once the dataset is ready, the model can be trained and evaluated on the single device(Ascend) by the command as follows:
 
 ```bash
-python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend
 ```
 
 To evaluate the model, command as follows:
 
 ```bash
-python eval.py  --data_path=./data/mindrecord --dataset_type=mindrecord --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
+python eval.py  --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
 ```
 
 - Running on ModelArts (If you want to run in modelarts, please check the official documentation of [modelarts](https://support.huaweicloud.com/modelarts/), and you can start training as follows)
@@ -312,7 +312,7 @@ wget DATA_LINK
 tar -zxvf dac.tar.gz
 ```
 
-> Please refer to [1] to obtain the download link
+> Please refer to [1](#dataset) to obtain the download link
 
 2. Use this script to preprocess the data
 
@@ -344,7 +344,7 @@ python src/preprocess_data.py --data_path=./syn_data/  --dense_dim=13 --slot_dim
 To train and evaluate the model, command as follows:
 
 ```python
-python train_and_eval.py
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend
 ```
 
 ### [SingleDevice For Cache Mode](#contents)
@@ -352,7 +352,7 @@ python train_and_eval.py
 To train and evaluate the model, command as follows:
 
 ```python
-python train_and_eval.py  --vocab_size=200000  --vocab_cache_size=160000
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --sparse=True --vocab_size=200000 --vocab_cache_size=160000
 ```
 
 ### [Distribute Training](#contents)
@@ -402,15 +402,15 @@ bash run_parameter_server_train.sh RANK_SIZE EPOCHS DATASET RANK_TABLE_FILE SERV
 To evaluate the model, command as follows:
 
 ```python
-python eval.py
+python eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
 ```
 
 ## Inference Process
 
 ### [Export MindIR](#contents)
 
-```shell
-python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
+```shell
+python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --device_target [DEVICE_TARGET] --file_format [FILE_FORMAT]
 ```
 
 The ckpt_file parameter is required,
@@ -448,7 +448,7 @@ Inference result is saved in current path, you can find result like this in acc.
 | Resource                 | Ascend 910; OS Euler2.8                      | Tesla V100-PCIE 32G             | Ascend 910; OS Euler2.8                     | Ascend 910; OS Euler2.8                      |
 | Uploaded Date            | 07/05/2021 (month/day/year)     | 07/05/2021 (month/day/year)     | 07/05/2021 (month/day/year)     | 07/05/2021 (month/day/year)     |
 | MindSpore Version        | 1.3.0                           | 1.3.0                           | 1.3.0                           | 1.3.0                           |
-| Dataset                  | [1]                             | [1]                             | [1]                             | [1]                             |
+| Dataset                  | [1](#dataset)                   | [1](#dataset)                   | [1](#dataset)                   | [1](#dataset)                   |
 | Training Parameters      | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 |
 | Optimizer                | FTRL,Adam                       | FTRL,Adam                       | FTRL,Adam                       | FTRL,Adam                       |
 | Loss Function            | SigmoidCrossEntroy              | SigmoidCrossEntroy              | SigmoidCrossEntroy              | SigmoidCrossEntroy              |
diff --git a/official/recommend/wide_and_deep/README_CN.md b/official/recommend/wide_and_deep/README_CN.md
index 850985a2d..4eb80af4b 100644
--- a/official/recommend/wide_and_deep/README_CN.md
+++ b/official/recommend/wide_and_deep/README_CN.md
@@ -1,7 +1,7 @@
 锘�# 鐩綍
 
 - [鐩綍](#鐩綍)
-- [Wide&Deep姒傝堪](#widedeep姒傝堪)
+- [Wide&Deep姒傝堪](#widedeep姒傝堪)
 - [妯″瀷鏋舵瀯](#妯″瀷鏋舵瀯)
 - [鏁版嵁闆哴(#鏁版嵁闆�)
 - [鐜瑕佹眰](#鐜瑕佹眰)
@@ -64,13 +64,13 @@ Wide&Deep妯″瀷璁粌浜嗗绾挎€фā鍨嬪拰娣卞害瀛︿範绁炵粡缃戠粶锛岀粨鍚堜簡
 1. 鍏嬮殕浠g爜銆�
 
 ```bash
-git clone https://gitee.com/mindspore/mindspore.git
-cd mindspore/model_zoo/official/recommend/wide_and_deep
+git clone https://gitee.com/mindspore/models.git
+cd models/official/recommend/wide_and_deep
 ```
 
 2. 涓嬭浇鏁版嵁闆嗐€�
 
-  > 璇峰弬鑰僛1]鑾峰緱涓嬭浇閾炬帴銆�
+  > 璇峰弬鑰僛1](#鏁版嵁闆�)鑾峰緱涓嬭浇閾炬帴銆�
 
 ```bash
 mkdir -p data/origin_data && cd data/origin_data
@@ -89,13 +89,13 @@ python src/preprocess_data.py  --data_path=./data/ --dense_dim=13 --slot_dim=26
 鏁版嵁闆嗗噯澶囧氨缁悗锛屽嵆鍙湪Ascend涓婂崟鏈鸿缁冨拰璇勪及妯″瀷銆�
 
 ```bash
-python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend
 ```
 
 鎸夊涓嬫搷浣滆瘎浼版ā鍨嬶細
 
 ```bash
-python eval.py  --data_path=./data/mindrecord --dataset_type=mindrecord --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
+python eval.py  --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
 ```
 
 - 鍦∕odelArts涓婅繍琛岋紙濡傛灉鎯冲湪modelarts涓繍琛岋紝璇锋煡鐪嬨€恗odelarts銆戝畼鏂规枃妗o紙https://support.huaweicloud.com/modelarts/锛夛紝濡備笅寮€濮嬭缁冨嵆鍙級
@@ -316,7 +316,7 @@ wget DATA_LINK
 tar -zxvf dac.tar.gz
 ```
 
-> 浠嶽1]鑾峰彇涓嬭浇閾炬帴銆�
+> 浠嶽1](#鏁版嵁闆�)鑾峰彇涓嬭浇閾炬帴銆�
 
 2. 浣跨敤姝よ剼鏈澶勭悊鏁版嵁銆�
 
@@ -348,7 +348,7 @@ python src/preprocess_data.py --data_path=./syn_data/  --dense_dim=13 --slot_dim
 杩愯濡備笅鍛戒护璁粌鍜岃瘎浼版ā鍨嬶細
 
 ```bash
-python train_and_eval.py
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend
 ```
 
 ### 鍗曟満璁粌缂撳瓨妯″紡
@@ -356,7 +356,7 @@ python train_and_eval.py
 杩愯濡備笅鍛戒护璁粌鍜岃瘎浼版ā鍨嬶細
 
 ```bash
-python train_and_eval.py  --vocab_size=200000  --vocab_cache_size=160000
+python train_and_eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --sparse=True --vocab_size=200000 --vocab_cache_size=160000
 ```
 
 ### 鍒嗗竷寮忚缁�
@@ -405,16 +405,8 @@ bash run_parameter_server_train.sh RANK_SIZE EPOCHS DATASET RANK_TABLE_FILE SERV
 
 杩愯濡備笅鍛戒护璇勪及妯″瀷锛�
 
-```bash
-python eval.py
-```
-
-## [Evaluation Process](#contents)
-
-To evaluate the model, command as follows:
-
-```python
+```bash
-python eval.py
+python eval.py --data_path=./data/mindrecord --dataset_type=mindrecord --device_target=Ascend --ckpt_path=./ckpt/widedeep_train-15_2582.ckpt
 ```
 
 ## 鎺ㄧ悊杩囩▼
@@ -422,7 +414,7 @@ python eval.py
 ### [瀵煎嚭MindIR](#contents)
 
 ```shell
-python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
+python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --device_target [DEVICE_TARGET] --file_format [FILE_FORMAT]
 ```
 
 鍙傛暟ckpt_file涓哄繀濉」锛�
@@ -460,7 +452,7 @@ bash run_infer_310.sh [MINDIR_PATH] [DATASET_PATH] [DATA_TYPE] [NEED_PREPROCESS]
 | 璧勬簮                 |Ascend 910锛涚郴缁� Euler2.8                | Tesla V100-PCIE 32G             | Ascend 910锛涚郴缁� Euler2.8                      | Ascend 910锛涚郴缁� Euler2.8                      |
 | 涓婁紶鏃ユ湡            | 2021-07-05     |  2021-07-05    | 2021-07-05   | 2021-07-05     |
 | MindSpore鐗堟湰        | 1.3.0                      | 1.3.0                          | 1.3.0                      | 1.3.0                      |
-| 鏁版嵁闆�                  | [1]                             | [1]                             | [1]                             | [1]                             |
+| 鏁版嵁闆�                  | [1](#鏁版嵁闆�)                     | [1](#鏁版嵁闆�)                       | [1](#鏁版嵁闆�)                       | [1](#鏁版嵁闆�)                       |
 | 璁粌鍙傛暟      | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 | Epoch=15,<br />batch_size=16000 |
 | 浼樺寲鍣�                | FTRL,Adam                       | FTRL,Adam                       | FTRL,Adam                       | FTRL,Adam                       |
 | 鎹熷け鍑芥暟            | Sigmoid浜ゅ弶鐔�              | Sigmoid浜ゅ弶鐔�              | Sigmoid浜ゅ弶鐔�              | Sigmoid浜ゅ弶鐔�              |
diff --git a/official/recommend/wide_and_deep/src/generate_synthetic_data.py b/official/recommend/wide_and_deep/src/generate_synthetic_data.py
index ed34698dc..7d767a45d 100644
--- a/official/recommend/wide_and_deep/src/generate_synthetic_data.py
+++ b/official/recommend/wide_and_deep/src/generate_synthetic_data.py
@@ -16,7 +16,7 @@
 """Generate the synthetic data for wide&deep model training"""
 import time
 import numpy as np
-from .model_utils.config import config
+from src.model_utils.config import config
 
 def generate_data(output_path, label_dim, number_examples, dense_dim, slot_dim, vocabulary_size, random_slot_values):
     """
diff --git a/official/recommend/wide_and_deep/src/preprocess_data.py b/official/recommend/wide_and_deep/src/preprocess_data.py
index a23e9eb86..d947fbe4f 100644
--- a/official/recommend/wide_and_deep/src/preprocess_data.py
+++ b/official/recommend/wide_and_deep/src/preprocess_data.py
@@ -18,7 +18,7 @@ import pickle
 import collections
 import numpy as np
 from mindspore.mindrecord import FileWriter
-from .model_utils.config import config
+from src.model_utils.config import config
 
 class StatsDict():
     """preprocessed data"""
-- 
GitLab