run_standalone_train.sh
    #!/bin/bash
    # Copyright 2021 Huawei Technologies Co., Ltd
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    # http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    # ============================================================================
    
    if [ $# != 1 ]
    then
        echo "Usage: bash run_standalone_train.sh [DATASET_PATH]"
        exit 1
    fi
    
    # Return the absolute path of the argument; relative paths are resolved against the current directory.
    get_real_path(){
      if [ "${1:0:1}" == "/" ]; then
        echo "$1"
      else
        echo "$(realpath -m "$PWD/$1")"
      fi
    }
    
    DATASET_PATH=$(get_real_path "$1")
    echo "$DATASET_PATH"
    
    
    if [ ! -d "$DATASET_PATH" ]
    then
        echo "error: DATASET_PATH=$DATASET_PATH is not a directory"
        exit 1
    fi
    
    
    # Standalone run on a single device: device 0, rank 0, world size 1.
    export DEVICE_NUM=1
    export DEVICE_ID=0
    export RANK_ID=0
    export RANK_SIZE=1
    
    if [ -d "train" ];
    then
        rm -rf ./train
    fi
    mkdir ./train
    cp ../*.py ./train
    cp ../*.yaml ./train
    cp -r ../src ./train
    cp -r ../model_utils ./train
    cd ./train || exit
    echo "start training for device $DEVICE_ID"
    env > env.log
    
    # Launch training in the background; stdout and stderr are redirected to log.txt.
    python train.py \
        --data_dir=$DATASET_PATH \
        --is_distributed=0 \
        --lr=0.01 \
        --t_max=300 \
        --max_epoch=300 \
        --warmup_epochs=4 \
        --training_shape=640 \
        --per_batch_size=32 \
        --weight_decay=0.016 \
        --lr_scheduler=cosine_annealing > log.txt 2>&1 &
    cd ..
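
For reference, a typical invocation might look like the following; because the script copies sources from the parent directory, it is normally run from the directory that contains it, and the dataset path shown here is a placeholder rather than a value taken from the repository:

    bash run_standalone_train.sh /path/to/dataset

Training runs in the background; progress can be followed in train/log.txt, and the recorded environment is written to train/env.log.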