diff --git a/main.py b/main.py
index a34997546ac005845f63a2ca2ccda3e4c2d8a483..aae5d59f05723984a094ef9ade520c2bc5f64c1e 100644
--- a/main.py
+++ b/main.py
@@ -14,7 +14,7 @@ from unet_medical.unet_model import UNetMedical
 from nets.deeplab_v3 import deeplab_v3
 from dataset import GetDatasetGenerator
 from loss import SoftmaxCrossEntropyLoss
-from utils import learning_rates
+from utils import exponential_lr
 
 context.set_context(mode=context.PYNATIVE_MODE, save_graphs=False,
                     device_target='Ascend', device_id=7)
@@ -25,7 +25,7 @@ train_dataset_generator = GetDatasetGenerator('./datasets', 'train')
 train_dataset = ds.GeneratorDataset(train_dataset_generator, ["data", "label"], shuffle=True)
 train_dataset = train_dataset.batch(4, drop_remainder=True)
 
-lr_iter = learning_rates.exponential_lr(3e-5, 20, 0.98, 500, staircase=True)
+lr_iter = exponential_lr(3e-5, 20, 0.98, 500, staircase=True)
 
 net_loss = SoftmaxCrossEntropyLoss(6, 255)
 net_opt = nn.Adam(net.trainable_params(), learning_rate=lr_iter)
diff --git a/utils/__init__.py b/utils/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c7590659223a9b65f23a01daf11f46829b2bd2f3 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -0,0 +1 @@
+from .learning_rates import cosine_lr, poly_lr, exponential_lr