Unverified Commit 9e546610 authored by i-robot, committed by Gitee

!2277 Update _Loss to LossBase

Merge pull request !2277 from chenhaozhe/update_loss_base
parents 4fa75f78 0aaa1eba
Showing 47 additions and 53 deletions across the changed files.
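All of the hunks below make the same mechanical change: custom losses in the model zoo stop importing the private base class _Loss and subclass the public nn.LossBase instead (LossBase became the public replacement for _Loss around MindSpore 1.3). As a reference for reading the diffs, here is a minimal, self-contained sketch of a custom loss written against LossBase; the class name MyL1Loss is illustrative and not part of this commit:

    import mindspore.nn as nn
    import mindspore.ops as ops

    class MyL1Loss(nn.LossBase):
        """Illustrative custom loss: mean absolute error on top of LossBase."""
        def __init__(self, reduction='mean'):
            # LossBase stores the reduction mode and provides self.get_loss()
            super(MyL1Loss, self).__init__(reduction)
            self.abs = ops.Abs()

        def construct(self, logits, labels):
            x = self.abs(logits - labels)
            # get_loss() applies the configured reduction ('mean', 'sum' or 'none')
            return self.get_loss(x)
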
@@ -17,13 +17,7 @@
 from mindspore import nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-try:
-    from mindspore.nn.loss.loss import Loss
-except ImportError:
-    try:
-        from mindspore.nn.loss.loss import LossBase as Loss
-    except ImportError:
-        from mindspore.nn.loss.loss import _Loss as Loss
+from mindspore.nn.loss.loss import LossBase as Loss
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P

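The block removed in this first hunk was a version-compatibility shim that probed three names in turn (Loss, then LossBase, then _Loss). Pinning the import to LossBase simplifies the code but assumes a MindSpore version that exports it. A codebase that still has to run on older releases could keep a single fallback; a sketch, not part of this commit:

    try:
        # MindSpore >= 1.3 exports the public base class
        from mindspore.nn.loss.loss import LossBase as Loss
    except ImportError:
        # older releases only have the private predecessor
        from mindspore.nn.loss.loss import _Loss as Loss
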
@@ -16,12 +16,12 @@
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

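The hunks truncate each class body at the super() call. For orientation, here is a self-contained sketch of how such a label-smoothing cross entropy is typically completed on top of LossBase (reconstructed from the visible signature, not copied from this diff):

    import mindspore.nn as nn
    from mindspore import Tensor
    from mindspore.common import dtype as mstype
    from mindspore.nn.loss.loss import LossBase
    from mindspore.ops import functional as F
    from mindspore.ops import operations as P

    class CrossEntropySmooth(LossBase):
        """Cross entropy with label smoothing."""
        def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
            super(CrossEntropySmooth, self).__init__()
            self.onehot = P.OneHot()
            self.sparse = sparse
            # the true class gets on_value, every other class gets off_value
            self.on_value = Tensor(1.0 - smooth_factor, mstype.float32)
            self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32)
            self.ce = nn.SoftmaxCrossEntropyWithLogits(reduction=reduction)

        def construct(self, logit, label):
            if self.sparse:
                # expand integer labels into smoothed one-hot targets
                label = self.onehot(label, F.shape(logit)[1], self.on_value, self.off_value)
            return self.ce(logit, label)
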
@@ -18,10 +18,10 @@ import mindspore.ops.operations as P
 import mindspore.ops as F
 from mindspore.common.tensor import Tensor
 from mindspore import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 
-class CrossEntropyWithLogits(_Loss):
+class CrossEntropyWithLogits(LossBase):
     """
     Cross-entropy loss function for semantic segmentation,
     and different classes have the same weight.
@@ -61,7 +61,7 @@ class CrossEntropyWithLogits(_Loss):
         return loss
 
-class CrossEntropyWithWeights(_Loss):
+class CrossEntropyWithWeights(LossBase):
     """
     Cross-entropy loss function for semantic segmentation,
     and different classes have different weights.

@@ -16,12 +16,12 @@
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -17,12 +17,12 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import ops
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class SoftTargetCrossEntropy(_Loss):
+class SoftTargetCrossEntropy(LossBase):
     """SoftTargetCrossEntropy for MixUp Augment"""
     def __init__(self):
@@ -38,7 +38,7 @@ class SoftTargetCrossEntropy(_Loss):
         return self.mean_ops(loss)
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):

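SoftTargetCrossEntropy accompanies MixUp augmentation, where a label is a convex mix of two one-hot vectors rather than a class index, so the loss must accept a full target distribution. Given the return self.mean_ops(loss) visible in the hunk above, the body is presumably along these lines (a sketch under that assumption):

    from mindspore.nn.loss.loss import LossBase
    from mindspore.ops import operations as P

    class SoftTargetCrossEntropy(LossBase):
        """Cross entropy against soft (mixed) targets."""
        def __init__(self):
            super(SoftTargetCrossEntropy, self).__init__()
            self.mean_ops = P.ReduceMean(keep_dims=False)
            self.sum_ops = P.ReduceSum(keep_dims=False)
            self.log_softmax = P.LogSoftmax()

        def construct(self, logit, label):
            logit = self.log_softmax(logit)
            # per-sample cross entropy against the soft target distribution
            loss = self.sum_ops(-1.0 * logit * label, -1)
            # average over the batch, matching the visible return statement
            return self.mean_ops(loss)
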
@@ -19,7 +19,7 @@ import mindspore.ops.operations as P
 import mindspore.ops as F
 from mindspore.common.tensor import Tensor
 from mindspore import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 
 from src.config import config_hrnetv2_w48 as config
@@ -30,7 +30,7 @@ weights_list = [0.8373, 0.918, 0.866, 1.0345,
                 1.0865, 1.1529, 1.0507]
 
-class CrossEntropyWithLogits(_Loss):
+class CrossEntropyWithLogits(LossBase):
     """
     Cross-entropy loss function for semantic segmentation,
     and different classes have the same weight.
@@ -71,7 +71,7 @@ class CrossEntropyWithLogits(_Loss):
         return loss
 
-class CrossEntropyWithLogitsAndWeights(_Loss):
+class CrossEntropyWithLogitsAndWeights(LossBase):
     """
     Cross-entropy loss function for semantic segmentation,
     and different classes have different weights.

@@ -24,12 +24,12 @@ import mindspore.ops.operations as P
 from mindspore.parallel._utils import (_get_device_num, _get_gradients_mean, _get_parallel_mode)
 from mindspore.context import ParallelMode
 from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from src.utils.config import get_args
 
 args = get_args()
 
-class SigmoidCrossEntropyWithLogits(_Loss):
+class SigmoidCrossEntropyWithLogits(LossBase):
     def __init__(self):
         super(SigmoidCrossEntropyWithLogits, self).__init__()
         self.cross_entropy = P.SigmoidCrossEntropyWithLogits()
@@ -38,7 +38,7 @@ class SigmoidCrossEntropyWithLogits(_Loss):
         x = self.cross_entropy(data, label)
         return self.get_loss(x)
 
-class D_Loss(_Loss):
+class D_Loss(LossBase):
     """
     Define Dloss.
     """
@@ -70,7 +70,7 @@ class D_WithLossCell(nn.Cell):
         pred0 = self.netD(realA, fakeB)
         return self._loss_fn(pred1, pred0)
 
-class G_Loss(_Loss):
+class G_Loss(LossBase):
     """
     Define Gloss.
     """

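Note the return self.get_loss(x) in SigmoidCrossEntropyWithLogits: get_loss is defined on the base class (both the old _Loss and the new LossBase provide it) and applies the reduction chosen at construction time, which is why these subclasses need no changes beyond the rename. Its behavior is roughly the following (a simplified sketch of the semantics, not the actual base-class source):

    import mindspore.ops as ops

    def get_loss_semantics(x, reduction='mean', weights=1.0):
        """Approximate what LossBase.get_loss(x, weights) computes."""
        x = x * weights
        if reduction == 'mean':
            return ops.ReduceMean()(x)   # average over all elements
        if reduction == 'sum':
            return ops.ReduceSum()(x)    # sum over all elements
        return x                         # reduction 'none': unreduced loss
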
@@ -18,11 +18,11 @@ loss function script.
 import mindspore.ops as ops
 import mindspore.nn as nn
 from mindspore import Tensor
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 import mindspore as ms
 import numpy as np
 
-class PrototypicalLoss(_Loss):
+class PrototypicalLoss(LossBase):
     '''
     Loss class deriving from Module for the prototypical loss function defined below
     '''

@@ -15,14 +15,14 @@
 """
 define loss function for network.
 """
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
 import mindspore.nn as nn
 
-class CrossEntropy(_Loss):
+class CrossEntropy(LossBase):
     """
     the redefined loss function with SoftmaxCrossEntropyWithLogits.
     """

@@ -24,7 +24,7 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.common import set_seed
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
@@ -41,7 +41,7 @@ parser.add_argument('--checkpoint_path', type=str, default='./ckpt_0', help='Che
 args_opt = parser.parse_args()
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -34,7 +34,7 @@ from mindspore.context import ParallelMode
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.common import set_seed
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
@@ -152,7 +152,7 @@ def warmup_cosine_annealing_lr(lr5, steps_per_epoch, warmup_epochs, max_epoch, T
     return np.array(lr_each_step).astype(np.float32)
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -17,12 +17,12 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import ops
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class SoftTargetCrossEntropy(_Loss):
+class SoftTargetCrossEntropy(LossBase):
     """SoftTargetCrossEntropy for MixUp Augment"""
     def __init__(self):
@@ -38,7 +38,7 @@ class SoftTargetCrossEntropy(_Loss):
         return self.mean_ops(loss)
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):

@@ -13,14 +13,14 @@
 # limitations under the License.
 # ============================================================================
 """loss function"""
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
 import mindspore.nn as nn
 
-class CrossEntropy(_Loss):
+class CrossEntropy(LossBase):
     """CrossEntropy"""
     def __init__(self, smooth_factor=0., num_classes=1000):
         super(CrossEntropy, self).__init__()

@@ -16,7 +16,7 @@
 import os
 import mindspore.ops as ops
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore import nn
 from mindspore import load_checkpoint, load_param_into_net
 from mindspore.ops.functional import stop_gradient
@@ -82,7 +82,7 @@ class Vgg19(nn.Cell):
         return out
 
-class ContrastLoss(_Loss):
+class ContrastLoss(LossBase):
     """[ContrastLoss]
     Args:

@@ -17,12 +17,12 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import ops
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class SoftTargetCrossEntropy(_Loss):
+class SoftTargetCrossEntropy(LossBase):
     """SoftTargetCrossEntropy for MixUp Augment"""
     def __init__(self):
@@ -38,7 +38,7 @@ class SoftTargetCrossEntropy(_Loss):
         return self.mean_ops(loss)
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):

@@ -16,7 +16,7 @@
 import os
 import mindspore.ops as ops
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore import nn
 from mindspore import load_checkpoint, load_param_into_net
 from mindspore.ops.functional import stop_gradient
@@ -108,7 +108,7 @@ class Vgg19(nn.Cell):
         return out
 
-class ContrastLoss(_Loss):
+class ContrastLoss(LossBase):
     """[ContrastLoss]
     Args:

@@ -16,12 +16,12 @@
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -13,7 +13,7 @@
 # limitations under the License.
 # ============================================================================
 """define loss function for network."""
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.common import dtype as mstype
@@ -21,7 +21,7 @@ from mindspore import Tensor
 import mindspore.nn as nn
 
-class LabelSmoothingCrossEntropy(_Loss):
+class LabelSmoothingCrossEntropy(LossBase):
     """LabelSmoothingCrossEntropy"""
     def __init__(self, smooth_factor=0.1, num_classes=1000):
         super(LabelSmoothingCrossEntropy, self).__init__()
@@ -38,7 +38,7 @@ class LabelSmoothingCrossEntropy(_Loss):
         return loss_logit
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -16,12 +16,12 @@
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

@@ -24,7 +24,7 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.common import set_seed
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
-from mindspore.nn.loss.loss import _Loss
+from mindspore.nn.loss.loss import LossBase
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
@@ -43,7 +43,7 @@ parser.add_argument('--checkpoint_path', type=str, default='./ckpt_0', help='Che
 args_opt = parser.parse_args()
 
-class CrossEntropySmooth(_Loss):
+class CrossEntropySmooth(LossBase):
     """CrossEntropy"""
     def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
         super(CrossEntropySmooth, self).__init__()

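Because LossBase is itself an nn.Cell, call sites do not change: each migrated class still instantiates like any other cell and plugs into the high-level Model API. A hypothetical usage sketch (net and dataset stand in for a real network cell and training dataset, and are not defined in this commit):

    from mindspore import Model
    import mindspore.nn as nn

    # net: a hypothetical classification network cell; dataset: a training dataset
    loss = CrossEntropySmooth(sparse=True, reduction='mean',
                              smooth_factor=0.1, num_classes=1000)
    opt = nn.Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
    model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})
    model.train(epoch=90, train_dataset=dataset)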