Exceptions

DatasetItemsException

Bases: Exception

Source code in src/super_gradients/common/exceptions/dataset_exceptions.py
class DatasetItemsException(Exception):
    def __init__(self, data_sample: Tuple, collate_type: Type, expected_item_names: Tuple):
        """
        :param data_sample:         item(s) returned by a dataset
        :param collate_type:        type of the collate that caused the exception
        :param expected_item_names: tuple of names of items that are expected by the collate to be returned from the dataset
        """
        collate_type_name = collate_type.__name__
        num_sample_items = len(data_sample) if isinstance(data_sample, tuple) else 1
        error_msg = f"`{collate_type_name}` only supports Datasets that return a tuple {expected_item_names}, but got a tuple of len={num_sample_items}"
        super().__init__(error_msg)

__init__(data_sample, collate_type, expected_item_names)

Parameters:

data_sample (Tuple, required): item(s) returned by a dataset
collate_type (Type, required): type of the collate that caused the exception
expected_item_names (Tuple, required): tuple of names of items that are expected by the collate to be returned from the dataset
Source code in src/super_gradients/common/exceptions/dataset_exceptions.py
def __init__(self, data_sample: Tuple, collate_type: Type, expected_item_names: Tuple):
    """
    :param data_sample:         item(s) returned by a dataset
    :param collate_type:        type of the collate that caused the exception
    :param expected_item_names: tuple of names of items that are expected by the collate to be returned from the dataset
    """
    collate_type_name = collate_type.__name__
    num_sample_items = len(data_sample) if isinstance(data_sample, tuple) else 1
    error_msg = f"`{collate_type_name}` only supports Datasets that return a tuple {expected_item_names}, but got a tuple of len={num_sample_items}"
    super().__init__(error_msg)
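
For illustration, the sketch below shows how a custom collate function might enforce the expected sample structure and raise DatasetItemsException. The collate class and its item names are hypothetical; only the exception usage follows the signature documented above.

from typing import List, Tuple

from super_gradients.common.exceptions.dataset_exceptions import DatasetItemsException


class MyDetectionCollate:
    """Hypothetical collate: expects every dataset sample to be an (image, targets) tuple."""

    EXPECTED_ITEM_NAMES: Tuple[str, ...] = ("image", "targets")

    def __call__(self, batch: List):
        for sample in batch:
            # Reject samples that are not tuples of the expected length.
            if not isinstance(sample, tuple) or len(sample) != len(self.EXPECTED_ITEM_NAMES):
                raise DatasetItemsException(
                    data_sample=sample,
                    collate_type=type(self),
                    expected_item_names=self.EXPECTED_ITEM_NAMES,
                )
        images, targets = zip(*batch)
        return list(images), list(targets)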

EmptyDatasetException

Bases: DatasetValidationException

Exception raised when a dataset does not contain any image for a specific configuration.

Parameters:

desc (str, required): explanation of the error
Source code in src/super_gradients/common/exceptions/dataset_exceptions.py
class EmptyDatasetException(DatasetValidationException):
    """
    Exception raised when a dataset does not have any image for a specific config

    :param desc: explanation of the error
    """

    def __init__(self, desc: str):
        self.message = "Empty Dataset: " + desc
        super().__init__(self.message)
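
A minimal sketch of the situation this exception describes, assuming a hypothetical filtering helper that drops samples which do not match the requested classes:

from super_gradients.common.exceptions.dataset_exceptions import EmptyDatasetException


def filter_annotations(annotations, class_inclusion_list):
    # Hypothetical helper: keep only samples that contain at least one allowed class.
    kept = [ann for ann in annotations if any(cls in class_inclusion_list for cls in ann["classes"])]
    if not kept:
        raise EmptyDatasetException(f"No image was found with a class in class_inclusion_list={class_inclusion_list}")
    return kept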

IllegalDatasetParameterException

Bases: DatasetValidationException

Exception raised for an illegal dataset parameter.

Parameters:

desc (str, required): Explanation of the error
Source code in src/super_gradients/common/exceptions/dataset_exceptions.py
class IllegalDatasetParameterException(DatasetValidationException):
    """
    Exception raised illegal dataset param.

    :param desc: Explanation of the error
    """

    def __init__(self, desc: str):
        self.message = "Unsupported dataset parameter format: " + desc
        super().__init__(self.message)

UnsupportedBatchItemsFormat

Bases: ValueError

Exception raised for illegal batch items returned from the data loader.

Parameters:

batch_items (tuple, required): batch items returned from data loader
Source code in src/super_gradients/common/exceptions/dataset_exceptions.py
class UnsupportedBatchItemsFormat(ValueError):
    """Exception raised illegal batch items returned from data loader.

    :param batch_items: batch items returned from data loader
    """

    def __init__(self, batch_items: tuple):
        self.message = (
            f"The data loader is expected to return 2 to 3 items, but got {len(batch_items)} instead.\n"
            "Items expected:\n"
            "   - inputs = batch_items[0] # model input - The type might depend on the model you are using.\n"
            "   - targets = batch_items[1] # Target that will be used to compute loss/metrics - The type might depend on the function you are using.\n"
            "   - [OPTIONAL] additional_batch_items = batch_items[2] # Dict made of any additional item that you might want to use.\n"
            "To fix this, please change the implementation of your dataset __getitem__ method, so that it would return the items defined above.\n"
        )
        super().__init__(self.message)
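
The message above spells out the contract the data loader is expected to fulfil. The sketch below shows a hypothetical Dataset whose __getitem__ returns items in that shape (the dataset internals are placeholders; only the 2- or 3-item return format matters):

from torch.utils.data import Dataset


class MyDataset(Dataset):
    def __init__(self, images, labels):
        self.images = images
        self.labels = labels

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        inputs = self.images[index]    # batch_items[0]: model input
        targets = self.labels[index]   # batch_items[1]: used to compute loss/metrics
        extras = {"index": index}      # batch_items[2]: optional dict of additional items
        return inputs, targets, extras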

UnknownTypeException

Bases: Exception

Type error with a message, followed by a type suggestion chosen by fuzzy matching out of the 'choices' argument passed to __init__.

Parameters:

unknown_type (str, required): The type that was not found.
choices (List, required): List of valid types
message (str, default None): Explanation of the error
Source code in src/super_gradients/common/exceptions/factory_exceptions.py
class UnknownTypeException(Exception):
    """Type error with message, followed by type suggestion, chosen by fuzzy matching
     (out of 'choices' arg passed in __init__).

    :param unknown_type:    The type that was not found.
    :param choices:         List of valid types
    :param message:         Explanation of the error
    """

    def __init__(self, unknown_type: str, choices: List, message: str = None):
        choices = [str(choice) for choice in choices]  # Ensure all choices are strings
        message = message or f"Unknown object type: {unknown_type} in configuration. valid types are: {choices}"
        err_msg_tip = ""
        if isinstance(unknown_type, str):
            choice, score, _ = process.extractOne(unknown_type, choices, scorer=fuzz.WRatio)
            if score > 70:
                err_msg_tip = f"\n Did you mean: {choice}?"
        self.message = message + err_msg_tip
        super().__init__(self.message)
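
A quick usage illustration (the type names are made up; the exact suggestion depends on the fuzzy-matching score):

from super_gradients.common.exceptions.factory_exceptions import UnknownTypeException

try:
    raise UnknownTypeException(unknown_type="resnet15", choices=["resnet18", "resnet34", "resnet50"])
except UnknownTypeException as e:
    print(e)
    # Unknown object type: resnet15 in configuration. valid types are: ['resnet18', 'resnet34', 'resnet50']
    #  Did you mean: resnet18?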

ArchitectureKwargsException

Bases: KDModelException

Exception raised when subnet architectures are not defined.

Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class ArchitectureKwargsException(KDModelException):
    """Exception raised when subnet architectures are not defined."""

    def __init__(self):
        super().__init__("When architecture is not intialized both student_architecture and teacher_architecture must be passed " "through **kwargs")

InconsistentParamsException

Bases: KDModelException

Exception raised when values in arch_params/checkpoint_params that should be equivalent are not.

Parameters:

inconsistent_key1 (str, required): Name of the key provided
inconsistent_key1_container_name (str, required): Container name of the key provided
inconsistent_key2 (str, required): Name of the key expected
inconsistent_key2_container_name (str, required): Container name of the key expected
Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class InconsistentParamsException(KDModelException):
    """Exception raised when values between arch_params/checkpoint_params should be equivalent.

    :param inconsistent_key1:                   Name of the key provided
    :param inconsistent_key1_container_name:    Container name of the key provided
    :param inconsistent_key2:                   Name of the key expected
    :param inconsistent_key2_container_name:    Container name of the key expected
    """

    def __init__(
        self,
        inconsistent_key1: str,
        inconsistent_key1_container_name: str,
        inconsistent_key2: str,
        inconsistent_key2_container_name: str,
    ):
        super().__init__(
            f"{inconsistent_key1} in {inconsistent_key1_container_name} must be equal to " f"{inconsistent_key2} in {inconsistent_key2_container_name}"
        )
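
A sketch of the kind of consistency check that raises this exception; the num_classes key and the surrounding validation function are only an example, not code from the library:

from super_gradients.common.exceptions.kd_trainer_exceptions import InconsistentParamsException


def check_num_classes_consistency(arch_params: dict, checkpoint_params: dict):
    # Hypothetical validation: both containers must agree on num_classes.
    if arch_params.get("num_classes") != checkpoint_params.get("num_classes"):
        raise InconsistentParamsException(
            inconsistent_key1="num_classes",
            inconsistent_key1_container_name="arch_params",
            inconsistent_key2="num_classes",
            inconsistent_key2_container_name="checkpoint_params",
        )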

KDModelException

Bases: Exception

Exception raised for an illegal training parameter format; base class for KDTrainer exceptions.

Parameters:

desc (str, required): Explanation of the error
Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class KDModelException(Exception):
    """Exception raised illegal training param format.

    :param desc: Explanation of the error
    """

    def __init__(self, desc: str):
        self.message = "KDTrainer: " + desc
        super().__init__(self.message)

TeacherKnowledgeException

Bases: KDModelException

Exception raised when the teacher net doesn't hold any knowledge (i.e. its weights are the initial ones).

Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class TeacherKnowledgeException(KDModelException):
    """Exception raised when teacher net doesn't hold any knowledge (i.e weights are the initial ones)."""

    def __init__(self):
        super().__init__("Expected: at least one of: teacher_pretrained_weights, teacher_checkpoint_path or load_kd_trainer_checkpoint=True")

UndefinedNumClassesException

Bases: KDModelException

Exception raised when num_classes is not defined for subnets (and cannot be derived).

Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class UndefinedNumClassesException(KDModelException):
    """Exception raised when num_classes is not defined for subnets (and cannot be derived)."""

    def __init__(self):
        super().__init__("Number of classes must be defined in students and teachers arch params or by connecting to a dataset interface")

UnsupportedKDArchitectureException

Bases: KDModelException

Exception raised for an unsupported KD architecture.

Parameters:

architecture (str, required): Name of the unsupported KD architecture
Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class UnsupportedKDArchitectureException(KDModelException):
    """Exception raised for unsupported kd architecture.

    :param architecture: Explanation of the error
    """

    def __init__(self, architecture: str):
        super().__init__("Unsupported KD architecture: " + str(architecture))

UnsupportedKDModelArgException

Bases: KDModelException

Exception raised for arguments that may be supported by Trainer but are not supported by KDTrainer.

Parameters:

param_name (str, required): Name of the param that is not supported
dict_name (str, required): Name of the dict including the param that is not supported
Source code in src/super_gradients/common/exceptions/kd_trainer_exceptions.py
class UnsupportedKDModelArgException(KDModelException):
    """Exception raised for unsupported args that might be supported for Trainer but not for KDTrainer.

    :param param_name: Name of the param that is not supported
    :param dict_name: Name of the dict including the param that is not supported
    """

    def __init__(self, param_name: str, dict_name: str):
        super().__init__(param_name + " in " + dict_name + " not supported for KD models.")

IllegalRangeForLossAttributeException

Bases: Exception

Exception raised for an illegal (out-of-range) value of a _Loss attribute.

Parameters:

range_vals (tuple, required): Range of valid values
attr_name (str, required): Name of attribute that is not in range
Source code in src/super_gradients/common/exceptions/loss_exceptions.py
class IllegalRangeForLossAttributeException(Exception):
    """
    Exception raised illegal value (i.e not in range) for _Loss attribute.
    :param range_vals: Range of valid values
    :param attr_name: Name of attribute that is not in range
    """

    def __init__(self, range_vals: tuple, attr_name: str):
        self.message = attr_name + " must be in range " + str(range_vals)
        super().__init__(self.message)

RequiredLossComponentReductionException

Bases: Exception

Exception raised for an illegal reduction of a _Loss component.

Parameters:

component_name (str, required): Name of component
reduction (str, required): Reduction provided
required_reduction (str, required): Reduction required
Source code in src/super_gradients/common/exceptions/loss_exceptions.py
class RequiredLossComponentReductionException(Exception):
    """
    Exception raised illegal reduction for _Loss component.

    :param component_name:      Name of component
    :param reduction:           Reduction provided
    :param required_reduction:  Reduction required
    """

    def __init__(self, component_name: str, reduction: str, required_reduction: str):
        self.message = component_name + ".reduction must be " + required_reduction + ", got " + reduction
        super().__init__(self.message)
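
A sketch of where such a check could live: a composite loss that aggregates per-element values itself and therefore requires its sub-components to use reduction="none" (the wrapper function is hypothetical):

import torch.nn as nn

from super_gradients.common.exceptions.loss_exceptions import RequiredLossComponentReductionException


def check_component_reduction(component: nn.modules.loss._Loss, component_name: str, required_reduction: str = "none"):
    # Hypothetical check run by a composite loss before wrapping the component.
    if component.reduction != required_reduction:
        raise RequiredLossComponentReductionException(component_name, component.reduction, required_reduction)


check_component_reduction(nn.CrossEntropyLoss(reduction="none"), "ce_loss")   # passes silently
# check_component_reduction(nn.CrossEntropyLoss(), "ce_loss")                 # would raise (default reduction is "mean")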

GPUModeNotSetupError

Bases: Exception

Exception raised when DDP should be set up but is not.

Source code in src/super_gradients/common/exceptions/sg_trainer_exceptions.py
class GPUModeNotSetupError(Exception):
    """Exception raised when the DDP should be setup but is not."""

    def __init__(self):
        super().__init__(
            "Your environment was not setup to support DDP. Please run at the beginning of your script:\n"
            ">>> from super_gradients.common.environment.env_helpers import init_trainer\n"
            ">>> setup_device(multi_gpu=..., num_gpus=...)\n"
        )
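
The fix the message points at is to configure the device mode before the Trainer is built. A sketch, assuming setup_device is importable from super_gradients.training.utils.distributed_training_utils (import locations can vary between versions) and with hypothetical values for multi_gpu/num_gpus:

from super_gradients.common.environment.env_helpers import init_trainer
from super_gradients.training.utils.distributed_training_utils import setup_device

init_trainer()
setup_device(multi_gpu="DDP", num_gpus=2)  # example values; match them to your hardware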

IllegalDataloaderInitialization

Bases: Exception

Exception raised for illegally initialized data loaders.

Source code in src/super_gradients/common/exceptions/sg_trainer_exceptions.py
class IllegalDataloaderInitialization(Exception):
    """Exception raised illegal data loaders."""

    def __init__(self):
        super().__init__("train_loader, valid_loader and class parameters are required when initializing Trainer with data loaders")

UnsupportedOptimizerFormat

Bases: UnsupportedTrainingParameterFormat

Exception raised for an illegal optimizer format.

Source code in src/super_gradients/common/exceptions/sg_trainer_exceptions.py
class UnsupportedOptimizerFormat(UnsupportedTrainingParameterFormat):
    """Exception raised illegal optimizer format."""

    def __init__(self):
        super().__init__("optimizer parameter expected one of ['Adam','SGD','RMSProp'], or torch.optim.Optimizer object")

UnsupportedTrainingParameterFormat

Bases: Exception

Exception raised for an illegal training parameter format.

Parameters:

desc (str, required): Explanation of the error
Source code in src/super_gradients/common/exceptions/sg_trainer_exceptions.py
class UnsupportedTrainingParameterFormat(Exception):
    """Exception raised illegal training param format.

    :param desc: Explanation of the error
    """

    def __init__(self, desc: str):
        self.message = "Unsupported training parameter format: " + desc
        super().__init__(self.message)