one_of

OneOf

Bases: NumpyOp

Perform one of several possible NumpyOps.

Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `*numpy_ops` | `NumpyOp` | Ops to choose between with a specified (or uniform) probability. | `()` |
| `probs` | `Optional[List[float]]` | List of probabilities, must sum to 1. When None, the probabilities will be equally distributed. | `None` |
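
A minimal usage sketch follows. The `Pipeline` wiring and the `HorizontalFlip`/`Rotate` import paths are assumptions drawn from the wider FastEstimator API rather than this page, so verify them against your installed version:

```python
import fastestimator as fe
from fastestimator.op.numpyop.meta.one_of import OneOf
from fastestimator.op.numpyop.multivariate import HorizontalFlip, Rotate  # assumed paths

pipeline = fe.Pipeline(
    train_data=train_ds,  # placeholder: any dataset yielding an "x" image key
    ops=[
        # Each sample receives exactly one of the two augmentations:
        # a horizontal flip 30% of the time, a rotation 70% of the time.
        OneOf(
            HorizontalFlip(image_in="x", image_out="x"),
            Rotate(image_in="x", image_out="x", limit=45),
            probs=[0.3, 0.7],
        ),
    ])
```

Note that both wrapped ops read and write the same key, which satisfies the constructor's requirement that every op in a OneOf share identical inputs, outputs, mode, and ds_id.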
Source code in fastestimator/fastestimator/op/numpyop/meta/one_of.py
import inspect
from typing import Any, Dict, List, Optional, Union

import numpy as np

from fastestimator.op.numpyop.numpyop import Batch, NumpyOp
from fastestimator.util.traceability_util import traceable


@traceable()
class OneOf(NumpyOp):
    """Perform one of several possible NumpyOps.

    Args:
        *numpy_ops: Ops to choose between with a specified (or uniform) probability.
        probs: List of probabilities, must sum to 1. When None, the probabilities will be equally distributed.
    """
    def __init__(self, *numpy_ops: NumpyOp, probs: Optional[List[float]] = None) -> None:
        inputs = numpy_ops[0].inputs
        outputs = numpy_ops[0].outputs
        mode = numpy_ops[0].mode
        ds_id = numpy_ops[0].ds_id
        super().__init__(inputs=inputs, outputs=outputs, mode=mode, ds_id=ds_id)
        self.in_list = numpy_ops[0].in_list
        self.out_list = numpy_ops[0].out_list
        for op in numpy_ops[1:]:
            assert not isinstance(op, Batch), "Cannot nest the Batch op inside OneOf"
            assert inputs == op.inputs, "All ops within a OneOf must share the same inputs"
            assert self.in_list == op.in_list, "All ops within OneOf must share the same input configuration"
            assert outputs == op.outputs, "All ops within a OneOf must share the same outputs"
            assert self.out_list == op.out_list, "All ops within OneOf must share the same output configuration"
            assert mode == op.mode, "All ops within a OneOf must share the same mode"
            assert ds_id == op.ds_id, "All ops within a OneOf must share the same ds_id"
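        # When explicit probabilities are given, they must cover every op and form
        # a valid distribution; if probs is None, selection falls back to uniform.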
        if probs:
            assert len(numpy_ops) == len(probs), "The number of probabilities does not match the number of Operators"
            assert abs(sum(probs) - 1) < 1e-8, "Probabilities must sum to 1"
        self.ops = numpy_ops
        self.probs = probs

    def __getstate__(self) -> Dict[str, List[Dict[Any, Any]]]:
        return {'ops': [elem.__getstate__() if hasattr(elem, '__getstate__') else {} for elem in self.ops]}

    def set_rua_level(self, magnitude_coef: float) -> None:
        """Set the augmentation intensity based on the magnitude_coef.

        This method is specifically designed to be invoked by the RUA Op.

        Args:
            magnitude_coef: The desired augmentation intensity (range [0-1]).

        Raises:
            AttributeError: If ops don't have a 'set_rua_level' method.
        """
        for op in self.ops:
            if hasattr(op, "set_rua_level") and inspect.ismethod(getattr(op, "set_rua_level")):
                op.set_rua_level(magnitude_coef=magnitude_coef)
            else:
                raise AttributeError(
                    "RUA Augmentations should have a 'set_rua_level' method but it's not present in Op: {}".format(
                        op.__class__.__name__))

    def forward(self, data: Union[np.ndarray, List[np.ndarray]],
                state: Dict[str, Any]) -> Union[np.ndarray, List[np.ndarray]]:
        """Execute a randomly selected op from the list of `numpy_ops`.

        Args:
            data: The information to be passed to one of the wrapped operators.
            state: Information about the current execution context, for example {"mode": "train"}.

        Returns:
            The `data` after application of one of the available NumpyOps.
        """
        return np.random.choice(self.ops, p=self.probs).forward(data, state)

    def forward_batch(self,
                      data: Union[np.ndarray, List[np.ndarray]],
                      state: Dict[str, Any]) -> Union[np.ndarray, List[np.ndarray]]:
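        # Batch-mode counterpart of forward(): one weighted draw selects an op,
        # whose forward_batch is then applied to the entire batch at once.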
        return np.random.choice(self.ops, p=self.probs).forward_batch(data, state)

forward

Execute a randomly selected op from the list of numpy_ops.

Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `data` | `Union[ndarray, List[ndarray]]` | The information to be passed to one of the wrapped operators. | required |
| `state` | `Dict[str, Any]` | Information about the current execution context, for example {"mode": "train"}. | required |

Returns:

| Type | Description |
| ---- | ----------- |
| `Union[ndarray, List[ndarray]]` | The `data` after application of one of the available NumpyOps. |

Source code in fastestimator/fastestimator/op/numpyop/meta/one_of.py
def forward(self, data: Union[np.ndarray, List[np.ndarray]],
            state: Dict[str, Any]) -> Union[np.ndarray, List[np.ndarray]]:
    """Execute a randomly selected op from the list of `numpy_ops`.

    Args:
        data: The information to be passed to one of the wrapped operators.
        state: Information about the current execution context, for example {"mode": "train"}.

    Returns:
        The `data` after application of one of the available NumpyOps.
    """
    return np.random.choice(self.ops, p=self.probs).forward(data, state)
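
Under the hood the selection is a single weighted draw via `np.random.choice`. A standalone sketch of that mechanism (the op names and sample counts below are illustrative, not from the source):

```python
import numpy as np

# Stand-ins for wrapped NumpyOps; forward() makes one weighted draw like this.
ops = ["flip", "rotate", "blur"]
probs = [0.2, 0.3, 0.5]  # must sum to 1; probs=None would mean a uniform draw

picks = [np.random.choice(ops, p=probs) for _ in range(10_000)]
print({op: picks.count(op) / len(picks) for op in ops})
# Frequencies land close to the requested probabilities, e.g.
# {'flip': 0.199, 'rotate': 0.302, 'blur': 0.499}
```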

set_rua_level

Set the augmentation intensity based on the magnitude_coef.

This method is specifically designed to be invoked by the RUA Op.

Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `magnitude_coef` | `float` | The desired augmentation intensity (range [0-1]). | required |

Raises:

| Type | Description |
| ---- | ----------- |
| `AttributeError` | If ops don't have a 'set_rua_level' method. |

Source code in fastestimator/fastestimator/op/numpyop/meta/one_of.py
def set_rua_level(self, magnitude_coef: float) -> None:
    """Set the augmentation intensity based on the magnitude_coef.

    This method is specifically designed to be invoked by the RUA Op.

    Args:
        magnitude_coef: The desired augmentation intensity (range [0-1]).

    Raises:
        AttributeError: If ops don't have a 'set_rua_level' method.
    """
    for op in self.ops:
        if hasattr(op, "set_rua_level") and inspect.ismethod(getattr(op, "set_rua_level")):
            op.set_rua_level(magnitude_coef=magnitude_coef)
        else:
            raise AttributeError(
                "RUA Augmentations should have a 'set_rua_level' method but it's not present in Op: {}".format(
                    op.__class__.__name__))
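
For an op to be wrapped in a OneOf that RUA drives, it must expose a `set_rua_level` method of its own. A minimal hypothetical example of that contract (the `AddNoise` class and its scaling rule are invented for illustration, not part of FastEstimator):

```python
from typing import Any, Dict

import numpy as np

from fastestimator.op.numpyop.numpyop import NumpyOp


class AddNoise(NumpyOp):
    """Hypothetical RUA-compatible op: adds Gaussian noise scaled by the RUA magnitude."""
    def __init__(self, inputs, outputs, mode=None):
        super().__init__(inputs=inputs, outputs=outputs, mode=mode)
        self.scale = 0.1  # default intensity before RUA tunes it

    def set_rua_level(self, magnitude_coef: float) -> None:
        # RUA passes magnitude_coef in [0, 1]; map it onto this op's own range.
        self.scale = 0.2 * magnitude_coef

    def forward(self, data: np.ndarray, state: Dict[str, Any]) -> np.ndarray:
        return data + np.random.normal(scale=self.scale, size=data.shape)
```

Because OneOf's `set_rua_level` raises `AttributeError` on the first wrapped op that lacks this method, every op in the group needs an equivalent hook before the whole OneOf can participate in RUA.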