Privacy

Classes

DPController

DPController(dp_args=None)

Controls DP action during training.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `dp_args` | `Union[Dict, None]` | Arguments for differential privacy | `None` |

Source code in fedbiomed/common/privacy/_dp_controller.py
def __init__(self, dp_args: Union[Dict, None] = None) -> None:
    """Constructs DPController with given model.

    Args:
        dp_args: Arguments for differential privacy
    """
    self._privacy_engine = PrivacyEngine()
    self._dp_args = dp_args or {}
    self._is_active = dp_args is not None
    # Configure/validate dp arguments
    if self._is_active:
        self._configure_dp_args()
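
A minimal construction sketch. The `'sigma'` and `'clip'` keys are the ones read in `before_training` below; the `'type'` key (local vs. central DP) is an assumption about what the internal argument validation expects and does not appear in the source shown on this page, so treat it as illustrative. The import path is inferred from the source path above.

```python
# Sketch only: 'sigma' and 'clip' are read in `before_training`; the 'type'
# key is an assumption about `_configure_dp_args` and may differ.
from fedbiomed.common.privacy import DPController  # import path assumed from the source path above

# Inactive controller: with dp_args=None every hook is a no-op.
plain_controller = DPController()

# Active controller: noise multiplier and per-sample gradient clipping norm.
dp_controller = DPController(dp_args={'type': 'local', 'sigma': 0.4, 'clip': 0.005})
```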

Functions

after_training
after_training(params)

DP actions after the training.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `params` | `Dict` | Contains model parameters after training with DP | *required* |


Returns:

| Type | Description |
|------|-------------|
| `Dict` | `params` fixed model parameters after applying differential privacy |

Source code in fedbiomed/common/privacy/_dp_controller.py
def after_training(self, params: Dict) -> Dict:
    """DP actions after the training.

    Args:
        params: Contains model parameters after training with DP
    Returns:
        `params` fixed model parameters after applying differential privacy
    """
    if self._is_active:
        params = self._postprocess_dp(params)
    return params
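
A hedged usage sketch, reusing the `dp_controller` built in the constructor example above. The parameter dictionary is illustrative only; in practice the keys and tensors come from the model exported by the training plan after DP training.

```python
import torch

# Illustrative parameter dict; real keys come from the trained model's state.
params = {'fc.weight': torch.randn(10, 4), 'fc.bias': torch.zeros(10)}

# No-op when DP is inactive; otherwise post-processes the parameters.
params = dp_controller.after_training(params)
```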
before_training
before_training(optimizer, loader)

DP action before starting training.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `optimizer` | `NativeTorchOptimizer` | NativeTorchOptimizer for training | *required* |
| `loader` | `DataLoader` | Data loader for training | *required* |


Returns:

| Type | Description |
|------|-------------|
| `Tuple[NativeTorchOptimizer, DPDataLoader]` | Optimizer and data loader with differential privacy applied |


Source code in fedbiomed/common/privacy/_dp_controller.py
def before_training(self,
                    optimizer: NativeTorchOptimizer,
                    loader: DataLoader) -> Tuple[NativeTorchOptimizer, DPDataLoader]:
    """DP action before starting training.

    Args:
        optimizer: NativeTorchOptimizer for training
        loader: Data loader for training

    Returns:
        Differential privacy applied Optimizer and data loader
    """


    if self._is_active:
        if not isinstance(optimizer.optimizer, torch.optim.Optimizer):
            raise FedbiomedDPControllerError(
                f"{ErrorNumbers.FB616.value}: "
                f"Optimizer must be an instance of torch.optim.Optimizer, but got {optimizer}"
                "\nDeclearn optimizers are not yet compatible with Differential Privacy"
        )
        if not isinstance(loader, DataLoader):
            raise FedbiomedDPControllerError(
                f"{ErrorNumbers.FB616.value}: "
                "Data loader must be an instance of torch.utils.data.DataLoader"
            )
        try:
            optimizer._model.model, optimizer.optimizer, loader = self._privacy_engine.make_private(
                module=optimizer._model.model,
                optimizer=optimizer.optimizer,
                data_loader=loader,
                noise_multiplier=float(self._dp_args['sigma']),
                max_grad_norm=float(self._dp_args['clip'])
            )
        except Exception as e:
            raise FedbiomedDPControllerError(
                f"{ErrorNumbers.FB616.value}: "
                f"Error while running privacy engine: {e}"
            )
    return optimizer, loader
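
The sketch below shows where this hook sits relative to a torch optimizer and a `DataLoader`, again reusing the `dp_controller` from the constructor example. The `TorchModel` / `NativeTorchOptimizer` construction, including the import paths, is an assumption made for illustration and may not match the actual Fed-BioMed API; only the `before_training` call itself, and the need to reuse its return values, come from the source above.

```python
# Sketch only. The TorchModel / NativeTorchOptimizer construction is an
# assumption about the Fed-BioMed wrappers (paths and signatures may differ);
# only the `before_training` call itself is documented on this page.
import torch
from torch.utils.data import DataLoader, TensorDataset

from fedbiomed.common.models import TorchModel                                    # assumed path
from fedbiomed.common.optimizers.generic_optimizers import NativeTorchOptimizer   # assumed path

net = torch.nn.Linear(4, 2)
model = TorchModel(net)                                                            # assumed signature
optimizer = NativeTorchOptimizer(model, torch.optim.SGD(net.parameters(), lr=0.01))  # assumed signature
loader = DataLoader(TensorDataset(torch.randn(32, 4), torch.randn(32, 2)), batch_size=8)

# The returned optimizer and loader must replace the originals for the rest of
# training: Opacus' PrivacyEngine has wrapped the model, optimizer and loader.
optimizer, loader = dp_controller.before_training(optimizer, loader)
```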
validate_and_fix_model
validate_and_fix_model(model)

Validate and fix model to be DP-compliant.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `model` | `Module` | An instance of `Module` | *required* |


Returns:

| Type | Description |
|------|-------------|
| `Module` | Fixed or validated model |


Source code in fedbiomed/common/privacy/_dp_controller.py
def validate_and_fix_model(self, model: Module) -> Module:
    """Validate and Fix model to be DP-compliant.

    Args:
        model: An instance of [`Module`][torch.nn.Module]

    Returns:
        Fixed or validated model
    """
    if self._is_active and not ModuleValidator.is_valid(model):
        try:
            model = ModuleValidator.fix(model)
        except Exception as e:
            raise FedbiomedDPControllerError(
                f"{ErrorNumbers.FB616.value}: "
                f"Error while making model DP-compliant: {e}"
            )
    return model
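
A short sketch of the fix path. BatchNorm layers are a classic example of modules Opacus cannot handle, so when DP is active `ModuleValidator.fix` typically swaps them for compliant alternatives (e.g. GroupNorm); with DP inactive the model is returned untouched. The toy module below is only an illustration, and `dp_controller` is the instance built in the constructor example.

```python
import torch.nn as nn

# Toy model containing a layer that is not DP-compliant (BatchNorm).
model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.ReLU())

# With DP active, incompatible layers are replaced by compliant ones;
# with DP inactive the model passes through unchanged.
model = dp_controller.validate_and_fix_model(model)
```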