This repository has been archived by the owner on Apr 1, 2024. It is now read-only.

Use Opacus to train language model privately #3

Open · wants to merge 2 commits into base: dp-patch

16 changes: 15 additions & 1 deletion fairseq/models/lstm.py
@@ -679,7 +679,21 @@ def LSTM(input_size, hidden_size, **kwargs):
 
 
 def LSTMCell(input_size, hidden_size, **kwargs):
-    m = nn.LSTMCell(input_size, hidden_size, **kwargs)
+    # ROUTE 1: from OpenDP
+    # from opendp.network.layers.lstm import DPLSTMCell as DPLSTMCellOpenDP
+    # LSTMCellProxy = DPLSTMCellOpenDP
+
+    # ROUTE 2: from Opacus
+    print("using opacus")
+    from opacus.layers.dp_rnn import DPLSTMCell as DPLSTMCellOpacus
+    def DPLSTMCellOpacusWrapper(input_size: int, hidden_size: int, bias: bool = True):
+        return DPLSTMCellOpacus(input_size, hidden_size, bias)
+    LSTMCellProxy = DPLSTMCellOpacusWrapper
+
+    # ROUTE 3: non-DP from PyTorch
+    # LSTMCellProxy = nn.LSTMCell
+
+    m = LSTMCellProxy(input_size, hidden_size, **kwargs)
     for name, param in m.named_parameters():
         if "weight" in name or "bias" in name:
             param.data.uniform_(-0.1, 0.1)
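Note: a minimal sketch of the drop-in contract being relied on here, assuming Opacus 1.x, where DPLSTMCell is meant to mirror nn.LSTMCell's call signature (the shapes below are illustrative, not from this PR):

import torch
from opacus.layers.dp_rnn import DPLSTMCell

# One recurrent step: the same (input, (h, c)) -> (h, c) contract as
# nn.LSTMCell, implemented so Opacus can compute per-sample gradients.
cell = DPLSTMCell(input_size=8, hidden_size=16, bias=True)
x = torch.randn(4, 8)      # batch of 4, feature dim 8
h0 = torch.zeros(4, 16)
c0 = torch.zeros(4, 16)
h1, c1 = cell(x, (h0, c0))

The uniform_(-0.1, 0.1) re-initialization in the hunk above should still apply, since DPLSTMCell is an nn.Module whose parameter names also contain "weight" and "bias".
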
4 changes: 4 additions & 0 deletions fairseq/optim/adam.py
@@ -65,6 +65,10 @@ def __init__(self, cfg: FairseqAdamConfig, params):
         else:
             self._optimizer = Adam(params, **self.optimizer_config)
 
+        # this optimizer doesn't inherit from torch.optim.Optimizer
+        # so it has to be patched up to work with Opacus
+        self.state = None
+
     @property
     def optimizer_config(self):
         """
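Note: as far as I can tell, Opacus 1.x's DPOptimizer reads bookkeeping attributes such as param_groups and state off whatever optimizer it wraps, which is why exposing a bare .state attribute on the fairseq wrapper is enough. A rough standalone sketch of that handoff (the toy model and hyperparameters are placeholders):

import torch
from opacus.optimizers import DPOptimizer

model = torch.nn.Linear(4, 2)
inner = torch.optim.Adam(model.parameters())

# DPOptimizer mirrors the wrapped optimizer's bookkeeping attributes,
# so anything handed to it must at least expose them.
dp_opt = DPOptimizer(
    optimizer=inner,
    noise_multiplier=1.0,
    max_grad_norm=1.0,
    expected_batch_size=8,
)
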
16 changes: 16 additions & 0 deletions fairseq_cli/train.py
@@ -14,6 +14,10 @@
 import sys
 from typing import Dict, Optional, Any, List, Tuple, Callable
 
+from opacus.privacy_engine import PrivacyEngine
+# from opendp.network.odometer_reconstruction import ReconstructionPrivacyOdometer
+from torch.utils.data.dataloader import DataLoader
+
 # We need to setup root logger before importing any fairseq libraries.
 logging.basicConfig(
     format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
@@ -242,6 +246,18 @@ def train(
         fix_batches_to_gpus=cfg.distributed_training.fix_batches_to_gpus,
         shuffle=(epoch_itr.next_epoch_idx > cfg.dataset.curriculum),
     )
+    print('TRAINING WITH PRIVACY.')
+    # trainer.odometer = ReconstructionPrivacyOdometer(step_epsilon=100.)
+    # trainer.odometer.track_(trainer.model)
+
+    trainer.engine = PrivacyEngine()
+    trainer._model, trainer._optimizer, _ = trainer.engine.make_private(
+        module=trainer.model,
+        optimizer=trainer.optimizer,
+        data_loader=DataLoader([[]]),
+        noise_multiplier=1.0,
+        max_grad_norm=1.0,
+    )
     update_freq = (
         cfg.optimization.update_freq[epoch_itr.epoch - 1]
         if epoch_itr.epoch <= len(cfg.optimization.update_freq)
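Note: data_loader=DataLoader([[]]) is a placeholder, presumably because fairseq drives its own epoch iterator, and the loader returned by make_private (the discarded _) is never used. For comparison, the canonical standalone Opacus 1.x wiring looks roughly like this (the model and data are toy placeholders):

import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
dataset = TensorDataset(torch.randn(32, 10), torch.randint(0, 2, (32,)))
loader = DataLoader(dataset, batch_size=8)

engine = PrivacyEngine()
model, optimizer, loader = engine.make_private(
    module=model,
    optimizer=optimizer,
    data_loader=loader,
    noise_multiplier=1.0,  # sigma of the Gaussian noise added per step
    max_grad_norm=1.0,     # per-sample gradient clipping bound
)

for x, y in loader:
    optimizer.zero_grad()
    loss = F.cross_entropy(model(x), y)
    loss.backward()
    optimizer.step()  # clips per-sample grads, adds noise, then updates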