
Commit 4411311

trying to use multiprocess with transformers
1 parent 0d7f3cf commit 4411311

File tree

1 file changed (+1, −2 lines changed)


delft/sequenceLabelling/trainer.py

Lines changed: 1 addition & 2 deletions
@@ -2,7 +2,6 @@
 
 import numpy as np
 import tensorflow as tf
-import wandb
 from tensorflow.keras.callbacks import Callback, EarlyStopping, ModelCheckpoint
 from transformers import create_optimizer
 
@@ -197,7 +196,7 @@ def train_model(self, local_model, x_train, y_train, f_train=None,
         multiprocessing = self.training_config.multiprocessing
 
         # multiple workers should work with transformer layers, but not with ELMo due to GPU memory limit (with GTX 1080Ti 11GB)
-        if self.model_config.transformer_name is not None or (self.embeddings and self.embeddings.use_ELMo):
+        if self.embeddings and self.embeddings.use_ELMo:
             # worker at 0 means the training will be executed in the main thread
             nb_workers = 0
             multiprocessing = False
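
Note: the nb_workers / multiprocessing values resolved in this block are later handed to Keras' fit() inside train_model. Below is a minimal sketch of how such flags are typically consumed with generator-based input; the function name and its arguments are illustrative placeholders, not the actual DeLFT code.

    def fit_with_workers(model, training_generator, validation_generator,
                         nb_workers, multiprocessing, max_epoch, callbacks):
        # workers=0 keeps data loading in the main thread;
        # use_multiprocessing=False avoids spawning worker processes
        # (still required for ELMo because of the GPU memory limit noted above).
        model.fit(
            training_generator,
            validation_data=validation_generator,
            epochs=max_epoch,
            workers=nb_workers,
            use_multiprocessing=multiprocessing,
            callbacks=callbacks)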
