
Commit

trying to use multiprocess with transformers
lfoppiano committed Nov 6, 2024
1 parent 0d7f3cf commit 4411311
Showing 1 changed file with 1 addition and 2 deletions.
3 changes: 1 addition & 2 deletions delft/sequenceLabelling/trainer.py
@@ -2,7 +2,6 @@

 import numpy as np
 import tensorflow as tf
-import wandb
 from tensorflow.keras.callbacks import Callback, EarlyStopping, ModelCheckpoint
 from transformers import create_optimizer

@@ -197,7 +196,7 @@ def train_model(self, local_model, x_train, y_train, f_train=None,
         multiprocessing = self.training_config.multiprocessing

         # multiple workers should work with transformer layers, but not with ELMo due to GPU memory limit (with GTX 1080Ti 11GB)
-        if self.model_config.transformer_name is not None or (self.embeddings and self.embeddings.use_ELMo):
+        if self.embeddings and self.embeddings.use_ELMo:
             # worker at 0 means the training will be executed in the main thread
             nb_workers = 0
             multiprocessing = False
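For context, the effect of the changed condition can be summarised in a small standalone sketch. This is a hypothetical helper, not part of the DeLFT code; the argument names and the idea that nb_workers/multiprocessing are later handed to the Keras fit call are assumptions based only on the diff above.

    # Sketch of the worker/multiprocessing decision after this commit (hypothetical
    # helper mirroring the condition shown in the diff; not the actual DeLFT code).
    def worker_settings(embeddings, training_config, default_workers):
        nb_workers = default_workers
        multiprocessing = training_config.multiprocessing
        # Before this commit, transformer models also hit this branch;
        # now only ELMo embeddings force single-threaded training.
        if embeddings and embeddings.use_ELMo:
            # 0 workers means training runs in the main thread
            nb_workers = 0
            multiprocessing = False
        return nb_workers, multiprocessing

In other words, with a transformer layer the configured number of workers and the multiprocessing flag are now left untouched, while ELMo embeddings still fall back to single-threaded training because of the GPU memory limit noted in the code comment.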
