From df47de686aeb3515e9989a5180a9af53af7a581f Mon Sep 17 00:00:00 2001
From: Benedikt Fuchs
Date: Mon, 19 Jun 2023 09:23:01 +0200
Subject: [PATCH] fix multiple arguments destination

---
 flair/embeddings/token.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/flair/embeddings/token.py b/flair/embeddings/token.py
index 771c3a6397..21c124f295 100644
--- a/flair/embeddings/token.py
+++ b/flair/embeddings/token.py
@@ -428,14 +428,14 @@ def to_params(self) -> Dict[str, Any]:
             "embedding_length": self.__embedding_length,
         }
 
-    def state_dict(self, *args, destination=None, prefix="", keep_vars=False):
+    def state_dict(self, *args, **kwargs):
         # when loading the old versions from pickle, the embeddings might not be added as pytorch module.
         # we do this delayed, when the weights are collected (e.g. for saving), as doing this earlier might
         # lead to issues while loading (trying to load weights that weren't stored as python weights and therefore
         # not finding them)
         if list(self.modules()) == [self]:
             self.embedding = self.embedding
-        return super().state_dict(*args, destination=destination, prefix=prefix, keep_vars=keep_vars)
+        return super().state_dict(*args, **kwargs)
 
 
 @register_embeddings
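
Note (not part of the patch): below is a minimal standalone sketch of the failure the subject line refers to. The class names Base, OldStyle and NewStyle are hypothetical stand-ins, and Base assumes a parent whose state_dict takes destination as a regular (not keyword-only) parameter, which is what a caller passing destination positionally effectively hits. With the removed signature, a positional destination lands in *args and is then forwarded a second time as the destination keyword; forwarding **kwargs passes whatever the caller supplied exactly once.

# sketch only; Base stands in for a parent class whose state_dict accepts
# destination as a regular positional parameter (hypothetical, no torch dependency)
class Base:
    def state_dict(self, destination=None, prefix="", keep_vars=False):
        return {"destination": destination, "prefix": prefix, "keep_vars": keep_vars}


class OldStyle(Base):
    # the signature removed by the patch: a positional destination is captured in *args
    # and destination is then forwarded a second time as a keyword
    def state_dict(self, *args, destination=None, prefix="", keep_vars=False):
        return super().state_dict(*args, destination=destination, prefix=prefix, keep_vars=keep_vars)


class NewStyle(Base):
    # the signature introduced by the patch: arguments are forwarded once, unchanged
    def state_dict(self, *args, **kwargs):
        return super().state_dict(*args, **kwargs)


try:
    OldStyle().state_dict({})  # destination passed positionally
except TypeError as e:
    print(e)  # state_dict() got multiple values for argument 'destination'

print(NewStyle().state_dict({}))  # {'destination': {}, 'prefix': '', 'keep_vars': False}

Forwarding *args and **kwargs untouched also keeps the override independent of the exact parameter list of the parent's state_dict, so it works whether the caller uses positional or keyword arguments.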