We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 12e6e86 · commit e25f0cd (Copy full SHA for e25f0cd)
1 file changed
turftopic/late.py
@@ -54,7 +54,10 @@ def _encode_tokens(
54
"""
55
self.has_used_token_level = True
56
token_embeddings = self.encode(
57
- texts, output_value="token_embeddings", batch_size=batch_size
+ texts,
58
+ output_value="token_embeddings",
59
+ batch_size=batch_size,
60
+ show_progress_bar=show_progress_bar,
61
)
62
offsets = self.tokenizer(
63
texts, return_offsets_mapping=True, verbose=False
@@ -63,7 +66,7 @@ def _encode_tokens(
66
offs[: len(embs)] for offs, embs in zip(offsets, token_embeddings)
64
67
]
65
68
token_embeddings = [
- embs.numpy(force=True)
69
+ embs.float().numpy(force=True)
70
for embs in token_embeddings
71
if torch.is_tensor(embs)
72
0 commit comments