Skip to content

Commit 149d878

Browse files
Merge pull request #130 from x-tabdeveloping/bfloat_fix
Bfloat fix
2 parents 12e6e86 + 1693475 commit 149d878

2 files changed

Lines changed: 6 additions & 3 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ profile = "black"
99

1010
[project]
1111
name = "turftopic"
12-
version = "0.25.1"
12+
version = "0.25.2"
1313
description = "Topic modeling with contextual representations from sentence transformers."
1414
authors = [
1515
{ name = "Márton Kardos <power.up1163@gmail.com>", email = "martonkardos@cas.au.dk" }

turftopic/late.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,10 @@ def _encode_tokens(
5454
"""
5555
self.has_used_token_level = True
5656
token_embeddings = self.encode(
57-
texts, output_value="token_embeddings", batch_size=batch_size
57+
texts,
58+
output_value="token_embeddings",
59+
batch_size=batch_size,
60+
show_progress_bar=show_progress_bar,
5861
)
5962
offsets = self.tokenizer(
6063
texts, return_offsets_mapping=True, verbose=False
@@ -63,7 +66,7 @@ def _encode_tokens(
6366
offs[: len(embs)] for offs, embs in zip(offsets, token_embeddings)
6467
]
6568
token_embeddings = [
66-
embs.numpy(force=True)
69+
embs.float().numpy(force=True)
6770
for embs in token_embeddings
6871
if torch.is_tensor(embs)
6972
]

0 commit comments

Comments (0)