Skip to content

Commit 3c6fcb9

Browse files
authored
Fix caching for other APIs as well (#23)
1 parent 6c7d0d2 commit 3c6fcb9

File tree

3 files changed

+5
-3
lines changed

3 files changed

+5
-3
lines changed

tokenizer_ts/package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

tokenizer_ts/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
"name": "@microsoft/tiktokenizer",
33
"displayName": "tiktokenizer",
44
"description": "Tokenizer for OpenAI large language models.",
5-
"version": "1.0.2",
5+
"version": "1.0.3",
66
"author": {
77
"name": "Microsoft Corporation"
88
},

tokenizer_ts/src/tikTokenizer.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,7 @@ export class TikTokenizer {
253253
const bytes = this.textEncoder.encode(piece);
254254
const token = this.encoder!.get(uint8ArrayToString(bytes));
255255
if (token !== undefined) {
256+
this.cache.set(piece, [token]);
256257
tokenCount++;
257258
if (tokenCount <= maxTokenCount) {
258259
encodeLength += piece.length;
@@ -397,6 +398,7 @@ export class TikTokenizer {
397398
const bytes = new TextEncoder().encode(piece);
398399
const token = this.encoder!.get(uint8ArrayToString(bytes));
399400
if (token !== undefined) {
401+
this.cache.set(piece, [token]);
400402
tokenCount++;
401403
encodeLength += piece.length;
402404
tokenIds.push(token);

0 commit comments

Comments (0)