fix: Tokenizer dependency (#30)

Loc Bui
2024-03-18 12:57:04 -07:00
committed by GitHub
parent a7b2b54f18
commit 0588abec77

@@ -135,14 +135,14 @@ class LLMEvaluator {
     enum LoadState {
         case idle
-        case loaded(LLMModel, LLM.Tokenizer)
+        case loaded(LLMModel, Tokenizers.Tokenizer)
     }
     var loadState = LoadState.idle
     /// load and return the model -- can be called multiple times, subsequent calls will
     /// just return the loaded model
-    func load() async throws -> (LLMModel, LLM.Tokenizer) {
+    func load() async throws -> (LLMModel, Tokenizers.Tokenizer) {
         switch loadState {
         case .idle:
             // limit the buffer cache
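
For context, the fix swaps the tokenizer type used by LLMEvaluator from the LLM module's own LLM.Tokenizer to Tokenizers.Tokenizer, the tokenizer protocol exported by the Tokenizers module (from the Hugging Face swift-transformers package, going by the module name). Below is a minimal sketch of the load-once pattern this hunk touches, assuming LLMModel and the Tokenizers import come from the surrounding project; loadModelAndTokenizer() is a hypothetical stand-in for the setup code elided after "// limit the buffer cache".

import Tokenizers

class LLMEvaluator {
    enum LoadState {
        case idle
        case loaded(LLMModel, Tokenizers.Tokenizer)
    }

    var loadState = LoadState.idle

    /// Load and return the model -- can be called multiple times; subsequent
    /// calls just return the already-loaded pair.
    func load() async throws -> (LLMModel, Tokenizers.Tokenizer) {
        switch loadState {
        case .idle:
            // First call: do the real work, cache the result, then return it.
            let (model, tokenizer) = try await loadModelAndTokenizer()
            loadState = .loaded(model, tokenizer)
            return (model, tokenizer)
        case .loaded(let model, let tokenizer):
            // Later calls: reuse the pair that was loaded earlier.
            return (model, tokenizer)
        }
    }

    // Hypothetical helper: in the real file the model and tokenizer are
    // set up here (the part the hunk truncates).
    private func loadModelAndTokenizer() async throws -> (LLMModel, Tokenizers.Tokenizer) {
        fatalError("placeholder -- see the full source for the actual loading code")
    }
}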