diff --git a/operators/tokenizer/tokenizer_jsconfig.hpp b/operators/tokenizer/tokenizer_jsconfig.hpp
index 9ea4b444..9fc41dc6 100644
--- a/operators/tokenizer/tokenizer_jsconfig.hpp
+++ b/operators/tokenizer/tokenizer_jsconfig.hpp
@@ -116,7 +116,7 @@ class TokenJsonConfig final {
         vocab_stream = std::make_unique<std::istringstream>(vocab_str);
       }
     } else {
-      auto ifs = std::make_unique<std::ifstream>(vocab_path_);
+      auto ifs = std::make_unique<std::ifstream>(path(vocab_path_.data()).open());
       if (!ifs->is_open()) {
        return OrtxStatus(extError_t::kOrtxErrorInvalidArgument, vocab_path_ + ": does not exist.");
       }
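
The change stops constructing the std::ifstream directly from the raw vocab_path_ string and instead opens it through a path helper. The helper's definition is not part of this hunk; the usual reason for such an indirection is that a narrow-char path cannot address non-ASCII (UTF-8) file names on Windows, so the helper widens the path before opening. The sketch below is a minimal, hypothetical illustration of that idea, not the repository's actual path class.

// Hypothetical sketch of a path helper of the kind the changed line assumes:
// widen a UTF-8 path on Windows so std::ifstream can open file names outside
// the current ANSI code page, and pass the path through unchanged elsewhere.
#include <fstream>
#include <memory>
#include <string>
#ifdef _WIN32
#include <windows.h>
#endif

class path {
 public:
  explicit path(const char* utf8) : utf8_(utf8) {}

  std::ifstream open() const {
#ifdef _WIN32
    // Convert UTF-8 to UTF-16 so the wide-char ifstream overload resolves
    // non-ASCII file names correctly on Windows.
    int len = MultiByteToWideChar(CP_UTF8, 0, utf8_.c_str(), -1, nullptr, 0);
    if (len <= 0) {
      return std::ifstream();  // conversion failed; caller sees !is_open()
    }
    std::wstring wide(static_cast<size_t>(len), L'\0');
    MultiByteToWideChar(CP_UTF8, 0, utf8_.c_str(), -1, &wide[0], len);
    return std::ifstream(wide.c_str());
#else
    return std::ifstream(utf8_);
#endif
  }

 private:
  std::string utf8_;
};

// Usage mirroring the changed line (vocab_path here stands in for vocab_path_):
//   auto ifs = std::make_unique<std::ifstream>(path(vocab_path.data()).open());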