Commit
fix Windows unicode model path
royshil committed Oct 29, 2023
1 parent a39979e commit 030bb81
Showing 1 changed file with 27 additions and 0 deletions.
27 changes: 27 additions & 0 deletions src/whisper-processing.cpp
@@ -8,6 +8,8 @@

#include <algorithm>
#include <cctype>
#include <fstream>
#include <Windows.h>

#define VAD_THOLD 0.0001f
#define FREQ_THOLD 100.0f
@@ -73,7 +75,32 @@ bool vad_simple(float *pcmf32, size_t pcm32f_size, uint32_t sample_rate, float v
struct whisper_context *init_whisper_context(const std::string &model_path)
{
obs_log(LOG_INFO, "Loading whisper model from %s", model_path.c_str());

#ifdef _WIN32
// convert the UTF-8 model path to a wide string (wchar_t) so the file can be opened correctly on Windows
int count = MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(), (int)model_path.length(), NULL, 0);
std::wstring model_path_ws(count, 0);
MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(), (int)model_path.length(), &model_path_ws[0], count);

// Read model into buffer
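// Note: MSVC's std::ifstream provides a non-standard constructor overload that accepts a std::wstring path,
// which is what allows a non-ASCII path to be opened here.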
std::ifstream modelFile(model_path_ws, std::ios::binary);
if (!modelFile.is_open()) {
obs_log(LOG_ERROR, "Failed to open whisper model file %s", model_path.c_str());
return nullptr;
}
modelFile.seekg(0, std::ios::end);
const size_t modelFileSize = modelFile.tellg();
modelFile.seekg(0, std::ios::beg);
std::vector<char> modelBuffer(modelFileSize);
modelFile.read(modelBuffer.data(), modelFileSize);
modelFile.close();

// Initialize whisper
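// Initializing from a memory buffer avoids whisper_init_from_file(), which opens the path as a narrow
// (char*) string and can fail for non-ASCII paths on Windows.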
struct whisper_context *ctx = whisper_init_from_buffer(modelBuffer.data(), modelFileSize);
#else
struct whisper_context *ctx = whisper_init_from_file(model_path.c_str());
#endif

if (ctx == nullptr) {
obs_log(LOG_ERROR, "Failed to load whisper model");
return nullptr;
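
For reference, a minimal sketch (not part of the commit) of the same two-pass MultiByteToWideChar conversion the patch uses, wrapped in a hypothetical utf8_to_wide() helper; the name is illustrative only:

#ifdef _WIN32
#include <string>
#include <Windows.h>

// Hypothetical helper illustrating the pattern above.
static std::wstring utf8_to_wide(const std::string &utf8)
{
    // First call with a null output buffer returns the required length in wchar_t units.
    int count = MultiByteToWideChar(CP_UTF8, 0, utf8.c_str(), (int)utf8.length(), NULL, 0);
    std::wstring wide(count, 0);
    // Second call performs the actual conversion into the pre-sized buffer.
    MultiByteToWideChar(CP_UTF8, 0, utf8.c_str(), (int)utf8.length(), &wide[0], count);
    return wide;
}
#endif

With such a helper, the model file could be opened as std::ifstream modelFile(utf8_to_wide(model_path), std::ios::binary);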
